@SuppressWarnings("unchecked")
  private brooklyn.entity.proxying.EntitySpec<? extends Entity> toCoreEntitySpec(
      brooklyn.rest.domain.EntitySpec spec) {
    String type = spec.getType();
    String name = spec.getName();
    Map<String, String> config =
        (spec.getConfig() == null)
            ? Maps.<String, String>newLinkedHashMap()
            : Maps.newLinkedHashMap(spec.getConfig());

    Class<? extends Entity> tempclazz;
    try {
      tempclazz = getCatalog().loadClassByType(type, Entity.class);
    } catch (NoSuchElementException e) {
      try {
        tempclazz = (Class<? extends Entity>) getCatalog().getRootClassLoader().loadClass(type);
        log.info("Catalog does not contain item for type {}; loaded class directly instead", type);
      } catch (ClassNotFoundException e2) {
        log.warn(
            "No catalog item for type {}, and could not load class directly; rethrowing", type);
        throw e;
      }
    }
    final Class<? extends Entity> clazz = tempclazz;
    brooklyn.entity.proxying.EntitySpec<? extends Entity> result;
    if (clazz.isInterface()) {
      result = brooklyn.entity.proxying.EntitySpec.create(clazz);
    } else {
      result = brooklyn.entity.proxying.EntitySpec.create(Entity.class).impl(clazz);
    }
    if (!Strings.isEmpty(name)) result.displayName(name);
    result.configure(convertFlagsToKeys(result.getType(), config));
    configureRenderingMetadata(spec, result);
    return result;
  }
  @SuppressWarnings({"rawtypes", "unchecked"})
  private <T extends Entity> brooklyn.entity.proxying.EntitySpec<?> toCoreEntitySpec(
      Class<T> clazz, String name, Map<String, String> configO) {
    Map<String, String> config =
        (configO == null)
            ? Maps.<String, String>newLinkedHashMap()
            : Maps.newLinkedHashMap(configO);

    brooklyn.entity.proxying.EntitySpec<? extends Entity> result;
    if (clazz.isInterface()) {
      result = brooklyn.entity.proxying.EntitySpec.create(clazz);
    } else {
      // If this is a concrete class, particularly for an Application class, we want the proxy
      // to expose all interfaces it implements.
      Class interfaceclazz =
          (Application.class.isAssignableFrom(clazz)) ? Application.class : Entity.class;
      Set<Class<?>> additionalInterfaceClazzes =
          Reflections.getInterfacesIncludingClassAncestors(clazz);
      result =
          brooklyn.entity.proxying.EntitySpec.create(interfaceclazz)
              .impl(clazz)
              .additionalInterfaces(additionalInterfaceClazzes);
    }

    if (!Strings.isEmpty(name)) result.displayName(name);
    result.configure(convertFlagsToKeys(result.getImplementation(), config));
    return result;
  }
Example No. 3
 /**
  * Transforms a cube: deep-copies it and re-keys its measures and dimensions (and each
  * dimension's levels) by name.
  *
  * @param cube the source cube
  * @return new Cube
  */
 public static Cube transformCube(Cube cube) {
   MiniCube newCube = (MiniCube) DeepcopyUtils.deepCopy(cube);
   final Map<String, Measure> measures = Maps.newConcurrentMap();
   cube.getMeasures()
       .values()
       .forEach(
           m -> {
             measures.put(m.getName(), m);
           });
   newCube.setMeasures(measures);
   final Map<String, Dimension> dimensions = Maps.newLinkedHashMap();
   cube.getDimensions()
       .values()
       .forEach(
           dim -> {
             MiniCubeDimension tmp = (MiniCubeDimension) DeepcopyUtils.deepCopy(dim);
             LinkedHashMap<String, Level> tmpLevel = Maps.newLinkedHashMap();
             dim.getLevels()
                 .values()
                 .forEach(
                     level -> {
                       level.setDimension(dim);
                       tmpLevel.put(level.getName(), level);
                     });
             tmp.setLevels(tmpLevel);
             dimensions.put(tmp.getName(), tmp);
           });
   newCube.setDimensions(dimensions);
   return newCube;
 }
Example No. 4
 private void label(SubmitRecord.Label n, AccountInfo who) {
   switch (n.status) {
     case OK:
       if (ok == null) {
         ok = Maps.newLinkedHashMap();
       }
       ok.put(n.label, who);
       break;
     case REJECT:
       if (reject == null) {
         reject = Maps.newLinkedHashMap();
       }
       reject.put(n.label, who);
       break;
     case NEED:
       if (need == null) {
         need = Maps.newLinkedHashMap();
       }
       need.put(n.label, new None());
       break;
     case MAY:
       if (may == null) {
         may = Maps.newLinkedHashMap();
       }
       may.put(n.label, who);
       break;
     case IMPOSSIBLE:
       if (impossible == null) {
         impossible = Maps.newLinkedHashMap();
       }
       impossible.put(n.label, new None());
       break;
   }
 }
 /**
  * Build a node template
  *
  * @param dependencies the dependencies on which new node will be constructed
  * @param indexedNodeType the type of the node
   * @param templateToMerge the template that can be used to merge into the new node template
   * @param toscaElementFinder the finder used to resolve TOSCA elements from the given dependencies
   * @return new constructed node template
  */
 public static NodeTemplate buildNodeTemplate(
     Set<CSARDependency> dependencies,
     IndexedNodeType indexedNodeType,
     NodeTemplate templateToMerge,
     IToscaElementFinder toscaElementFinder) {
   NodeTemplate nodeTemplate = new NodeTemplate();
   nodeTemplate.setType(indexedNodeType.getElementId());
   Map<String, Capability> capabilities = Maps.newLinkedHashMap();
   Map<String, Requirement> requirements = Maps.newLinkedHashMap();
   Map<String, AbstractPropertyValue> properties = Maps.newLinkedHashMap();
   Map<String, DeploymentArtifact> deploymentArtifacts = null;
   Map<String, DeploymentArtifact> deploymentArtifactsToMerge =
       templateToMerge != null ? templateToMerge.getArtifacts() : null;
   if (deploymentArtifactsToMerge != null) {
     if (indexedNodeType.getArtifacts() != null) {
       deploymentArtifacts = Maps.newLinkedHashMap(indexedNodeType.getArtifacts());
       for (Entry<String, DeploymentArtifact> entryArtifact :
           deploymentArtifactsToMerge.entrySet()) {
         DeploymentArtifact existingArtifact = entryArtifact.getValue();
         if (deploymentArtifacts.containsKey(entryArtifact.getKey())) {
           deploymentArtifacts.put(entryArtifact.getKey(), existingArtifact);
         }
       }
     }
   } else if (indexedNodeType.getArtifacts() != null) {
     deploymentArtifacts = Maps.newLinkedHashMap(indexedNodeType.getArtifacts());
   }
   fillCapabilitiesMap(
       capabilities,
       indexedNodeType.getCapabilities(),
       dependencies,
       templateToMerge != null ? templateToMerge.getCapabilities() : null,
       toscaElementFinder);
   fillRequirementsMap(
       requirements,
       indexedNodeType.getRequirements(),
       dependencies,
       templateToMerge != null ? templateToMerge.getRequirements() : null,
       toscaElementFinder);
   fillProperties(
       properties,
       indexedNodeType.getProperties(),
       templateToMerge != null ? templateToMerge.getProperties() : null);
   nodeTemplate.setCapabilities(capabilities);
   nodeTemplate.setRequirements(requirements);
   nodeTemplate.setProperties(properties);
   nodeTemplate.setAttributes(indexedNodeType.getAttributes());
   nodeTemplate.setInterfaces(indexedNodeType.getInterfaces());
   nodeTemplate.setArtifacts(deploymentArtifacts);
   if (templateToMerge != null && templateToMerge.getRelationships() != null) {
     nodeTemplate.setRelationships(templateToMerge.getRelationships());
   }
   return nodeTemplate;
 }
Example No. 6
  /**
   * Collects the dimensions used in the filter areas of a report, grouped by cube id.
   *
   * @param model the report design model
   * @return Map<String, List<Dimension>> keyed by cube id
   */
 private static Map<String, List<Dimension>> collectFilterDim(ReportDesignModel model) {
   Map<String, List<Dimension>> rs = Maps.newHashMap();
   for (ExtendArea area : model.getExtendAreaList()) {
     if (isFilterArea(area.getType())) {
       Cube cube = model.getSchema().getCubes().get(area.getCubeId());
       if (rs.get(area.getCubeId()) == null) {
         List<Dimension> dims = Lists.newArrayList();
         area.listAllItems()
             .values()
             .forEach(
                 key -> {
                   MiniCubeDimension dim =
                       (MiniCubeDimension)
                           DeepcopyUtils.deepCopy(cube.getDimensions().get(key.getId()));
                   dim.setLevels(Maps.newLinkedHashMap());
                   cube.getDimensions()
                       .get(key.getId())
                       .getLevels()
                       .values()
                       .forEach(
                           level -> {
                             dim.getLevels().put(level.getName(), level);
                           });
                   dims.add(dim);
                 });
         rs.put(area.getCubeId(), dims);
       } else {
         area.listAllItems()
             .values()
             .forEach(
                 key -> {
                   MiniCubeDimension dim =
                       (MiniCubeDimension)
                           DeepcopyUtils.deepCopy(cube.getDimensions().get(key.getId()));
                   dim.setLevels(Maps.newLinkedHashMap());
                   cube.getDimensions()
                       .get(key.getId())
                       .getLevels()
                       .values()
                       .forEach(
                           level -> {
                             dim.getLevels().put(level.getName(), level);
                           });
                   rs.get(area.getCubeId()).add(dim);
                 });
       }
     }
   }
   return rs;
 }
Example No. 7
  public void testSetBlockDeviceMappingForInstanceInRegion()
      throws SecurityException, NoSuchMethodException, IOException {
    Invokable<?, ?> method =
        method(
            InstanceApi.class,
            "setBlockDeviceMappingForInstanceInRegion",
            String.class,
            String.class,
            Map.class);

    Map<String, BlockDevice> mapping = Maps.newLinkedHashMap();
    mapping.put("/dev/sda1", new BlockDevice("vol-test1", true));
    GeneratedHttpRequest request =
        processor.createRequest(method, Lists.<Object>newArrayList(null, "1", mapping));

    request = (GeneratedHttpRequest) request.getFilters().get(0).filter(request);

    assertRequestLineEquals(request, "POST https://ec2.us-east-1.amazonaws.com/ HTTP/1.1");
    assertNonPayloadHeadersEqual(request, "Host: ec2.us-east-1.amazonaws.com\n");
    assertPayloadEquals(
        request,
        filter.filter(setBlockDeviceMapping).getPayload().getRawContent().toString(),
        "application/x-www-form-urlencoded",
        false);

    checkFilters(request);
  }
Example No. 8
  /** Finds all the __XX__ tokens in the given JavaScript string. */
  private Map<String, Token> findTokens(String src) {
    Map<String, Token> tokens = Maps.newLinkedHashMap();

    findTokens(tokens, "", src);

    return tokens;
  }
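For context, here is a minimal, self-contained sketch of the same token-scanning idea, using only java.util.regex and Guava's Maps; the __NAME__ pattern and the plain String values are assumptions standing in for the original Token type, which is not shown above.

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.google.common.collect.Maps;

public class TokenScanSketch {
  // Assumed placeholder shape: __NAME__ (the real Token type and pattern are not shown above).
  private static final Pattern TOKEN = Pattern.compile("__([A-Z0-9_]+?)__");

  /** Collects each distinct token keyed by its name, in order of first appearance. */
  public static Map<String, String> findTokens(String src) {
    Map<String, String> tokens = Maps.newLinkedHashMap();
    Matcher matcher = TOKEN.matcher(src);
    while (matcher.find()) {
      tokens.putIfAbsent(matcher.group(1), matcher.group());
    }
    return tokens;
  }

  public static void main(String[] args) {
    System.out.println(findTokens("var a = __FOO__; var b = __BAR__ + __FOO__;"));
    // prints {FOO=__FOO__, BAR=__BAR__}
  }
}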
Example No. 9
  @Nonnull
  public static TestMatrixArtifact convertToConsumableArtifact(
      @Nonnull final TestMatrixVersion testMatrix) {
    final Audit audit = new Audit();
    final Date published =
        Preconditions.checkNotNull(testMatrix.getPublished(), "Missing publication date");
    audit.setUpdated(published.getTime());
    audit.setVersion(testMatrix.getVersion());
    audit.setUpdatedBy(testMatrix.getAuthor());

    final TestMatrixArtifact artifact = new TestMatrixArtifact();
    artifact.setAudit(audit);

    final TestMatrixDefinition testMatrixDefinition =
        Preconditions.checkNotNull(
            testMatrix.getTestMatrixDefinition(), "Missing test matrix definition");

    final Map<String, TestDefinition> testDefinitions = testMatrixDefinition.getTests();

    final Map<String, ConsumableTestDefinition> consumableTestDefinitions = Maps.newLinkedHashMap();
    for (final Entry<String, TestDefinition> entry : testDefinitions.entrySet()) {
      final TestDefinition td = entry.getValue();
      final ConsumableTestDefinition ctd = convertToConsumableTestDefinition(td);
      consumableTestDefinitions.put(entry.getKey(), ctd);
    }

    artifact.setTests(consumableTestDefinitions);
    return artifact;
  }
Example No. 10
  private void setBlockDeviceMappingForInstanceInRegion() {
    String volumeId = ebsInstance.getEbsBlockDevices().get("/dev/sda1").getVolumeId();

    Map<String, BlockDevice> mapping = Maps.newLinkedHashMap();
    mapping.put("/dev/sda1", new BlockDevice(volumeId, false));
    try {
      client
          .getInstanceServices()
          .setBlockDeviceMappingForInstanceInRegion(null, ebsInstance.getId(), mapping);

      Map<String, BlockDevice> devices =
          client
              .getInstanceServices()
              .getBlockDeviceMappingForInstanceInRegion(null, ebsInstance.getId());
      assertEquals(devices.size(), 1);
      String deviceName = Iterables.getOnlyElement(devices.keySet());
      BlockDevice device = Iterables.getOnlyElement(devices.values());

      assertEquals(device.getVolumeId(), volumeId);
      assertEquals(deviceName, "/dev/sda1");
      assertEquals(device.isDeleteOnTermination(), false);

      System.out.println("OK: setBlockDeviceMappingForInstanceInRegion");
    } catch (Exception e) {
      System.err.println("setBlockDeviceMappingForInstanceInRegion");

      e.printStackTrace();
    }
  }
Example No. 11
public class ObjSetManager {
  private final Map<ObjIndexSet, Integer> _store = Maps.newLinkedHashMap();
  private final List<Integer> _indices = Lists.newArrayList();
  private final List<Integer> _lengths = Lists.newArrayList();

  public int findSet(final ObjIndexSet set) {
    if (_store.containsKey(set)) {
      return _store.get(set);
    }

    final int index = _store.size();
    _store.put(set, index);
    return index;
  }

  public void addIndex(final int index) {
    _indices.add(index);
  }

  public void addLength(final int length) {
    _lengths.add(length);
  }

  public Map<ObjIndexSet, Integer> getStore() {
    return _store;
  }

  public List<Integer> getIndices() {
    return _indices;
  }

  public List<Integer> getLengths() {
    return _lengths;
  }
}
Example No. 12
  @Test(groups = {"integration", "live"})
  public void testPutMoreThanSingleListing()
      throws InterruptedException, ExecutionException, TimeoutException {
    if (maxResultsForTestListings() == 0) return;
    String bucketName = getContainerName();
    try {
      BlobMap map = createMap(context, bucketName);
      Builder<String> keySet = ImmutableSet.<String>builder();
      for (int i = 0; i < maxResultsForTestListings() + 1; i++) {
        keySet.add(i + "");
      }

      Map<String, Blob> newMap = Maps.newLinkedHashMap();
      for (String key : keySet.build()) {
        newMap.put(key, map.blobBuilder().payload(key).build());
      }
      map.putAll(newMap);
      newMap.clear();

      assertConsistencyAwareMapSize(map, maxResultsForTestListings() + 1);
      assertConsistencyAwareKeySetEquals(map, keySet.build());
      map.clear();
      assertConsistencyAwareMapSize(map, 0);
    } finally {
      returnContainer(bucketName);
    }
  }
Example No. 13
 public Map<String, StackEntity.Parameter> getParameterMap() {
   Map<String, StackEntity.Parameter> map = Maps.newLinkedHashMap();
   for (StackEntity.Parameter parameter : parameters) {
     map.put(parameter.getKey(), parameter);
   }
   return map;
 }
/** @author Morten Olav Hansen <*****@*****.**> */
public class MetaDataExportFormAction implements Action {
  @Autowired private SchemaService schemaService;

  // -------------------------------------------------------------------------
  // Input & Output
  // -------------------------------------------------------------------------

  private Map<String, String> exportClasses = Maps.newLinkedHashMap();

  public Map<String, String> getExportClasses() {
    return exportClasses;
  }

  // -------------------------------------------------------------------------
  // Action Implementation
  // -------------------------------------------------------------------------

  @Override
  public String execute() throws Exception {
    List<String> values = Lists.newArrayList();

    for (Schema schema : schemaService.getMetadataSchemas()) {
      values.add(schema.getPlural());
    }

    Collections.sort(values);

    for (String key : values) {
      String[] camelCaseWords = StringUtils.capitalize(key).split("(?=[A-Z])");
      exportClasses.put(key, StringUtils.join(camelCaseWords, " "));
    }

    return SUCCESS;
  }
}
Example No. 15
 private Map<String, String[]> flattenMap(Map<String, List<String>> map) {
   Map<String, String[]> result = Maps.newLinkedHashMap();
   for (String key : map.keySet()) {
     result.put(key, Iterables.toArray(map.get(key), String.class));
   }
   return result;
 }
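A small usage sketch of the flattening step above, with illustrative parameter names; iterating entrySet instead of keySet avoids the second lookup per key but produces the same ordered result.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class FlattenMapSketch {
  public static Map<String, String[]> flattenMap(Map<String, List<String>> map) {
    // LinkedHashMap keeps the keys in the order they were added to the input map.
    Map<String, String[]> result = Maps.newLinkedHashMap();
    for (Map.Entry<String, List<String>> entry : map.entrySet()) {
      result.put(entry.getKey(), Iterables.toArray(entry.getValue(), String.class));
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, List<String>> params = Maps.newLinkedHashMap();
    params.put("tag", Lists.newArrayList("a", "b"));
    params.put("id", Lists.newArrayList("42"));
    flattenMap(params).forEach((k, v) -> System.out.println(k + " -> " + Arrays.toString(v)));
    // tag -> [a, b]
    // id -> [42]
  }
}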
Example No. 16
  @Override
  public Map<String, String> extract(List<String> messages) {
    Map<String, String> vendorData = Maps.newLinkedHashMap();
    // There should always be 1.
    for (String message : messages) {
      Matcher matcher = PARENS.matcher(message);
      if (matcher.find()) {
        String group = matcher.group(1);

        // Strip parens.
        group = group.substring(1, group.length() - 1);
        String[] pieces = group.split("[ \"]+");
        int start = 0;
        for (int i = 0; i < pieces.length; i++) {
          if (pieces[i].length() == 0) {
            start = i;
            break;
          }
        }
        for (int i = start; i < pieces.length; i += 2) {
          String key = pieces[i].toUpperCase();
          String value = pieces[i + 1].toUpperCase();
          vendorData.put(key, value);
        }
      }
    }
    return vendorData;
  }
Example No. 17
 @Test
 public void testCollectionInstance() {
   Maps.newLinkedHashMap();
   Lists.newArrayList();
   Sets.newHashSet();
   ObjectArrays.newArray(Integer.class, 10);
 }
Example No. 18
 private static LinkedHashMap<IProjectStage, Map<IProjectStageSkill, IWorkLoad>>
     extractSkillToWorkLoadMapList(
         List<String> words, List<IProjectStage> stages, List<IProjectStageSkill> skillsList) {
   LinkedHashMap<IProjectStage, Map<IProjectStageSkill, IWorkLoad>> stageToWorkLoadMap =
       Maps.newLinkedHashMap();
   LinkedHashMap<IProjectStage, List<String>> estimationStringMap =
       extractEstimationStringMap(words, stages);
   Map<IProjectStage, List<IProjectStageSkill>> stageToSkillMap =
       extractStageToSkillMap(stages, skillsList);
   for (Entry<IProjectStage, List<String>> estimationStringEntry :
       estimationStringMap.entrySet()) {
     Map<IProjectStageSkill, IWorkLoad> workLoadMap = Maps.newHashMap();
     IProjectStage stage = estimationStringEntry.getKey();
     List<String> estimationStrings = estimationStringEntry.getValue();
     List<IProjectStageSkill> stageSkillList = stageToSkillMap.get(stage);
     for (int i = 0; i < stageSkillList.size(); i++) {
       String estimationString = estimationStrings.get(i);
       IProjectStageSkill skill = stageSkillList.get(i);
       if (estimationString == null || estimationString.trim().isEmpty()) {
         continue;
       }
       Float estimation = Float.parseFloat(estimationString.replace(',', '.'));
       IWorkLoad load = new WorkLoad(estimation, DEFAULT_WORKLOAD_TIMEUNIT);
       workLoadMap.put(skill, load);
     }
     stageToWorkLoadMap.put(stage, workLoadMap);
   }
   return stageToWorkLoadMap;
 }
Example No. 19
  /**
   * Returns the project's final dependencies, including those inherited from parent dependencies.
   * The result is ordered: the shallowest dependencies come first.
   *
   * @param projectId the project id
   * @return the ordered list of final dependencies
   */
  public LinkedList<Dependency> queryFinalDependency(int projectId) {
    // Note: a new List must be created here instead of reusing the existing one, because the
    // cache holds the same List instance.
    List<Dependency> dependencies = Lists.newLinkedList(this.list(projectId));

    Map<Integer, Dependency> finalDeps = Maps.newLinkedHashMap();
    for (Dependency dep : dependencies) {
      finalDeps.put(dep.getId(), dep);
    }

    // Fetch dependencies layer by layer, adding each layer's dependencies to the final set.
    int maxDepth = 20;
    for (int i = 0; i < maxDepth; ++i) {
      List<Project> projects = Lists.newLinkedList();

      for (Dependency dependency : dependencies) {
        Project project = projectService.select(dependency.getDependencyProjectId());
        projects.add(project);
      }
      dependencies.clear();

      for (Project project : projects) {
        List<Dependency> deps = this.list(project.getId());
        dependencies.addAll(deps);
        for (Dependency dep : dependencies) {
          finalDeps.put(dep.getId(), dep);
        }
      }
    }

    return Lists.newLinkedList(finalDeps.values());
  }
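The layering logic above, reduced to a self-contained sketch: plain String ids stand in for the Project and Dependency types, a local map stands in for projectService, and the LinkedHashMap keyed by id keeps shallow dependencies ahead of deeper ones, as in the method above.

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class FinalDependencySketch {
  // Direct dependencies per project id; an assumed stand-in for this.list(projectId).
  private static final Map<String, List<String>> DIRECT = Maps.newLinkedHashMap();

  public static LinkedList<String> queryFinalDependency(String projectId) {
    List<String> frontier = Lists.newLinkedList(DIRECT.getOrDefault(projectId, List.of()));
    // Insertion order is preserved, and re-adding a deeper duplicate does not move it,
    // so the shallowest occurrence of each dependency wins.
    Map<String, String> finalDeps = Maps.newLinkedHashMap();
    frontier.forEach(dep -> finalDeps.putIfAbsent(dep, dep));

    int maxDepth = 20; // same fixed bound as above
    for (int i = 0; i < maxDepth && !frontier.isEmpty(); i++) {
      List<String> next = Lists.newLinkedList();
      for (String dep : frontier) {
        for (String transitive : DIRECT.getOrDefault(dep, List.of())) {
          finalDeps.putIfAbsent(transitive, transitive);
          next.add(transitive);
        }
      }
      frontier = next;
    }
    return Lists.newLinkedList(finalDeps.values());
  }

  public static void main(String[] args) {
    DIRECT.put("app", Lists.newArrayList("web", "core"));
    DIRECT.put("web", Lists.newArrayList("core", "log"));
    System.out.println(queryFinalDependency("app")); // [web, core, log]
  }
}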
Example No. 20
  /**
   * In a {@link BuckConfig}, an alias can either refer to a fully-qualified build target, or an
   * alias defined earlier in the {@code alias} section. The mapping produced by this method
   * reflects the result of resolving all aliases as values in the {@code alias} section.
   */
  private static ImmutableMap<String, BuildTarget> createAliasToBuildTargetMap(
      ImmutableMap<String, String> rawAliasMap) {
    // We use a LinkedHashMap rather than an ImmutableMap.Builder because we want both (1) order to
    // be preserved, and (2) the ability to inspect the Map while building it up.
    LinkedHashMap<String, BuildTarget> aliasToBuildTarget = Maps.newLinkedHashMap();
    for (Map.Entry<String, String> aliasEntry : rawAliasMap.entrySet()) {
      String alias = aliasEntry.getKey();
      validateAliasName(alias);

      // Determine whether the mapping is to a build target or to an alias.
      String value = aliasEntry.getValue();
      BuildTarget buildTarget;
      if (isValidAliasName(value)) {
        buildTarget = aliasToBuildTarget.get(value);
        if (buildTarget == null) {
          throw new HumanReadableException("No alias for: %s.", value);
        }
      } else {
        // Here we parse the alias values with a BuildTargetParser to be strict. We could be looser
        // and just grab everything between "//" and ":" and assume it's a valid base path.
        buildTarget =
            BuildTargetParser.INSTANCE.parse(value, BuildTargetPatternParser.fullyQualified());
      }
      aliasToBuildTarget.put(alias, buildTarget);
    }
    return ImmutableMap.copyOf(aliasToBuildTarget);
  }
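A stripped-down sketch of the same resolution rule, assuming plain strings in place of Buck's BuildTarget and BuildTargetParser: a value that starts with "//" is treated as a fully-qualified target, anything else must name an alias defined earlier in the map.

import java.util.LinkedHashMap;
import java.util.Map;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

public class AliasResolutionSketch {
  public static ImmutableMap<String, String> resolveAliases(Map<String, String> rawAliases) {
    // LinkedHashMap so earlier entries can be looked up while later ones are resolved,
    // and so declaration order is preserved in the result.
    LinkedHashMap<String, String> resolved = Maps.newLinkedHashMap();
    for (Map.Entry<String, String> entry : rawAliases.entrySet()) {
      String value = entry.getValue();
      String target;
      if (value.startsWith("//")) {
        target = value; // assumed stand-in for BuildTargetParser.INSTANCE.parse(...)
      } else {
        target = resolved.get(value); // must refer to an alias defined earlier
        if (target == null) {
          throw new IllegalArgumentException("No alias for: " + value);
        }
      }
      resolved.put(entry.getKey(), target);
    }
    return ImmutableMap.copyOf(resolved);
  }

  public static void main(String[] args) {
    System.out.println(resolveAliases(ImmutableMap.of("app", "//apps:app", "default", "app")));
    // prints {app=//apps:app, default=//apps:app}
  }
}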
Example No. 21
    Event toPoint(MetricInfo info) {
      Event p = new Event();
      String metricName = Utils.smoothText.apply(info.metric);
      p.values = ImmutableMap.of(metricName, (Object) info.value);
      Warning warning = warnings.get(info.metric);
      p.tags = Maps.newLinkedHashMap(); // We need the order!
      p.tags.put("service", info.service.replace("/", ":")); // Transform into real service name.
      p.tags.put("host", info.host);
      for (Map.Entry<String, String> e : userMap.entrySet()) {
        String user = e.getKey();
        String dim = e.getValue();
        if ("user1".equals(user)) {
          p.tags.put(dim, info.user1);
        } else if ("user2".equals(user)) {
          p.tags.put(dim, info.user2);
        } else if ("user3".equals(user)) {
          p.tags.put(dim, info.user3);
        } else if ("user4".equals(user)) {
          p.tags.put(dim, info.user4);
        }
      }

      if (warning != null) {
        warning.checkAlarm(info, p.tags);
      }

      return p;
    }
Example No. 22
  @MetaData(title = "树形表格数据")
  public HttpHeaders treeGridData() {
    Map<String, Menu> menuDatas = Maps.newLinkedHashMap();

    String nodeid = this.getParameter("nodeid");
    if (StringUtils.isNotBlank(nodeid)) {
      Menu parent = menuService.findOne(nodeid);
      List<Menu> children = menuService.findChildren(parent);
      for (Menu menu : children) {
        menu.addExtraAttribute("level", menu.getLevel());
        menu.addExtraAttribute("parent", nodeid);
        menu.addExtraAttribute(
            "isLeaf", CollectionUtils.isEmpty(menuService.findChildren(menu)));
        menu.addExtraAttribute("expanded", false);
        menu.addExtraAttribute("loaded", true);
        menuDatas.put(menu.getId(), menu);
      }
    } else {
      GroupPropertyFilter groupFilter =
          GroupPropertyFilter.buildGroupFilterFromHttpRequest(entityClass, getRequest());
      if (groupFilter.isEmpty()) {
        groupFilter.and(new PropertyFilter(MatchType.NU, "parent", true));
      }
      List<Menu> menus =
          menuService.findByFilters(groupFilter, new Sort(Direction.DESC, "parent", "orderRank"));
      for (Menu menu : menus) {
        loopTreeGridData(menuDatas, menu, false);
      }
    }
    setModel(menuDatas.values());
    return buildDefaultHttpHeaders();
  }
Example No. 23
/** @author Adrian Cole */
public abstract class ResourceMetadataBuilder<T extends Enum<T>> {
  protected String providerId;
  protected String name;
  protected Location location;
  protected URI uri;
  protected Map<String, String> userMetadata = Maps.newLinkedHashMap();

  public ResourceMetadataBuilder<T> providerId(String providerId) {
    this.providerId = providerId;
    return this;
  }

  public ResourceMetadataBuilder<T> name(String name) {
    this.name = name;
    return this;
  }

  public ResourceMetadataBuilder<T> location(Location location) {
    this.location = location;
    return this;
  }

  public ResourceMetadataBuilder<T> uri(URI uri) {
    this.uri = uri;
    return this;
  }

  public ResourceMetadataBuilder<T> userMetadata(Map<String, String> userMetadata) {
    this.userMetadata = checkNotNull(userMetadata, "userMetadata");
    return this;
  }
}
Example No. 24
 /**
  * Given a list of predicates to push down, this method returns the set of predicates that still
  * need to be pushed. A predicate still needs to be pushed if 1) its String representation is not
  * included in the input set of predicates to exclude, and 2) it is not already in the subtree
  * rooted at the input node. This method updates the set of predicates to exclude with the String
  * representation of the predicates in the output and in the subtree.
  *
  * @param predicatesToExclude String representation of predicates that should be excluded
  * @param inp root of the subtree
  * @param predsToPushDown candidate predicates to push down through the subtree
  * @return list of predicates to push down
  */
 public static ImmutableList<RexNode> getPredsNotPushedAlready(
     Set<String> predicatesToExclude, RelNode inp, List<RexNode> predsToPushDown) {
   // Bail out if there is nothing to push
   if (predsToPushDown.isEmpty()) {
     return ImmutableList.of();
   }
   // Build map to not convert multiple times, further remove already included predicates
   Map<String, RexNode> stringToRexNode = Maps.newLinkedHashMap();
   for (RexNode r : predsToPushDown) {
     String rexNodeString = r.toString();
     if (predicatesToExclude.add(rexNodeString)) {
       stringToRexNode.put(rexNodeString, r);
     }
   }
   if (stringToRexNode.isEmpty()) {
     return ImmutableList.of();
   }
   // Finally exclude preds that are already in the subtree as given by the metadata provider
   // Note: this is the last step, trying to avoid the expensive call to the metadata provider
   //       if possible
   Set<String> predicatesInSubtree = Sets.newHashSet();
   for (RexNode pred : RelMetadataQuery.instance().getPulledUpPredicates(inp).pulledUpPredicates) {
     predicatesInSubtree.add(pred.toString());
     predicatesInSubtree.addAll(Lists.transform(RelOptUtil.conjunctions(pred), REX_STR_FN));
   }
   final ImmutableList.Builder<RexNode> newConjuncts = ImmutableList.builder();
   for (Entry<String, RexNode> e : stringToRexNode.entrySet()) {
     if (predicatesInSubtree.add(e.getKey())) {
       newConjuncts.add(e.getValue());
     }
   }
   predicatesToExclude.addAll(predicatesInSubtree);
   return newConjuncts.build();
 }
Example No. 25
 public FlattenedGrammarAccess(final RuleNames names, final RuleFilter filter) {
   final Grammar grammar = names.getContextGrammar();
   Grammar flattenedGrammar = this.<Grammar>copy(grammar);
   String _name = grammar.getName();
   flattenedGrammar.setName(_name);
   LinkedHashMap<RuleWithParameterValues, AbstractRule> origToCopy =
       Maps.<RuleWithParameterValues, AbstractRule>newLinkedHashMap();
   List<AbstractRule> _rules = filter.getRules(grammar);
   boolean _isDiscardRuleTypeRef = filter.isDiscardRuleTypeRef();
   final ArrayList<AbstractRule> copies =
       this.copyRuleStubs(names, origToCopy, _rules, _isDiscardRuleTypeRef);
   EList<AbstractRule> _rules_1 = flattenedGrammar.getRules();
   Iterables.<AbstractRule>addAll(_rules_1, copies);
   Multimap<TerminalRule, AbstractRule> calledFrom = this.copyRuleBodies(copies, origToCopy);
   this.setHiddenTokens(flattenedGrammar, grammar, origToCopy);
   this.markAsFragment(calledFrom);
   boolean _isDiscardUnreachableRules = filter.isDiscardUnreachableRules();
   if (_isDiscardUnreachableRules) {
     Set<AbstractRule> usedRules = CollectionLiterals.<AbstractRule>newHashSet();
     boolean _isDiscardTerminalRules = filter.isDiscardTerminalRules();
     boolean _not = (!_isDiscardTerminalRules);
     if (_not) {
       List<TerminalRule> _allTerminalRules = GrammarUtil.allTerminalRules(flattenedGrammar);
       usedRules.addAll(_allTerminalRules);
     }
     UsedRulesFinder finder = new UsedRulesFinder(usedRules);
     finder.compute(flattenedGrammar);
     EList<AbstractRule> _rules_2 = flattenedGrammar.getRules();
     _rules_2.retainAll(usedRules);
   }
   this.flattenedGrammar = flattenedGrammar;
   OriginalGrammar _originalGrammar = new OriginalGrammar(grammar);
   _originalGrammar.attachToEmfObject(flattenedGrammar);
 }
  /**
   * Validates and returns the selected repositories out of the list
   *
   * @param importableRepos List of importable repositories
   * @return repos map
   */
  private Map<String, RemoteRepoDescriptor> validatedAndReturnSelection(
      Collection<ImportableRemoteRepo> importableRepos) {
    Map<String, RemoteRepoDescriptor> map = Maps.newLinkedHashMap();

    if (importableRepos.isEmpty()) {
      error("Please select at least one repository to import.");
      return map;
    }

    // If a repo whose key already exists as another local/virtual repo key is selected, throw an
    // error
    for (ImportableRemoteRepo importableRepo : importableRepos) {
      if (importableRepo.isSelected()) {
        if (importableRepo.isExistsAsLocal() || importableRepo.isExistsAsVirtual()) {
          error(getString("existsAsLocalOrVirtualWarn"));
          map.clear();
          return map;
        }

        map.put(importableRepo.getRepoKey(), importableRepo.getRepoDescriptor());
      }
    }

    if (map.isEmpty()) {
      error("Please select at least one repository to import.");
    }

    return map;
  }
Example No. 27
  private static Object[] prepareArgsForEffectorFromMap(Effector<?> eff, Map m) {
    m = Maps.newLinkedHashMap(m); // make editable copy
    List newArgs = Lists.newArrayList();
    int newArgsNeeded = eff.getParameters().size();
    boolean mapUsed = false;

    for (int index = 0; index < eff.getParameters().size(); index++) {
      ParameterType<?> it = eff.getParameters().get(index);
      Object v;
      if (truth(it.getName()) && m.containsKey(it.getName())) {
        // argument is in the map
        v = m.remove(it.getName());
      } else if (it instanceof BasicParameterType && ((BasicParameterType) it).hasDefaultValue()) {
        // finally, default values are used to make up for missing parameters
        v = ((BasicParameterType) it).getDefaultValue();
      } else {
        throw new IllegalArgumentException(
            "Invalid arguments (missing argument " + it + ") for effector " + eff + ": " + m);
      }

      newArgs.add(TypeCoercions.coerce(v, it.getParameterClass()));
      newArgsNeeded--;
    }
    if (newArgsNeeded > 0)
      throw new IllegalArgumentException(
          "Invalid arguments (missing " + newArgsNeeded + ") for effector " + eff + ": " + m);
    return newArgs.toArray(new Object[newArgs.size()]);
  }
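A self-contained sketch of the same named-to-positional binding, assuming a simple parameter record with an optional default; Brooklyn's Effector, ParameterType, and TypeCoercions machinery are replaced with plain JDK types here.

import java.util.List;
import java.util.Map;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class NamedArgsSketch {
  /** Minimal stand-in for a declared parameter: a name plus an optional default value. */
  public record Param(String name, Object defaultValue, boolean hasDefault) {}

  public static Object[] bind(List<Param> declared, Map<String, Object> named) {
    Map<String, Object> remaining = Maps.newLinkedHashMap(named); // editable copy, as above
    List<Object> args = Lists.newArrayList();
    for (Param p : declared) {
      if (remaining.containsKey(p.name())) {
        args.add(remaining.remove(p.name())); // an explicitly supplied argument wins
      } else if (p.hasDefault()) {
        args.add(p.defaultValue()); // fall back to the declared default
      } else {
        throw new IllegalArgumentException("Missing argument: " + p.name());
      }
    }
    return args.toArray();
  }

  public static void main(String[] args) {
    List<Param> params = List.of(new Param("host", null, false), new Param("port", 8080, true));
    System.out.println(java.util.Arrays.toString(bind(params, Map.of("host", "example.org"))));
    // prints [example.org, 8080]
  }
}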
Example No. 28
/**
 * Collection of SOAP endpoints that have been published; will automatically assign a unique
 * address to any {@link SoapEndpoint}s whose {@link SoapEndpointSpec} does not specify an
 * {@link SoapEndpointSpec#getEndpointAddress() address}.
 */
class PublishedEndpoints {

  private int port = SoapEndpointPublishingRule.INITIAL_PORT;
  private Map<Class<?>, SoapEndpoint> soapEndpointByType = Maps.newLinkedHashMap();

  void publishEndpointIfRequired(final List<SoapEndpointSpec> soapEndpointSpecs) {
    // merge in any new endpoints to static cache
    for (SoapEndpointSpec soapEndpointSpec : soapEndpointSpecs) {
      final Class<?> endpointClass = soapEndpointSpec.getEndpointClass();
      SoapEndpoint soapEndpoint = this.soapEndpointByType.get(endpointClass);
      if (soapEndpoint == null) {
        // instantiate and publish, automatically assigning an address to any that don't specify one
        soapEndpoint = new SoapEndpoint(soapEndpointSpec);
        soapEndpointByType.put(endpointClass, soapEndpoint);
        port = soapEndpoint.publish(port) + 1;
      }
    }
  }

  String getEndpointAddress(Class<?> endpointClass) {
    return soapEndpointByType.get(endpointClass).getSpec().getEndpointAddress();
  }

  <T> T getEndpointImplementor(Class<T> endpointClass) {
    return (T) soapEndpointByType.get(endpointClass).getImplementor();
  }
}
  Uri(UriBuilder builder) {
    scheme = builder.getScheme();
    authority = builder.getAuthority();
    path = builder.getPath();
    query = builder.getQuery();
    fragment = builder.getFragment();
    queryParameters =
        Collections.unmodifiableMap(Maps.newLinkedHashMap(builder.getQueryParameters()));

    StringBuilder out = new StringBuilder();

    if (scheme != null) {
      out.append(scheme).append(':');
    }
    if (authority != null) {
      out.append("//").append(authority);
      // ensure that there's a separator between authority and path
      if (path != null && path.length() > 1 && !path.startsWith("/")) {
        out.append("/");
      }
    }
    if (path != null) {
      out.append(path);
    }
    if (query != null) {
      out.append('?').append(query);
    }
    if (fragment != null) {
      out.append('#').append(fragment);
    }
    text = out.toString();
  }
Example No. 30
  /**
   * Find and return the {@link CxxPreprocessorInput} objects from {@link CxxPreprocessorDep} found
   * while traversing the dependencies starting from the {@link BuildRule} objects given.
   */
  public static Collection<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      final CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> inputs,
      final Predicate<Object> traverse)
      throws NoSuchBuildTargetException {

    // We don't really care about the order we get back here, since headers shouldn't
    // conflict.  However, we want something that's deterministic, so we keep a map with
    // a predictable iteration order, keyed by build target.
    final Map<BuildTarget, CxxPreprocessorInput> deps = Maps.newLinkedHashMap();

    // Build up the map of all C/C++ preprocessable dependencies.
    new AbstractBreadthFirstThrowingTraversal<BuildRule, NoSuchBuildTargetException>(inputs) {
      @Override
      public ImmutableSet<BuildRule> visit(BuildRule rule) throws NoSuchBuildTargetException {
        if (rule instanceof CxxPreprocessorDep) {
          CxxPreprocessorDep dep = (CxxPreprocessorDep) rule;
          deps.putAll(dep.getTransitiveCxxPreprocessorInput(cxxPlatform, HeaderVisibility.PUBLIC));
          return ImmutableSet.of();
        }
        return traverse.apply(rule) ? rule.getDeps() : ImmutableSet.<BuildRule>of();
      }
    }.start();

    // Grab the cxx preprocessor inputs and return them.
    return deps.values();
  }