Example no. 1
  private UIStyleFamily buildFamily(String family, UISkin skin) {
    UIStyleFamily baseFamily = skin.getFamily(family);
    UIStyle baseStyle = new UIStyle(skin.getDefaultStyleFor(family));
    if (!family.isEmpty()) {
      UIStyleFragment fragment = baseStyles.get(family);
      fragment.applyTo(baseStyle);
    }

    Set<StyleKey> inheritedStyleKey = Sets.newLinkedHashSet();
    for (Class<? extends UIWidget> widget : baseFamily.getWidgets()) {
      inheritedStyleKey.add(new StyleKey(widget, "", ""));
      for (String part : baseFamily.getPartsFor(widget)) {
        inheritedStyleKey.add(new StyleKey(widget, part, ""));
        for (String mode : baseFamily.getModesFor(widget, part)) {
          inheritedStyleKey.add(new StyleKey(widget, part, mode));
        }
      }
    }

    Map<Class<? extends UIWidget>, Table<String, String, UIStyle>> familyStyles = Maps.newHashMap();
    Map<StyleKey, UIStyleFragment> styleLookup = elementStyles.row(family);
    Map<StyleKey, UIStyleFragment> baseStyleLookup =
        (family.isEmpty()) ? Maps.<StyleKey, UIStyleFragment>newHashMap() : elementStyles.row("");
    for (StyleKey styleKey :
        Sets.union(Sets.union(styleLookup.keySet(), baseStyleKeys), inheritedStyleKey)) {
      UIStyle elementStyle =
          new UIStyle(baseSkin.getStyleFor(family, styleKey.element, styleKey.part, styleKey.mode));
      baseStyles.get("").applyTo(elementStyle);
      baseStyles.get(family).applyTo(elementStyle);
      List<Class<? extends UIWidget>> inheritanceTree =
          ReflectionUtil.getInheritanceTree(styleKey.element, UIWidget.class);
      applyStylesForInheritanceTree(
          inheritanceTree, "", "", elementStyle, styleLookup, baseStyleLookup);

      if (!styleKey.part.isEmpty()) {
        applyStylesForInheritanceTree(
            inheritanceTree, styleKey.part, "", elementStyle, styleLookup, baseStyleLookup);
      }

      if (!styleKey.mode.isEmpty()) {
        applyStylesForInheritanceTree(
            inheritanceTree,
            styleKey.part,
            styleKey.mode,
            elementStyle,
            styleLookup,
            baseStyleLookup);
      }

      Table<String, String, UIStyle> elementTable = familyStyles.get(styleKey.element);
      if (elementTable == null) {
        elementTable = HashBasedTable.create();
        familyStyles.put(styleKey.element, elementTable);
      }
      elementTable.put(styleKey.part, styleKey.mode, elementStyle);
    }
    return new UIStyleFamily(baseStyle, familyStyles);
  }
Example no. 2
    public OwnersReport updatedWith(OwnersReport other) {
      SetMultimap<TargetNode<?>, Path> updatedOwners = TreeMultimap.create(owners);
      updatedOwners.putAll(other.owners);

      return new OwnersReport(
          updatedOwners,
          Sets.intersection(inputsWithNoOwners, other.inputsWithNoOwners),
          Sets.union(nonExistentInputs, other.nonExistentInputs),
          Sets.union(nonFileInputs, other.nonFileInputs));
    }
Example no. 3
  private final void invalidate(
      IFileSpecification fileSpec,
      Collection<ICompilationUnit> compilationUnits,
      Map<ICompilerProject, Set<ICompilationUnit>> cusToUpdate) {
    mxmlDataManager.invalidate(fileSpec);
    // Tell the SWC manager the SWC file is invalid.
    getSWCManager().remove(new File(fileSpec.getPath()));

    if (compilationUnits.size() == 0) return;

    Set<ICompilationUnit> unitsToInvalidate = new HashSet<ICompilationUnit>();
    unitsToInvalidate.addAll(compilationUnits);
    Set<ICompilationUnit> unitsToClean =
        Sets.<ICompilationUnit>union(
            DependencyGraph.computeInvalidationSet(unitsToInvalidate),
            getCompilationUnitsDependingOnMissingDefinitions(unitsToInvalidate));

    notifyInvalidationListener(unitsToClean);

    // Do the actual invalidation
    Map<ICompilerProject, Set<File>> invalidatedSWCFiles =
        new HashMap<ICompilerProject, Set<File>>();
    for (ICompilationUnit compilationUnit : unitsToClean) {
      boolean clearCUFileScope = unitsToInvalidate.contains(compilationUnit);
      compilationUnit.clean(invalidatedSWCFiles, cusToUpdate, clearCUFileScope);
    }

    // invalidate any library files in the project
    for (Map.Entry<ICompilerProject, Set<File>> e : invalidatedSWCFiles.entrySet()) {
      if (e.getKey() instanceof IASProject)
        ((IASProject) e.getKey()).invalidateLibraries(e.getValue());
    }
  }
Example no. 4
  @Test
  public void union_of_sets_guava() {

    Set<String> totalFriends = Sets.union(yourFriends, myFriends);

    assertEquals(16, totalFriends.size());
  }
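
The yourFriends and myFriends fixtures are defined outside this snippet. A self-contained sketch of the same check, with hypothetical names chosen so that the two sets share two entries and the union holds 16 elements:

import static org.junit.Assert.assertEquals;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;
import org.junit.Test;

public class SetsUnionFriendsSketchTest {

  @Test
  public void union_of_sets_guava() {
    // Hypothetical fixture: 10 + 8 names sharing 2 entries, so the union holds 16.
    Set<String> myFriends =
        ImmutableSet.of("ann", "bob", "carl", "dana", "ed", "fay", "gus", "hal", "ivy", "joe");
    Set<String> yourFriends =
        ImmutableSet.of("ivy", "joe", "kim", "lee", "max", "ned", "olga", "pam");

    Set<String> totalFriends = Sets.union(yourFriends, myFriends);

    assertEquals(16, totalFriends.size());
  }
}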
Example no. 5
 public PnfsCreateEntryMessage(String path, int uid, int gid, int mode, Set<FileAttribute> attr) {
   super(
       path,
       EnumSet.copyOf(
           Sets.union(
               attr,
               EnumSet.of(
                   OWNER,
                   OWNER_GROUP,
                   MODE,
                   TYPE,
                   SIZE,
                   CREATION_TIME,
                   ACCESS_TIME,
                   MODIFICATION_TIME,
                   CHANGE_TIME,
                   PNFSID,
                   STORAGEINFO,
                   ACCESS_LATENCY,
                   RETENTION_POLICY))));
   _path = path;
   _uid = uid;
   _gid = gid;
   _mode = mode;
 }
Example no. 6
    @Override
    public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext<Set<Symbol>> context) {
      List<Symbol> newOutputSymbols =
          node.getOutputSymbols()
              .stream()
              .filter(context.get()::contains)
              .collect(toImmutableList());

      Set<Symbol> newLookupSymbols =
          node.getLookupSymbols()
              .stream()
              .filter(context.get()::contains)
              .collect(toImmutableSet());

      Set<Symbol> requiredAssignmentSymbols = context.get();
      if (!node.getEffectiveTupleDomain().isNone()) {
        Set<Symbol> requiredSymbols =
            Maps.filterValues(
                    node.getAssignments(),
                    in(node.getEffectiveTupleDomain().getDomains().get().keySet()))
                .keySet();
        requiredAssignmentSymbols = Sets.union(context.get(), requiredSymbols);
      }
      Map<Symbol, ColumnHandle> newAssignments =
          Maps.filterKeys(node.getAssignments(), in(requiredAssignmentSymbols));

      return new IndexSourceNode(
          node.getId(),
          node.getIndexHandle(),
          node.getTableHandle(),
          newLookupSymbols,
          newOutputSymbols,
          newAssignments,
          node.getEffectiveTupleDomain());
    }
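
The pruning above keeps an assignment when its symbol is required either by the consumer (context.get()) or by the effective tuple domain. A minimal sketch of that set arithmetic, with hypothetical symbol and column names:

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.util.Map;
import java.util.Set;

class AssignmentPruningSketch {
  public static void main(String[] args) {
    Map<String, String> assignments =
        ImmutableMap.of("a", "col_a", "b", "col_b", "c", "col_c");
    Set<String> requiredByConsumer = ImmutableSet.of("a");
    Set<String> requiredByDomain = ImmutableSet.of("c");

    // Keep only keys required by either source; "b" is dropped.
    Map<String, String> pruned =
        Maps.filterKeys(
            assignments, Predicates.in(Sets.union(requiredByConsumer, requiredByDomain)));

    System.out.println(pruned); // {a=col_a, c=col_c}
  }
}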
Example no. 7
 public static String translateImplication(
     SymbolicConstraint leftHandSide, SymbolicConstraint rightHandSide) {
   KILtoSMTLib leftTransformer = new KILtoSMTLib(true);
   KILtoSMTLib rightTransformer = new KILtoSMTLib(false);
   String leftExpression = ((SMTLibTerm) leftHandSide.accept(leftTransformer)).expression();
   String rightExpression = ((SMTLibTerm) rightHandSide.accept(rightTransformer)).expression();
   StringBuilder sb = new StringBuilder();
   sb.append(
       getSortAndFunctionDeclarations(
           leftHandSide.termContext().definition(),
           Sets.union(leftTransformer.variables(), rightTransformer.variables())));
   sb.append(getAxioms(leftHandSide.termContext().definition()));
   sb.append(getConstantDeclarations(leftTransformer.variables()));
   sb.append("(assert (and ");
   sb.append(leftExpression);
   sb.append(" (not ");
   Set<Variable> rightHandSideOnlyVariables =
       Sets.difference(rightTransformer.variables(), leftTransformer.variables());
   if (!rightHandSideOnlyVariables.isEmpty()) {
     sb.append("(exists (");
     sb.append(getQuantifiedVariables(rightHandSideOnlyVariables));
     sb.append(") ");
   }
   sb.append(rightExpression);
   if (!rightHandSideOnlyVariables.isEmpty()) {
     sb.append(")");
   }
   sb.append(")))");
   return sb.toString();
 }
Example no. 8
  /**
   * Helper to add srcs and deps Soy files to a SoyFileSet builder. Also does sanity checks.
   *
   * @param sfsBuilder The SoyFileSet builder to add to.
   * @param inputPrefix The input path prefix to prepend to all the file paths.
   * @param srcs The srcs from the --srcs flag. Exactly one of 'srcs' and 'args' must be nonempty.
   * @param args The old-style srcs from the command line (that's how they were specified before we
   *     added the --srcs flag). Exactly one of 'srcs' and 'args' must be nonempty.
   * @param deps The deps from the --deps flag, or empty list if not applicable.
   * @param indirectDeps The indirect deps, or empty list if not applicable.
   * @param exitWithErrorFn A function that exits with an error message followed by a usage message.
   */
  static void addSoyFilesToBuilder(
      Builder sfsBuilder,
      String inputPrefix,
      Collection<String> srcs,
      Collection<String> args,
      Collection<String> deps,
      Collection<String> indirectDeps,
      Function<String, Void> exitWithErrorFn) {
    if (srcs.isEmpty() && args.isEmpty()) {
      exitWithErrorFn.apply("Must provide list of source Soy files (--srcs).");
    }
    if (!srcs.isEmpty() && !args.isEmpty()) {
      exitWithErrorFn.apply(
          "Found source Soy files from --srcs and from args (please use --srcs only).");
    }

    // Create Set versions of each of the arguments, and de-dupe. If something is included as
    // multiple file kinds, we'll keep the strongest one; a file in both srcs and deps will be a
    // src, and one in both deps and indirect_deps will be a dep.
    // TODO(gboyer): Maybe stop supporting old style (srcs from command line args) at some point.
    Set<String> srcsSet = ImmutableSet.<String>builder().addAll(srcs).addAll(args).build();
    Set<String> depsSet = Sets.difference(ImmutableSet.copyOf(deps), srcsSet);
    Set<String> indirectDepsSet =
        Sets.difference(ImmutableSet.copyOf(indirectDeps), Sets.union(srcsSet, depsSet));

    for (String src : srcsSet) {
      sfsBuilder.addWithKind(new File(inputPrefix + src), SoyFileKind.SRC);
    }
    for (String dep : depsSet) {
      sfsBuilder.addWithKind(new File(inputPrefix + dep), SoyFileKind.DEP);
    }
    for (String dep : indirectDepsSet) {
      sfsBuilder.addWithKind(new File(inputPrefix + dep), SoyFileKind.INDIRECT_DEP);
    }
  }
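
As a small illustration of the precedence rule described in the comment above, with hypothetical file names: a file passed under several kinds keeps only the strongest one.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

class SoyFileKindPrecedenceSketch {
  public static void main(String[] args) {
    Set<String> srcsSet = ImmutableSet.of("a.soy", "b.soy");
    // "b.soy" is also passed as a dep, but stays a src.
    Set<String> depsSet = Sets.difference(ImmutableSet.of("b.soy", "c.soy"), srcsSet);
    // "a.soy" and "c.soy" are also passed as indirect deps, but keep their stronger kinds.
    Set<String> indirectDepsSet =
        Sets.difference(
            ImmutableSet.of("a.soy", "c.soy", "d.soy"), Sets.union(srcsSet, depsSet));

    System.out.println(depsSet); // [c.soy]
    System.out.println(indirectDepsSet); // [d.soy]
  }
}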
Example no. 9
  // TODO(user): Handle separate_revisions! (an 'svn commit' per exported change)
  @Override
  public DraftRevision putCodebase(Codebase c) throws WritingError {
    c.checkProjectSpace(config.getProjectSpace());

    // Filter out files that either start with .svn or have .svn after a slash, plus the repo
    // config's ignore_file_res.
    List<String> ignoreFilePatterns =
        ImmutableList.<String>builder()
            .addAll(config.getIgnoreFileRes())
            .add("(^|.*/)\\.svn(/.*|$)")
            .build();

    Set<String> codebaseFiles = c.getRelativeFilenames();
    Set<String> writerFiles =
        Utils.filterByRegEx(
            Utils.makeFilenamesRelative(
                AppContext.RUN.fileSystem.findFiles(rootDirectory), rootDirectory),
            ignoreFilePatterns);
    Set<String> union = Sets.union(codebaseFiles, writerFiles);

    for (String filename : union) {
      putFile(filename, c);
    }

    return new SvnDraftRevision(rootDirectory);
  }
Example no. 10
  private Set<Long> getTimestampsToSweep(
      Cell cell,
      Collection<Long> timestamps /* start timestamps */,
      @Modified Map<Long, Long> startTsToCommitTs,
      @Output Set<Cell> sentinelsToAdd,
      long sweepTimestamp,
      boolean sweepLastCommitted,
      SweepStrategy sweepStrategy) {
    Set<Long> uncommittedTimestamps = Sets.newHashSet();
    SortedSet<Long> committedTimestampsToSweep = Sets.newTreeSet();
    long maxStartTs = TransactionConstants.FAILED_COMMIT_TS;
    boolean maxStartTsIsCommitted = false;
    for (long startTs : timestamps) {
      long commitTs = ensureCommitTimestampExists(startTs, startTsToCommitTs);

      if (startTs > maxStartTs && commitTs < sweepTimestamp) {
        maxStartTs = startTs;
        maxStartTsIsCommitted = commitTs != TransactionConstants.FAILED_COMMIT_TS;
      }
      // Note: there could be an open transaction whose start timestamp is equal to
      // sweepTimestamp; thus we want to sweep all cells such that:
      // (1) their commit timestamp is less than sweepTimestamp
      // (2) their start timestamp is NOT the greatest possible start timestamp
      //     passing condition (1)
      if (commitTs > 0 && commitTs < sweepTimestamp) {
        committedTimestampsToSweep.add(startTs);
      } else if (commitTs == TransactionConstants.FAILED_COMMIT_TS) {
        uncommittedTimestamps.add(startTs);
      }
    }

    if (committedTimestampsToSweep.isEmpty()) {
      return uncommittedTimestamps;
    }

    if (sweepStrategy == SweepStrategy.CONSERVATIVE && committedTimestampsToSweep.size() > 1) {
      // We need to add a sentinel if we are removing a committed value
      sentinelsToAdd.add(cell);
    }

    if (sweepLastCommitted && maxStartTsIsCommitted) {
      return Sets.union(uncommittedTimestamps, committedTimestampsToSweep);
    }
    return Sets.union(
        uncommittedTimestamps,
        committedTimestampsToSweep.subSet(0L, committedTimestampsToSweep.last()));
  }
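
A worked sketch of the final branch, with hypothetical timestamps: subSet(0L, last()) keeps every committed start timestamp except the greatest one, so the most recently committed value survives the sweep.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;
import java.util.SortedSet;

class SweepTailExclusionSketch {
  public static void main(String[] args) {
    SortedSet<Long> committedTimestampsToSweep = Sets.newTreeSet(ImmutableList.of(5L, 8L, 12L));
    Set<Long> uncommittedTimestamps = ImmutableSet.of(3L);

    Set<Long> toSweep =
        Sets.union(
            uncommittedTimestamps,
            committedTimestampsToSweep.subSet(0L, committedTimestampsToSweep.last()));

    System.out.println(toSweep); // [3, 5, 8] -- the newest committed timestamp (12) is kept
  }
}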
Example no. 11
  @Test
  public void whenCalculateUnionOfSets_thenCorrect() {
    final Set<Character> first = ImmutableSet.of('a', 'b', 'c');
    final Set<Character> second = ImmutableSet.of('b', 'c', 'd');

    final Set<Character> union = Sets.union(first, second);
    assertThat(union, containsInAnyOrder('a', 'b', 'c', 'd'));
  }
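
Sets.union does not copy its arguments; it returns an unmodifiable view backed by both sets. A short sketch, using hypothetical sets, of the view semantics that several snippets in this collection rely on:

import static org.junit.Assert.assertTrue;

import com.google.common.collect.Sets;
import java.util.Set;
import org.junit.Test;

public class SetsUnionViewSketchTest {

  @Test
  public void unionIsALiveUnmodifiableView() {
    Set<Character> left = Sets.newHashSet('a', 'b');
    Set<Character> right = Sets.newHashSet('c');

    Set<Character> view = Sets.union(left, right);
    right.add('d');

    // The view reflects later changes to the backing sets...
    assertTrue(view.contains('d'));
    // ...and is unmodifiable: view.add('e') would throw UnsupportedOperationException.
    // Use Sets.union(left, right).immutableCopy() when a detached snapshot is needed.
  }
}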
Example no. 12
 static ImmutableSet<EnumType> union(ImmutableSet<EnumType> s1, ImmutableSet<EnumType> s2) {
   if (s1.isEmpty()) {
     return s2;
   }
   if (s2.isEmpty() || s1.equals(s2)) {
     return s1;
   }
   return Sets.union(s1, s2).immutableCopy();
 }
Example no. 13
 /**
  * Reads the user and plugin classifications from their respective XML files and returns the
  * union of both.
  *
  * @return a set containing both the user and the plugin classifications
  */
 protected static Set<Classification> readAllClassificationsFromFiles() {
   final Set<Classification> pluginClassifications =
       AbstractCatalog.getClassificationsFromFile(
           PluginClassificationsCatalog.PLUGIN_CLASSIFICATION_TYPES_FILE, false);
   final Set<Classification> userClassifications =
       AbstractCatalog.getClassificationsFromFile(
           PluginClassificationsCatalog.USER_CLASSIFICATION_TYPES_FILE, true);
   return Sets.union(pluginClassifications, userClassifications);
 }
Example no. 14
 @Override
 public Response rmdir(String path, Map<Byte, Long> instanceMap) throws TException {
   path = makePathAbsolute(path);
   Set<Byte> parts = oracle.partitionsOf(path);
   Set<Byte> parentParts = oracle.partitionsOf(Paths.dirname(path));
   Set<Byte> involvedPartitions = Sets.union(parts, parentParts);
   Command cmd = newCommand(CommandType.RMDIR, involvedPartitions, instanceMap);
   RmdirCmd rmdir = new RmdirCmd(path, parentParts, parts);
   cmd.setRmdir(rmdir);
   replica.submitCommand(cmd);
   Response r = new Response(replica.getInstanceMap());
   return r;
 }
Example no. 15
 public boolean isTypeImmutable(String desc) {
   Type type = Type.getType(desc);
   if (IMMUTABLE_TYPE_SORTS.contains(type.getSort())) {
     return true;
   }
   if (type.getSort() != Type.OBJECT) {
     return false;
   }
   if (Sets.union(EXTERNAL_IMMUTABLE_CLASSES, immutableClasses).contains(type.getInternalName())) {
     return true;
   }
   return false;
 }
Example no. 16
 @Override
 public Response mknod(
     String path, int mode, int rdev, int uid, int gid, Map<Byte, Long> instanceMap)
     throws TException {
   path = makePathAbsolute(path);
   Set<Byte> parts = oracle.partitionsOf(path);
   Set<Byte> parentParts = oracle.partitionsOf(Paths.dirname(path));
   Set<Byte> involvedPartitions = Sets.union(parts, parentParts);
   Command cmd = newCommand(CommandType.MKNOD, involvedPartitions, instanceMap);
   MknodCmd mknod = new MknodCmd(path, mode, uid, gid, parentParts, parts);
   cmd.setMknod(mknod);
   replica.submitCommand(cmd);
   Response r = new Response(replica.getInstanceMap());
   return r;
 }
Example no. 17
  public Set<OPMProcess> findNextProcessesToExecute(OPMProcess process) {
    final List<OPMProcess> followingProcesses =
        OPDExecutionAnalysis.calculateFollowingProcesses(opdDag, process);
    final Set<OPMProcess> retVal = Sets.newHashSet();
    for (OPMProcess followingProcess : followingProcesses) {
      Set<OPMProcess> requiredProcesses =
          OPDExecutionAnalysis.calculateRequiredProcesses(opdDag, followingProcess);
      if (Sets.difference(requiredProcesses, Sets.union(skippedProcesses, executedProcesses)).size()
          == 0) {
        retVal.add(followingProcess);
      }
    }

    return retVal;
  }
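
The emptiness check above is a subset test: a following process becomes executable once all of its required processes are contained in the union of skipped and executed processes. A tiny sketch with hypothetical process names:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

class RequiredProcessesSketch {
  public static void main(String[] args) {
    Set<String> requiredProcesses = ImmutableSet.of("p1", "p2");
    Set<String> executedProcesses = ImmutableSet.of("p1");
    Set<String> skippedProcesses = ImmutableSet.of("p2", "p3");

    boolean executable =
        Sets.difference(requiredProcesses, Sets.union(skippedProcesses, executedProcesses))
            .isEmpty();

    System.out.println(executable); // true: {p1, p2} is covered by {p1, p2, p3}
  }
}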
Example no. 18
 @Override
 public Response symlink(String target, String path, Map<Byte, Long> instanceMap)
     throws TException {
   path = makePathAbsolute(path);
   target = makePathAbsolute(target);
   Set<Byte> parts = oracle.partitionsOf(path);
   Set<Byte> parentParts = oracle.partitionsOf(Paths.dirname(path));
   Set<Byte> involvedPartitions = Sets.union(parts, parentParts);
   Command cmd = newCommand(CommandType.SYMLINK, involvedPartitions, instanceMap);
   SymlinkCmd symlink = new SymlinkCmd(target, path, parentParts, parts);
   cmd.setSymlink(symlink);
   replica.submitCommand(cmd);
   Response r = new Response(replica.getInstanceMap());
   return r;
 }
Example no. 19
  /**
   * Explores the squares reachable from the starting location, alternating players on each step,
   * and returns a boolean derived from the sizes of the two players' reachable-square sets.
   *
   * @param grid the game grid to explore
   * @param startingLoc the index of the starting square
   * @return the parity/size comparison computed from the two reachable-square sets
   */
  public static boolean getConnectedSquareCount(GridChar grid, int startingLoc) {

    // int oldCount = (1+getConnectedSquareCountOld(grid,startingLoc) );

    // The boolean means it is player 1, the connected square co
    Set<Pair<Integer, Boolean>> visitedNodes = Sets.newHashSet();

    LinkedList<Pair<Integer, Boolean>> toVisit = new LinkedList<>();
    toVisit.add(new ImmutablePair<>(startingLoc, true));

    while (!toVisit.isEmpty()) {

      Pair<Integer, Boolean> loc = toVisit.poll();

      if (visitedNodes.contains(loc)) continue;

      visitedNodes.add(loc);

      for (Direction dir : Direction.values()) {
        Integer childIdx = grid.getIndex(loc.getLeft(), dir);
        if (childIdx == null) continue;

        char sq = grid.getEntry(childIdx);

        if (sq == '#' || sq == 'K' || sq == 'T') continue;

        toVisit.add(new ImmutablePair<>(childIdx, !loc.getRight()));
      }
    }

    Set<Integer> fpPoints = Sets.newHashSet();
    Set<Integer> spPoints = Sets.newHashSet();
    for (Pair<Integer, Boolean> p : visitedNodes) {
      if (p.getRight()) fpPoints.add(p.getLeft());
      else spPoints.add(p.getLeft());
    }

    Set<Integer> shared = Sets.intersection(fpPoints, spPoints);
    Set<Integer> union = Sets.union(fpPoints, spPoints);

    if (!shared.isEmpty()) {
      return (union.size() + 1) % 2 == 0;
    } else {
      // log.debug("size {} size {}", fpPoints.size(), spPoints.size());
      return spPoints.size() < fpPoints.size();
    }
    // return (countFP + countSP + 1) % 2 == 0;
  }
Example no. 20
  @Override
  public double distance(Histogram unknownHistogram, Histogram knownHistogram) {

    Set<Event> events = Sets.union(unknownHistogram.uniqueEvents(), knownHistogram.uniqueEvents());

    double distance = 0.0, sumNumer = 0.0, sumDenom = 0.0;

    for (Event event : events) {
      double known = knownHistogram.relativeFrequency(event);
      double unknown = unknownHistogram.relativeFrequency(event);
      sumNumer += Math.abs(unknown - known);
      sumDenom += unknown + known;
    }
    distance = sumNumer / sumDenom;
    return distance;
  }
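
For instance, with hypothetical relative frequencies over two events, if the unknown histogram yields 0.75 and 0.25 and the known one yields 0.5 and 0.5, then the numerator is |0.75 - 0.5| + |0.25 - 0.5| = 0.5, the denominator is 1.25 + 0.75 = 2.0, and the distance is 0.25.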
Example no. 21
  private void go() {
    while (true) {
      madeProgress = false;
      for (ClassNode klass : allClasses.values()) {
        analyzeClass(klass);
      }
      if (!madeProgress) {
        break;
      }
    }

    immutableClasses =
        ImmutableSet.copyOf(
            Sets.difference(
                allClasses.keySet(),
                Sets.union(trulyMutableClasses, classesWithMutableDescendents)));
  }
Example no. 22
  @Override
  public double distance(Histogram unknownHistogram, Histogram knownHistogram)
      throws DistanceCalculationException {
    Set<Event> events = Sets.union(unknownHistogram.uniqueEvents(), knownHistogram.uniqueEvents());

    double distance = 0.0, sumNumer = 0.0, sumUnknown = 0.0, sumKnown = 0.0;

    for (Event event : events) {
      sumNumer +=
          unknownHistogram.relativeFrequency(event) * knownHistogram.relativeFrequency(event);
      sumUnknown += unknownHistogram.relativeFrequency(event);
      sumKnown += knownHistogram.relativeFrequency(event);
    }
    distance =
        Math.sqrt(2 - 2 * (sumNumer / Math.sqrt(sumUnknown * sumUnknown * sumKnown * sumKnown)));

    return distance;
  }
Example no. 23
  private UIStyleFamily buildFamily(String family, UIStyle defaultStyle) {
    UIStyle baseStyle = new UIStyle(defaultStyle);
    if (!family.isEmpty()) {
      UIStyleFragment fragment = baseStyles.get(family);
      fragment.applyTo(baseStyle);
    }

    Map<Class<? extends UIWidget>, Table<String, String, UIStyle>> familyStyles = Maps.newHashMap();
    Map<StyleKey, UIStyleFragment> styleLookup = elementStyles.row(family);
    Map<StyleKey, UIStyleFragment> baseStyleLookup =
        (family.isEmpty()) ? Maps.<StyleKey, UIStyleFragment>newHashMap() : elementStyles.row("");
    for (StyleKey styleKey : Sets.union(styleLookup.keySet(), baseStyleKeys)) {
      UIStyle elementStyle = new UIStyle(baseStyle);
      List<Class<? extends UIWidget>> inheritanceTree =
          ReflectionUtil.getInheritanceTree(styleKey.element, UIWidget.class);
      applyStylesForInheritanceTree(
          inheritanceTree, "", "", elementStyle, styleLookup, baseStyleLookup);

      if (!styleKey.part.isEmpty()) {
        applyStylesForInheritanceTree(
            inheritanceTree, styleKey.part, "", elementStyle, styleLookup, baseStyleLookup);
      }

      if (!styleKey.mode.isEmpty()) {
        applyStylesForInheritanceTree(
            inheritanceTree,
            styleKey.part,
            styleKey.mode,
            elementStyle,
            styleLookup,
            baseStyleLookup);
      }

      Table<String, String, UIStyle> elementTable = familyStyles.get(styleKey.element);
      if (elementTable == null) {
        elementTable = HashBasedTable.create();
        familyStyles.put(styleKey.element, elementTable);
      }
      elementTable.put(styleKey.part, styleKey.mode, elementStyle);
    }
    return new UIStyleFamily(baseStyle, familyStyles);
  }
Example no. 24
 private boolean isRegion(BasicBlock entry, BasicBlock exit) {
    // Two cases:
    //   Entry does not dominate Exit
    //   Entry dominates Exit
   if (!getDomInfo().dominates(entry, exit)) {
     // Exit has to be the only element in the dominance frontier of Entry.
     // In this case the only edges leaving the region end at Exit
     if (!Collections.singleton(exit).containsAll(getDomInfo().getDominanceFrontierOf2(entry))) {
       return false;
     }
   } else {
      // Only basic blocks that are part of the dominance frontier of Exit
      // are allowed to be elements of the dominance frontier of Entry.
     if (!Sets.union(getDomInfo().getDominanceFrontierOf2(exit), Collections.singleton(entry))
         .containsAll(getDomInfo().getDominanceFrontierOf2(entry))) {
       return false;
     }
      // Basic blocks in the dominance frontier of Entry can only be reached
      // from Entry through a path passing Exit. To show this, the isCommonDomFrontier
      // function is used: isCommonDomFrontier(BB, Entry, Exit) checks whether there exists a path
      // from Entry to BB that does not pass Exit. This is done by checking, for every
      // predecessor of BB, that if it is dominated by Entry it is also dominated by Exit.
     for (BasicBlock bb : getDomInfo().getDominanceFrontierOf2(entry)) {
       if (!isCommonDomFrontier(bb, entry, exit)) {
         return false;
       }
     }
      // It still has to be shown that there are no edges entering the region.
      // As all basic blocks are dominated by Entry, the only case where edges
      // enter the region is if Exit is dominated by Entry and has back edges
      // pointing into the region. These back edges will point to basic blocks
      // dominated by Entry but not by Exit, so the dominance frontier of Exit
      // is not allowed to contain any basic blocks that are dominated by Entry.
     for (BasicBlock bb : getDomInfo().getDominanceFrontierOf2(exit)) {
       if (getDomInfo().strictlyDominates(entry, bb)) {
         return false;
       }
     }
   }
   return true;
 }
Example no. 25
 // This method needs the nominal types because otherwise a property may become
 // optional by mistake after the join.
 // joinPropsLoosely doesn't need that, because we don't create optional props
 // on loose types.
 private static PersistentMap<String, Property> joinProps(
     Map<String, Property> props1,
     Map<String, Property> props2,
     NominalType nom1,
     NominalType nom2) {
   PersistentMap<String, Property> newProps = PersistentMap.create();
   for (String pname : Sets.union(props1.keySet(), props2.keySet())) {
     Property prop1 = getProp(props1, nom1, pname);
     Property prop2 = getProp(props2, nom2, pname);
     Property newProp = null;
     if (prop1 == null) {
       newProp = prop2.withOptional();
     } else if (prop2 == null) {
       newProp = prop1.withOptional();
     } else {
       newProp = Property.join(prop1, prop2);
     }
     newProps = newProps.with(pname, newProp);
   }
   return newProps;
 }
Example no. 26
  private void updateGeneratedJobs(
      final AbstractBuild<?, ?> build, BuildListener listener, Set<GeneratedJob> freshJobs)
      throws IOException {
    // Update Project
    Set<GeneratedJob> generatedJobs =
        extractGeneratedObjects(build.getProject(), GeneratedJobsAction.class);
    Set<GeneratedJob> added = Sets.difference(freshJobs, generatedJobs);
    Set<GeneratedJob> existing = Sets.intersection(generatedJobs, freshJobs);
    Set<GeneratedJob> removed = Sets.difference(generatedJobs, freshJobs);

    logItems(listener, "Adding items", added);
    logItems(listener, "Existing items", existing);
    logItems(listener, "Removing items", removed);

    // Update unreferenced jobs
    for (GeneratedJob removedJob : removed) {
      Item removedItem =
          getLookupStrategy().getItem(build.getProject(), removedJob.getJobName(), Item.class);
      if (removedItem != null && removedJobAction != RemovedJobAction.IGNORE) {
        if (removedJobAction == RemovedJobAction.DELETE) {
          try {
            removedItem.delete();
          } catch (InterruptedException e) {
            listener.getLogger().println(String.format("Delete item failed: %s", removedJob));
            if (removedItem instanceof AbstractProject) {
              listener.getLogger().println(String.format("Disabling item instead: %s", removedJob));
              ((AbstractProject) removedItem).disable();
            }
          }
        } else {
          if (removedItem instanceof AbstractProject) {
            ((AbstractProject) removedItem).disable();
          }
        }
      }
    }

    updateGeneratedJobMap(build.getProject(), Sets.union(added, existing), removed);
  }
Example no. 27
  public static int countLoadedChunks(
      ICommandSender sender, EntityPlayerMP player, boolean details, LoadedChunksVisitor visitor) {
    MinecraftServer server = ForgeTools.server;
    int total = 0;
    for (WorldServer s : server.worldServers) {
      World tmp = ((World) s);
      ImmutableSetMultimap<ChunkCoordIntPair, ForgeChunkManager.Ticket> forcedChunks =
          tmp.getPersistentChunks();
      Set<ChunkCoordIntPair> loadedChunks = new LinkedHashSet<ChunkCoordIntPair>();
      for (ChunkCoordIntPair c : forcedChunks.keys()) {
        for (ForgeChunkManager.Ticket t : forcedChunks.get(c)) {
          loadedChunks = Sets.union(t.getChunkList(), loadedChunks);
        }
      }
      total += loadedChunks.size();

      boolean playerInWorld =
          (player != null) ? s.getWorldInfo().equals(player.worldObj.getWorldInfo()) : false;
      visitor.visit(sender, player, details, playerInWorld, s, loadedChunks.size());
    }
    return total;
  }
Example no. 28
 @Override
 public void buildTransitiveClosure(Set<QueryTarget> targets, int maxDepth)
     throws QueryException, InterruptedException {
   // Filter QueryTargets that are build targets and not yet present in the build target graph.
   Set<BuildTarget> graphTargets = getTargetsFromNodes(graph.getNodes());
   Set<BuildTarget> newBuildTargets = new HashSet<>();
   for (QueryTarget target : targets) {
     if (target instanceof QueryBuildTarget) {
       BuildTarget buildTarget = ((QueryBuildTarget) target).getBuildTarget();
       if (!graphTargets.contains(buildTarget)) {
         newBuildTargets.add(buildTarget);
       }
     }
   }
   if (!newBuildTargets.isEmpty()) {
     buildGraphForBuildTargets(Sets.union(newBuildTargets, graphTargets));
     for (BuildTarget buildTarget : getTargetsFromNodes(graph.getNodes())) {
       if (!buildTargetToQueryTarget.containsKey(buildTarget)) {
         buildTargetToQueryTarget.put(buildTarget, QueryBuildTarget.of(buildTarget));
       }
     }
   }
 }
Example no. 29
  @Activate
  public void activate() {
    localNodeId = clusterService.getLocalNode().id();
    // load database configuration
    File databaseDefFile = new File(PARTITION_DEFINITION_FILE);
    log.info("Loading database definition: {}", databaseDefFile.getAbsolutePath());

    Map<String, Set<NodeInfo>> partitionMap;
    try {
      DatabaseDefinitionStore databaseDefStore = new DatabaseDefinitionStore(databaseDefFile);
      if (!databaseDefFile.exists()) {
        createDefaultDatabaseDefinition(databaseDefStore);
      }
      partitionMap = databaseDefStore.read().getPartitions();
    } catch (IOException e) {
      throw new IllegalStateException("Failed to load database config", e);
    }

    String[] activeNodeUris =
        partitionMap
            .values()
            .stream()
            .reduce((s1, s2) -> Sets.union(s1, s2))
            .get()
            .stream()
            .map(this::nodeToUri)
            .toArray(String[]::new);

    String localNodeUri = nodeToUri(NodeInfo.of(clusterService.getLocalNode()));
    Protocol protocol = new CopycatCommunicationProtocol(clusterService, clusterCommunicator);

    ClusterConfig clusterConfig =
        new ClusterConfig()
            .withProtocol(protocol)
            .withElectionTimeout(electionTimeoutMillis(activeNodeUris))
            .withHeartbeatInterval(heartbeatTimeoutMillis(activeNodeUris))
            .withMembers(activeNodeUris)
            .withLocalMember(localNodeUri);

    CopycatConfig copycatConfig =
        new CopycatConfig()
            .withName("onos")
            .withClusterConfig(clusterConfig)
            .withDefaultSerializer(new DatabaseSerializer())
            .withDefaultExecutor(
                Executors.newSingleThreadExecutor(
                    new NamedThreadFactory("copycat-coordinator-%d")));

    coordinator = new DefaultClusterCoordinator(copycatConfig.resolve());

    DatabaseConfig inMemoryDatabaseConfig =
        newDatabaseConfig(BASE_PARTITION_NAME, newInMemoryLog(), activeNodeUris);
    inMemoryDatabase =
        coordinator.getResource(
            inMemoryDatabaseConfig.getName(),
            inMemoryDatabaseConfig
                .resolve(clusterConfig)
                .withSerializer(copycatConfig.getDefaultSerializer())
                .withDefaultExecutor(copycatConfig.getDefaultExecutor()));

    List<Database> partitions =
        partitionMap
            .entrySet()
            .stream()
            .map(
                entry -> {
                  String[] replicas =
                      entry.getValue().stream().map(this::nodeToUri).toArray(String[]::new);
                  return newDatabaseConfig(entry.getKey(), newPersistentLog(), replicas);
                })
            .map(
                config -> {
                  Database db =
                      coordinator.getResource(
                          config.getName(),
                          config
                              .resolve(clusterConfig)
                              .withSerializer(copycatConfig.getDefaultSerializer())
                              .withDefaultExecutor(copycatConfig.getDefaultExecutor()));
                  return db;
                })
            .collect(Collectors.toList());

    partitionedDatabase = new PartitionedDatabase("onos-store", partitions);

    CompletableFuture<Void> status =
        coordinator
            .open()
            .thenCompose(
                v ->
                    CompletableFuture.allOf(inMemoryDatabase.open(), partitionedDatabase.open())
                        .whenComplete(
                            (db, error) -> {
                              if (error != null) {
                                log.error("Failed to initialize database.", error);
                              } else {
                                log.info("Successfully initialized database.");
                              }
                            }));

    Futures.getUnchecked(status);

    transactionManager = new TransactionManager(partitionedDatabase, consistentMapBuilder());
    partitionedDatabase.setTransactionManager(transactionManager);

    eventDispatcher =
        Executors.newSingleThreadExecutor(
            groupedThreads("onos/store/manager", "map-event-dispatcher"));

    queuePollExecutor =
        Executors.newFixedThreadPool(4, groupedThreads("onos/store/manager", "queue-poll-handler"));

    clusterCommunicator.<String>addSubscriber(
        QUEUE_UPDATED_TOPIC,
        data -> new String(data, Charsets.UTF_8),
        name -> {
          DefaultDistributedQueue q = queues.get(name);
          if (q != null) {
            q.tryPoll();
          }
        },
        queuePollExecutor);
    log.info("Started");
  }
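
The reduce over Sets.union above folds the per-partition node sets into nested union views before mapping them to URIs. An equivalent flattening, sketched here with a hypothetical Map<String, Set<String>> in place of the NodeInfo-based map, avoids the nesting:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;

class FlattenPartitionsSketch {
  public static void main(String[] args) {
    Map<String, Set<String>> partitionMap =
        ImmutableMap.of(
            "p1", ImmutableSet.of("node-1", "node-2"),
            "p2", ImmutableSet.of("node-2", "node-3"));

    // Flatten and de-duplicate the node sets directly instead of reducing with Sets.union.
    String[] activeNodeUris =
        partitionMap.values().stream()
            .flatMap(Set::stream)
            .distinct()
            .toArray(String[]::new);

    System.out.println(Arrays.toString(activeNodeUris)); // [node-1, node-2, node-3]
  }
}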
Example no. 30
  private BLANCResult scoreSets(
      final Iterable<Set<Object>> predicted, final Iterable<Set<Object>> gold) {

    final Multimap<Object, Set<Object>> predictedItemToGroup =
        CollectionUtils.makeSetElementsToContainersMultimap(predicted);
    final Multimap<Object, Set<Object>> goldItemToGroup =
        CollectionUtils.makeSetElementsToContainersMultimap(gold);

    final Set<Object> keyItems = goldItemToGroup.keySet();
    final Set<Object> responseItems = predictedItemToGroup.keySet();
    final ImmutableSet<Object> itemsInBoth =
        Sets.intersection(keyItems, responseItems).immutableCopy();

    // |C_k \cap C_r|
    int corefLinksInBoth = 0;
    // |C_k|
    int corefLinksInKey = 0;
    // |C_r|
    int corefLinksInResponse = 0;
    // |N_K \cap N_r|
    int nonCorefInBoth = 0;
    // |N_k|
    int nonCorefLinksInKey = 0;
    // |N_r|
    int nonCorefLinksInResponse = 0;

    final Set<Object> allItems = Sets.union(responseItems, keyItems).immutableCopy();

    for (final Object item : allItems) {
      final boolean inKey = keyItems.contains(item);
      final boolean inResponse = responseItems.contains(item);

      final Collection<Set<Object>> predictedClusters = predictedItemToGroup.get(item);
      final Collection<Set<Object>> goldClusters = goldItemToGroup.get(item);

      final Predicate<Object> SELF_ADJUSTMENT_FILTER;
      if (useSelfEdges) {
        SELF_ADJUSTMENT_FILTER = Predicates.alwaysTrue();
      } else {
        SELF_ADJUSTMENT_FILTER = not(equalTo(item));
      }
      final int selfAdjustment = useSelfEdges ? 0 : -1;

      final ImmutableSet<Object> predictedNeighbors =
          FluentIterable.from(concat(predictedClusters)).filter(SELF_ADJUSTMENT_FILTER).toSet();
      final ImmutableSet<Object> goldNeighbors =
          FluentIterable.from(concat(goldClusters)).filter(SELF_ADJUSTMENT_FILTER).toSet();

      // The contribution for this item is the size of the intersection
      // of the gold and predicted neighbor sets.
      corefLinksInBoth += Sets.intersection(predictedNeighbors, goldNeighbors).size();
      corefLinksInResponse += predictedNeighbors.size();
      corefLinksInKey += goldNeighbors.size();
      if (inKey) {
        nonCorefLinksInKey += keyItems.size() - goldNeighbors.size() + selfAdjustment;
      }

      if (inResponse) {
        nonCorefLinksInResponse +=
            responseItems.size() - predictedNeighbors.size() + selfAdjustment;
      }

      if (inKey && inResponse) {
        final ImmutableSet<Object> neighborsInEither =
            Sets.union(predictedNeighbors, goldNeighbors).immutableCopy();
        // -1 = don't count this item itself as a link
        nonCorefInBoth += Sets.difference(itemsInBoth, neighborsInEither).size() + selfAdjustment;
      }
    }

    return BLANCResult.fromSetCounts(
        keyItems.equals(responseItems),
        corefLinksInBoth,
        corefLinksInKey,
        corefLinksInResponse,
        nonCorefInBoth,
        nonCorefLinksInKey,
        nonCorefLinksInResponse);
  }