Example #1
  @NotNull
  private static Set<DeclarationDescriptor> getTopLevelDescriptorsByFqName(
      @NotNull KotlinCodeAnalyzer resolveSession, @NotNull FqName fqName) {
    FqName parentFqName = fqName.parent();

    Set<DeclarationDescriptor> descriptors = new HashSet<DeclarationDescriptor>();

    LazyPackageDescriptor parentFragment = resolveSession.getPackageFragment(parentFqName);
    if (parentFragment != null) {
      // Filter out extension properties
      descriptors.addAll(
          KotlinPackage.filter(
              parentFragment.getMemberScope().getProperties(fqName.shortName()),
              new Function1<VariableDescriptor, Boolean>() {
                @Override
                public Boolean invoke(VariableDescriptor descriptor) {
                  return descriptor.getReceiverParameter() == null;
                }
              }));
    }

    // the name may also denote a package fragment itself
    ContainerUtil.addIfNotNull(descriptors, resolveSession.getPackageFragment(fqName));

    descriptors.addAll(resolveSession.getTopLevelClassDescriptors(fqName));
    return descriptors;
  }
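
The KotlinPackage.filter call above is simply a predicate filter: it keeps only the property descriptors whose receiver parameter is null, i.e. non-extension properties. A minimal self-contained sketch of the same pattern in plain Java (the FilterSketch helper is hypothetical, not the actual Kotlin runtime API):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;

final class FilterSketch {
  // Generic predicate filter, equivalent in spirit to the KotlinPackage.filter call above.
  static <T> List<T> filter(Collection<? extends T> input, Predicate<? super T> keep) {
    List<T> result = new ArrayList<>();
    for (T element : input) {
      if (keep.test(element)) {
        result.add(element);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    List<String> names = List.of("plain", "ext.receiver", "other");
    // Keep only entries without a receiver marker, mirroring the
    // descriptor.getReceiverParameter() == null check above.
    System.out.println(filter(names, n -> !n.contains("."))); // [plain, other]
  }
}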
  private Throwable checkpoint(Throwable accumulate) {
    if (logger.isDebugEnabled())
      logger.debug("Checkpointing update:{}, obsolete:{}", staged.update, staged.obsolete);

    if (staged.isEmpty()) return accumulate;

    Set<SSTableReader> toUpdate = toUpdate();
    Set<SSTableReader> fresh = copyOf(fresh());

    // check the current versions of the readers we're replacing haven't somehow been replaced by
    // someone else
    checkNotReplaced(filterIn(toUpdate, staged.update));

    // ensure any new readers are in the compacting set, since we aren't done with them yet
    // and don't want anyone else messing with them
    // apply atomically along with updating the live set of readers
    tracker.apply(
        compose(updateCompacting(emptySet(), fresh), updateLiveSet(toUpdate, staged.update)));

    // log the staged changes and our newly marked readers
    marked.addAll(fresh);
    logged.log(staged);

    // set up our tracker, and mark our prior versions replaced, also releasing our references
    // to them. we do not replace/release obsoleted readers, since we may need to restore them
    // on rollback
    accumulate = setReplaced(filterOut(toUpdate, staged.obsolete), accumulate);
    accumulate = release(selfRefs(filterOut(toUpdate, staged.obsolete)), accumulate);

    staged.clear();
    return accumulate;
  }
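
checkpoint threads a Throwable accumulator through its cleanup steps, so a failure in one step does not abort the remaining steps; errors are collected and handed back to the caller instead of thrown immediately. A minimal sketch of that accumulator pattern (the merge/runStep helpers are hypothetical, standing in for whatever utility the real code uses):

final class AccumulateSketch {
  // Merge a new failure into the accumulator, keeping the first as primary
  // and attaching later ones as suppressed exceptions.
  static Throwable merge(Throwable accumulate, Throwable t) {
    if (accumulate == null) return t;
    accumulate.addSuppressed(t);
    return accumulate;
  }

  // Run one step; on failure, record the error and keep going.
  static Throwable runStep(Runnable step, Throwable accumulate) {
    try {
      step.run();
    } catch (Throwable t) {
      accumulate = merge(accumulate, t);
    }
    return accumulate;
  }

  public static void main(String[] args) {
    Throwable accumulate = null;
    accumulate = runStep(() -> { throw new IllegalStateException("step 1 failed"); }, accumulate);
    accumulate = runStep(() -> System.out.println("step 2 still runs"), accumulate);
    if (accumulate != null) accumulate.printStackTrace();
  }
}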
Example #3
 private void addMergedValues(@NotNull MergeInstruction instruction) {
   Set<PseudoValue> result = new LinkedHashSet<PseudoValue>();
   for (PseudoValue value : instruction.getInputValues()) {
     // each input contributes its previously merged values plus itself
     result.addAll(getMergedValues(value));
     result.add(value);
   }
   mergedValues.put(instruction.getOutputValue(), result);
 }
  public Collection<InetAddress> pendingEndpointsFor(Token token, String keyspaceName) {
    Map<Range<Token>, Collection<InetAddress>> ranges = getPendingRanges(keyspaceName);
    if (ranges.isEmpty()) return Collections.emptyList();

    Set<InetAddress> endpoints = new HashSet<InetAddress>();
    // union the endpoints of every pending range that contains the token
    for (Map.Entry<Range<Token>, Collection<InetAddress>> entry : ranges.entrySet()) {
      if (entry.getKey().contains(token)) endpoints.addAll(entry.getValue());
    }

    return endpoints;
  }
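
pendingEndpointsFor is a linear scan that unions the endpoints of every pending range containing the token. A simplified, self-contained sketch of the same lookup, with a hypothetical integer Interval standing in for Range<Token> and plain strings for addresses:

import java.util.*;

final class PendingLookupSketch {
  // Hypothetical stand-in for Range<Token>, assuming half-open (left, right] semantics.
  record Interval(int left, int right) {
    boolean contains(int point) { return point > left && point <= right; }
  }

  static Set<String> endpointsFor(int token, Map<Interval, Collection<String>> pendingRanges) {
    if (pendingRanges.isEmpty()) return Collections.emptySet();
    Set<String> endpoints = new HashSet<>();
    for (Map.Entry<Interval, Collection<String>> entry : pendingRanges.entrySet()) {
      if (entry.getKey().contains(token)) endpoints.addAll(entry.getValue());
    }
    return endpoints;
  }

  public static void main(String[] args) {
    Map<Interval, Collection<String>> pending = Map.of(
        new Interval(0, 100), List.of("10.0.0.1"),
        new Interval(50, 150), List.of("10.0.0.2"));
    // Token 75 falls in both intervals, so both endpoints are returned.
    System.out.println(endpointsFor(75, pending));
  }
}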
Example #5
 @NotNull
 private static Set<LocalFunctionDeclarationInstruction> getLocalDeclarations(
     @NotNull Pseudocode pseudocode) {
   Set<LocalFunctionDeclarationInstruction> localDeclarations = Sets.newLinkedHashSet();
   for (Instruction instruction : ((PseudocodeImpl) pseudocode).mutableInstructionList) {
     if (instruction instanceof LocalFunctionDeclarationInstruction) {
       localDeclarations.add((LocalFunctionDeclarationInstruction) instruction);
       // recurse into the local function's own body to pick up nested declarations
       localDeclarations.addAll(
           getLocalDeclarations(((LocalFunctionDeclarationInstruction) instruction).getBody()));
     }
   }
   return localDeclarations;
 }
Example #6
  /** A recursive routine for collecting unique documents from this cluster and subclusters. */
  private static Set<Document> collectAllDocuments(Cluster cluster, Set<Document> docs) {
    if (cluster == null) {
      return docs;
    }

    docs.addAll(cluster.getDocuments());

    final List<Cluster> subclusters = cluster.getSubclusters();
    for (final Cluster subcluster : subclusters) {
      collectAllDocuments(subcluster, docs);
    }

    return docs;
  }
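
collectAllDocuments uses accumulator-passing recursion: the caller supplies the target set, each level adds its own documents, and the same set is threaded through the subcluster calls, so no intermediate collections are allocated. A generic sketch of the same pattern over a hypothetical tree node type:

import java.util.*;

final class TreeCollectSketch {
  // Hypothetical n-ary tree node.
  static final class Node {
    final String value;
    final List<Node> children;
    Node(String value, Node... children) {
      this.value = value;
      this.children = List.of(children);
    }
  }

  // Accumulator-passing recursion: the caller's set is threaded through every level.
  static Set<String> collect(Node node, Set<String> acc) {
    if (node == null) return acc;
    acc.add(node.value);
    for (Node child : node.children) collect(child, acc);
    return acc;
  }

  public static void main(String[] args) {
    Node root = new Node("a", new Node("b", new Node("d")), new Node("c"));
    // Prints [a, b, d, c]: pre-order traversal, duplicates removed by the set.
    System.out.println(collect(root, new LinkedHashSet<>()));
  }
}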
 /**
  * If no experiments were specified, inject into statisticsQuery a superset of all experiments
  * for which stats exist across all attributes.
  *
  * @param statisticsQuery the query condition whose experiment set may be populated
  * @param statisticsStorage the storage from which per-attribute experiment stats are read
  */
 private static void setQueryExperiments(
     StatisticsQueryCondition statisticsQuery, StatisticsStorage statisticsStorage) {
   Set<ExperimentInfo> exps = statisticsQuery.getExperiments();
   if (exps.isEmpty()) {
     // No experiment conditions were specified - assemble a superset of all
     // experiments for which stats exist across all attributes
     for (EfAttribute attr : statisticsQuery.getAttributes()) {
       Map<ExperimentInfo, ConciseSet> expsToStats =
           getStatisticsForAttribute(statisticsQuery.getStatisticsType(), attr, statisticsStorage);
       if (expsToStats != null) exps.addAll(expsToStats.keySet());
     }
     statisticsQuery.inExperiments(exps);
   }
 }
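
The defaulting pattern above (an empty condition set means "all experiments for which stats exist") can be shown in isolation. A minimal sketch with hypothetical string keys in place of ExperimentInfo and EfAttribute:

import java.util.*;

final class DefaultSupersetSketch {
  // If no keys were requested, default to the union of every key that has stats.
  static Set<String> effectiveKeys(
      Set<String> requested, Map<String, Map<String, Integer>> statsByAttribute) {
    if (!requested.isEmpty()) return requested;
    Set<String> all = new LinkedHashSet<>();
    for (Map<String, Integer> stats : statsByAttribute.values()) {
      all.addAll(stats.keySet());
    }
    return all;
  }

  public static void main(String[] args) {
    Map<String, Map<String, Integer>> stats = Map.of(
        "attrA", Map.of("exp1", 3, "exp2", 5),
        "attrB", Map.of("exp2", 1, "exp3", 7));
    // Empty request -> superset of all experiments with stats: exp1, exp2, exp3 (order may vary)
    System.out.println(effectiveKeys(new HashSet<>(), stats));
  }
}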
 void log(State staged) {
   // a reader previously logged as updated may since have been obsoleted,
   // so drop obsoleted readers from the update set first
   update.removeAll(staged.obsolete);
   // remove before adding: Set.addAll does not replace elements that compare
   // equal, so stale instances must be dropped for the staged ones to take effect
   update.removeAll(staged.update);
   update.addAll(staged.update);
   obsolete.addAll(staged.obsolete);
 }
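
The removeAll calls before addAll in log are not redundant: Set.addAll is a no-op for an element that is already equal to a member, so it would silently keep a stale instance. Removing first guarantees the staged instances replace any previously logged versions that compare equal. A small demonstration, using a hypothetical Reader whose equality is by name only:

import java.util.*;

final class ReplaceInSetSketch {
  // Hypothetical reader whose identity is its name; generation changes across versions.
  static final class Reader {
    final String name;
    final int generation;
    Reader(String name, int generation) { this.name = name; this.generation = generation; }
    @Override public boolean equals(Object o) {
      return o instanceof Reader && ((Reader) o).name.equals(name);
    }
    @Override public int hashCode() { return name.hashCode(); }
    @Override public String toString() { return name + "@gen" + generation; }
  }

  public static void main(String[] args) {
    Set<Reader> update = new HashSet<>(List.of(new Reader("sstable-1", 1)));
    Set<Reader> staged = Set.of(new Reader("sstable-1", 2));

    // addAll alone keeps the stale gen-1 instance, because the gen-2 one is "equal".
    Set<Reader> naive = new HashSet<>(update);
    naive.addAll(staged);
    System.out.println(naive); // [sstable-1@gen1]

    // removeAll first, then addAll: the new instance replaces the old one.
    update.removeAll(staged);
    update.addAll(staged);
    System.out.println(update); // [sstable-1@gen2]
  }
}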