@Test(enabled = true, dependsOnMethods = "testTemplateMatch")
  public void testCreateTwoNodesWithRunScript() throws Exception {
    try {
      client.destroyNodesMatching(NodePredicates.withTag(tag));
    } catch (HttpResponseException e) {
      // TODO hosting.com throws 400 when we try to delete a vApp
    } catch (NoSuchElementException e) {

    }
    template = buildTemplate(client.templateBuilder());

    template
        .getOptions()
        .installPrivateKey(keyPair.get("private"))
        .authorizePublicKey(keyPair.get("public"))
        .runScript(buildScript(template.getImage().getOsFamily()).getBytes());
    try {
      nodes = Sets.newTreeSet(client.runNodesWithTag(tag, 2, template));
    } catch (RunNodesException e) {
      nodes = Sets.newTreeSet(Iterables.concat(e.getSuccessfulNodes(), e.getNodeErrors().keySet()));
      throw e;
    }
    assertEquals(nodes.size(), 2);
    checkNodes(nodes, tag);
    NodeMetadata node1 = nodes.first();
    NodeMetadata node2 = nodes.last();
    // credentials aren't always the same
    // assertEquals(node1.getCredentials(), node2.getCredentials());

    assertLocationSameOrChild(node1.getLocation(), template.getLocation());
    assertLocationSameOrChild(node2.getLocation(), template.getLocation());

    assertEquals(node1.getImage(), template.getImage());
    assertEquals(node2.getImage(), template.getImage());
  }
 private static int fingerprint(
     final NetworkInfoSource source,
     final List<com.eucalyptus.cluster.Cluster> clusters,
     final Set<String> dirtyPublicAddresses,
     final String networkConfiguration) {
   final HashFunction hashFunction = goodFastHash(32);
   final Hasher hasher = hashFunction.newHasher();
   final Funnel<VersionedNetworkView> versionedItemFunnel =
       new Funnel<VersionedNetworkView>() {
         @Override
         public void funnel(final VersionedNetworkView o, final PrimitiveSink primitiveSink) {
           primitiveSink.putString(o.getId(), StandardCharsets.UTF_8);
           primitiveSink.putChar('=');
           primitiveSink.putInt(o.getVersion());
         }
       };
   for (final Map.Entry<String, Iterable<? extends VersionedNetworkView>> entry :
       source.getView().entrySet()) {
     hasher.putString(entry.getKey(), StandardCharsets.UTF_8);
     for (final VersionedNetworkView item : entry.getValue()) {
       hasher.putObject(item, versionedItemFunnel);
     }
   }
   hasher.putString(
       Joiner.on(',').join(Sets.newTreeSet(Iterables.transform(clusters, HasName.GET_NAME))),
       StandardCharsets.UTF_8);
   hasher.putString(
       Joiner.on(',').join(Sets.newTreeSet(dirtyPublicAddresses)), StandardCharsets.UTF_8);
   hasher.putInt(networkConfiguration.hashCode());
   return hasher.hash().asInt();
 }
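The Funnel and Hasher pairing above generalizes to any versioned collection: funnel each item in a stable order and fold everything into one hash. A minimal, self-contained sketch of the same pattern, assuming only Guava on the classpath; the Map.Entry funnel and the FingerprintSketch class are illustrative and not part of the code above.

import com.google.common.collect.ImmutableSortedMap;
import com.google.common.hash.Funnel;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.hash.PrimitiveSink;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.SortedMap;

public final class FingerprintSketch {

  // Funnels each entry as "key=version", mirroring the VersionedNetworkView funnel above.
  private static final Funnel<Map.Entry<String, Integer>> ENTRY_FUNNEL =
      new Funnel<Map.Entry<String, Integer>>() {
        @Override
        public void funnel(final Map.Entry<String, Integer> entry, final PrimitiveSink sink) {
          sink.putString(entry.getKey(), StandardCharsets.UTF_8);
          sink.putChar('=');
          sink.putInt(entry.getValue());
        }
      };

  static int fingerprint(final SortedMap<String, Integer> versions) {
    final Hasher hasher = Hashing.goodFastHash(32).newHasher();
    for (final Map.Entry<String, Integer> entry : versions.entrySet()) {
      hasher.putObject(entry, ENTRY_FUNNEL);
    }
    return hasher.hash().asInt();
  }

  public static void main(String[] args) {
    // The sorted map iterates in key order, so equal inputs produce the same
    // fingerprint within a JVM run (goodFastHash may differ between runs).
    System.out.println(fingerprint(ImmutableSortedMap.of("subnet-2", 7, "vpc-1", 3)));
  }
}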
  public ViewStatusCalculationWorker(
      final ToolContext toolContext,
      UniqueId portfolioId,
      final ViewStatusOption option,
      final ExecutorService executorService) {
     ArgumentChecker.notNull(toolContext, "toolContext");
    ArgumentChecker.notNull(portfolioId, "portfolioId");
    ArgumentChecker.notNull(option, "option");
    ArgumentChecker.notNull(option.getUser(), "option.user");
    ArgumentChecker.notNull(option.getMarketDataSpecification(), "option.marketDataSpecification");
    ArgumentChecker.notNull(executorService, "executorService");

    validateComponentsInToolContext(toolContext);
    _portfolioId = portfolioId;
    _user = option.getUser();
    _marketDataSpecification = option.getMarketDataSpecification();
    Map<String, Collection<String>> valueRequirementBySecType =
        scanValueRequirementBySecType(portfolioId, toolContext);
    if (s_logger.isDebugEnabled()) {
      StringBuilder strBuf = new StringBuilder();
      for (String securityType : Sets.newTreeSet(valueRequirementBySecType.keySet())) {
        Set<String> valueNames = Sets.newTreeSet(valueRequirementBySecType.get(securityType));
        strBuf.append(
            String.format(
                "%s\t%s\n", StringUtils.rightPad(securityType, 40), valueNames.toString()));
      }
      s_logger.debug("\n{}\n", strBuf.toString());
    }
    _toolContext = toolContext;
    _executor = executorService;
    _valueRequirementBySecType = valueRequirementBySecType;
  }
Example 4
 private static class PushedData {
   volatile boolean idleExitSent = false;
   // guarded by idleMutex.
   final SortedSet<Integer> pushAdds = Sets.<Integer>newTreeSet();
   // guarded by idleMutex.
   final SortedSet<Integer> pushRemoves = Sets.<Integer>newTreeSet();
 }
  @Test
  void testDescribeAddresses() {
    for (String region : ec2Api.getConfiguredRegions()) {
      SortedSet<PublicIpInstanceIdPair> allResults =
          Sets.newTreeSet(client.describeAddressesInRegion(region));
      assertNotNull(allResults);
      if (!allResults.isEmpty()) {
        PublicIpInstanceIdPair pair = allResults.last();
        SortedSet<PublicIpInstanceIdPair> result =
            Sets.newTreeSet(client.describeAddressesInRegion(region, pair.getPublicIp()));
        assertNotNull(result);
        PublicIpInstanceIdPair compare = result.last();
        assertEquals(compare, pair);

        SortedSet<PublicIpInstanceIdPair> filterResult =
            Sets.newTreeSet(
                client.describeAddressesInRegionWithFilter(
                    region,
                    ImmutableMultimap.<String, String>builder()
                        .put("public-ip", pair.getPublicIp())
                        .build()));
        assertNotNull(filterResult);
        PublicIpInstanceIdPair filterCompare = filterResult.last();
        assertEquals(filterCompare, pair);
      }
    }
  }
Example 6
 @Test
 void testDescribeKeyPairs() {
   for (String region : ec2Api.getConfiguredRegions()) {
     SortedSet<KeyPair> allResults = Sets.newTreeSet(client.describeKeyPairsInRegion(region));
     assertNotNull(allResults);
     if (!allResults.isEmpty()) {
       KeyPair pair = allResults.last();
       SortedSet<KeyPair> result =
           Sets.newTreeSet(client.describeKeyPairsInRegion(region, pair.getKeyName()));
       assertNotNull(result);
       KeyPair compare = result.last();
       assertEquals(compare, pair);
     }
   }
 }
Example 7
  @Override
  public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException {
    final Set<Interval> intervals = Sets.newTreeSet(Comparators.intervals());
    Optional<Set<Interval>> optionalIntervals = config.getSegmentGranularIntervals();
    if (optionalIntervals.isPresent()) {
      for (Interval segmentInterval : optionalIntervals.get()) {
        for (Interval dataInterval : dataGranularity.getIterable(segmentInterval)) {
          intervals.add(dataInterval);
        }
      }
    }

    Path betaInput = new Path(inputPath);
    FileSystem fs = betaInput.getFileSystem(job.getConfiguration());
    Set<String> paths = Sets.newTreeSet();
    Pattern fileMatcher = Pattern.compile(filePattern);

    DateTimeFormatter customFormatter = null;
    if (pathFormat != null) {
      customFormatter = DateTimeFormat.forPattern(pathFormat);
    }

    for (Interval interval : intervals) {
      DateTime t = interval.getStart();
      String intervalPath = null;
      if (customFormatter != null) {
        intervalPath = customFormatter.print(t);
      } else {
        intervalPath = dataGranularity.toPath(t);
      }

      Path granularPath = new Path(betaInput, intervalPath);
      log.info("Checking path[%s]", granularPath);
      for (FileStatus status : FSSpideringIterator.spiderIterable(fs, granularPath)) {
        final Path filePath = status.getPath();
        if (fileMatcher.matcher(filePath.toString()).matches()) {
          paths.add(filePath.toString());
        }
      }
    }

    for (String path : paths) {
      log.info("Appending path[%s]", path);
      FileInputFormat.addInputPath(job, new Path(path));
    }

    return job;
  }
/**
 * Collection of messages.
 *
 * @author Oliver Burn
 * @version 1.0
 */
public final class LocalizedMessages {
  /** contains the messages logged */
  private final TreeSet<LocalizedMessage> mMessages = Sets.newTreeSet();

  /** @return the logged messages */
  public TreeSet<LocalizedMessage> getMessages() {
    return Sets.newTreeSet(mMessages);
  }

  /** Reset the object. */
  public void reset() {
    mMessages.clear();
  }

  /**
   * Logs a message to be reported.
   *
   * @param aMsg the message to log
   */
  public void add(LocalizedMessage aMsg) {
    mMessages.add(aMsg);
  }

  /** @return the number of messages */
  public int size() {
    return mMessages.size();
  }
}
 public SortedSet<Locomotive> getAllLocomotives() {
   final SortedSet<Locomotive> allLocomotives = Sets.newTreeSet();
   for (final LocomotiveGroup locomotiveGroup : updatedLocomotiveGroups) {
     allLocomotives.addAll(locomotiveGroup.getLocomotives());
   }
   return allLocomotives;
 }
 @Test
 void testDescribeAddresses() {
   for (String region : ec2Api.getConfiguredRegions()) {
     SortedSet<PublicIpInstanceIdPair> allResults =
         Sets.newTreeSet(client.describeAddressesInRegion(region));
     assertNotNull(allResults);
     if (allResults.size() >= 1) {
       PublicIpInstanceIdPair pair = allResults.last();
       SortedSet<PublicIpInstanceIdPair> result =
           Sets.newTreeSet(client.describeAddressesInRegion(region, pair.getPublicIp()));
       assertNotNull(result);
       PublicIpInstanceIdPair compare = result.last();
       assertEquals(compare, pair);
     }
   }
 }
Example 11
 public void testNewTreeSetEmptyDerived() {
   TreeSet<Derived> set = Sets.newTreeSet();
   assertTrue(set.isEmpty());
   set.add(new Derived("foo"));
   set.add(new Derived("bar"));
   assertThat(set).containsExactly(new Derived("bar"), new Derived("foo")).inOrder();
 }
Example 12
  @Test
  public void testBucketMonthComparator() throws Exception {
    DataSegment[] sortedOrder = {
      makeDataSegment("test1", "2011-01-01/2011-01-02", "a"),
      makeDataSegment("test1", "2011-01-02/2011-01-03", "a"),
      makeDataSegment("test1", "2011-01-02/2011-01-03", "b"),
      makeDataSegment("test2", "2011-01-01/2011-01-02", "a"),
      makeDataSegment("test2", "2011-01-02/2011-01-03", "a"),
      makeDataSegment("test1", "2011-02-01/2011-02-02", "a"),
      makeDataSegment("test1", "2011-02-02/2011-02-03", "a"),
      makeDataSegment("test1", "2011-02-02/2011-02-03", "b"),
      makeDataSegment("test2", "2011-02-01/2011-02-02", "a"),
      makeDataSegment("test2", "2011-02-02/2011-02-03", "a"),
    };

    List<DataSegment> shuffled = Lists.newArrayList(sortedOrder);
    Collections.shuffle(shuffled);

    Set<DataSegment> theSet = Sets.newTreeSet(DataSegment.bucketMonthComparator());
    theSet.addAll(shuffled);

    int index = 0;
    for (DataSegment dataSegment : theSet) {
      Assert.assertEquals(sortedOrder[index], dataSegment);
      ++index;
    }
  }
Example 13
 private String typeToString() {
   switch (mask) {
     case BOTTOM_MASK:
     case TOP_MASK:
     case UNKNOWN_MASK:
       return tagToString(mask, null, null);
     default:
       int tags = mask;
       Set<String> types = Sets.newTreeSet();
       for (int mask = 1; mask != END_MASK; mask <<= 1) {
         if ((tags & mask) != 0) {
           types.add(tagToString(mask, objs, typeVar));
           tags = tags & ~mask; // Remove current mask from union
         }
       }
       if (tags == 0) { // Found all types in the union
         return Joiner.on("|").join(types);
       } else if (tags == TRUTHY_MASK) {
         return "truthy";
       } else if (tags == FALSY_MASK) {
         return "falsy";
       } else {
         return "Unrecognized type: " + tags;
       }
   }
 }
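The default branch above walks a single set bit across the union, names each bit it recognizes, and joins the sorted names with "|". A minimal sketch of that decomposition; the NUMBER/STRING/NULL masks are made up for illustration and are not the compiler's real type lattice.

import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import java.util.Set;

public final class MaskToString {
  private static final int NUMBER_MASK = 1 << 0;
  private static final int STRING_MASK = 1 << 1;
  private static final int NULL_MASK = 1 << 2;
  private static final int END_MASK = 1 << 3; // one past the highest known flag

  static String unionToString(int tags) {
    Set<String> types = Sets.newTreeSet(); // TreeSet gives a stable, sorted rendering
    for (int mask = 1; mask != END_MASK; mask <<= 1) {
      if ((tags & mask) != 0) {
        types.add(nameOf(mask));
        tags &= ~mask; // remove the recognized bit from the union
      }
    }
    return tags == 0 ? Joiner.on("|").join(types) : "Unrecognized type: " + tags;
  }

  private static String nameOf(int mask) {
    switch (mask) {
      case NUMBER_MASK:
        return "number";
      case STRING_MASK:
        return "string";
      case NULL_MASK:
        return "null";
      default:
        return "?";
    }
  }

  public static void main(String[] args) {
    System.out.println(unionToString(NUMBER_MASK | NULL_MASK)); // prints null|number
  }
}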
Example 14
  @Test
  public void testAddListener() throws Exception {
    final DebugChecker checker = new DebugChecker();
    final DebugAuditAdapter auditAdapter = new DebugAuditAdapter();
    checker.addListener(auditAdapter);

    // Let's try fire some events
    checker.fireAuditStarted();
    assertTrue("Checker.fireAuditStarted() doesn't call listener", auditAdapter.wasCalled());

    auditAdapter.resetListener();
    checker.fireAuditFinished();
    assertTrue("Checker.fireAuditFinished() doesn't call listener", auditAdapter.wasCalled());

    auditAdapter.resetListener();
    checker.fireFileStarted("Some File Name");
    assertTrue("Checker.fireFileStarted() doesn't call listener", auditAdapter.wasCalled());

    auditAdapter.resetListener();
    checker.fireFileFinished("Some File Name");
    assertTrue("Checker.fireFileFinished() doesn't call listener", auditAdapter.wasCalled());

    auditAdapter.resetListener();
    final SortedSet<LocalizedMessage> messages = Sets.newTreeSet();
    messages.add(
        new LocalizedMessage(
            0, 0, "a Bundle", "message.key", new Object[] {"arg"}, null, getClass(), null));
    checker.fireErrors("Some File Name", messages);
    assertTrue("Checker.fireErrors() doesn't call listener", auditAdapter.wasCalled());
  }
Example 15
  /** Constructor. */
  public WaveletContainerImpl(WaveletName waveletName) {
    this.waveletName = waveletName;
    waveletData = new WaveletDataImpl(waveletName.waveId, waveletName.waveletId);
    currentVersion = HASHED_HISTORY_VERSION_FACTORY.createVersionZero(waveletName);
    lastCommittedVersion = null;

    appliedDeltas = Sets.newTreeSet(appliedDeltaComparator);
    transformedDeltas = Sets.newTreeSet(transformedDeltaComparator);
    deserializedTransformedDeltas = Sets.newTreeSet(deserializedDeltaComparator);

    // Configure the locks used by this Wavelet.
    final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    readLock = readWriteLock.readLock();
    writeLock = readWriteLock.writeLock();
    state = State.OK;
  }
Example 16
  /**
   * Merges service properties from the parent into the service properties of this service.
   * Current properties override properties with the same name from the parent.
   *
   * @param other service properties to merge with the current service property list
   */
  private void mergeServiceProperties(List<ServicePropertyInfo> other) {
    if (!other.isEmpty()) {
      List<ServicePropertyInfo> servicePropertyList = serviceInfo.getServicePropertyList();
      List<ServicePropertyInfo> servicePropertiesToAdd = Lists.newArrayList();

      Set<String> servicePropertyNames =
          Sets.newTreeSet(
              Iterables.transform(
                  servicePropertyList,
                  new Function<ServicePropertyInfo, String>() {
                    @Nullable
                    @Override
                    public String apply(ServicePropertyInfo serviceProperty) {
                      return serviceProperty.getName();
                    }
                  }));

      for (ServicePropertyInfo otherServiceProperty : other) {
        if (!servicePropertyNames.contains(otherServiceProperty.getName()))
          servicePropertiesToAdd.add(otherServiceProperty);
      }

      List<ServicePropertyInfo> mergedServicePropertyList =
          ImmutableList.<ServicePropertyInfo>builder()
              .addAll(servicePropertyList)
              .addAll(servicePropertiesToAdd)
              .build();

      serviceInfo.setServicePropertyList(mergedServicePropertyList);

      validateServiceInfo();
    }
  }
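The merge above amounts to: index the current properties by name, then append only the parent properties whose name is not already present. A minimal sketch of that pattern using hypothetical "name=value" strings in place of Ambari's ServicePropertyInfo.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;

public final class MergeByNameSketch {

  // Keeps every current property and adds parent properties whose name is absent.
  static List<String> merge(List<String> current, List<String> parent) {
    Set<String> currentNames = Sets.newTreeSet();
    for (String property : current) {
      currentNames.add(property.split("=", 2)[0]);
    }
    List<String> toAdd = Lists.newArrayList();
    for (String property : parent) {
      if (!currentNames.contains(property.split("=", 2)[0])) {
        toAdd.add(property);
      }
    }
    return ImmutableList.<String>builder().addAll(current).addAll(toAdd).build();
  }

  public static void main(String[] args) {
    // Prints [timeout=30, retries=3]: the current "timeout" wins over the parent's.
    System.out.println(
        merge(ImmutableList.of("timeout=30"), ImmutableList.of("timeout=10", "retries=3")));
  }
}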
Example 17
  /**
   * Prints out the results of the given tests, and returns true if they all passed. Posts any
   * targets which weren't already completed by the listener to the EventBus. Reports all targets on
   * the console via the given notifier. Run at the end of the build, run only once.
   *
   * @param testTargets The list of targets being run
   * @param listener An aggregating listener with intermediate results
   * @param notifier A console notifier to echo results to.
   * @return true if all the tests passed, else false
   */
  public boolean differentialAnalyzeAndReport(
      Collection<ConfiguredTarget> testTargets,
      AggregatingTestListener listener,
      TestResultNotifier notifier) {

    Preconditions.checkNotNull(testTargets);
    Preconditions.checkNotNull(listener);
    Preconditions.checkNotNull(notifier);

    // The natural ordering of the summaries defines their output order.
    Set<TestSummary> summaries = Sets.newTreeSet();

    int totalRun = 0; // Number of targets running at least one non-cached test.
    int passCount = 0;

    for (ConfiguredTarget testTarget : testTargets) {
      TestSummary summary = aggregateAndReportSummary(testTarget, listener).build();
      summaries.add(summary);

      // Finished aggregating; build the final console output.
      if (summary.actionRan()) {
        totalRun++;
      }

      if (TestResult.isBlazeTestStatusPassed(summary.getStatus())) {
        passCount++;
      }
    }

    Preconditions.checkState(summaries.size() == testTargets.size());

    notifier.notify(summaries, totalRun);
    return passCount == testTargets.size();
  }
  public PageSet<ObjectInfo> apply(InputStream stream) {
    checkState(args != null, "request should be initialized at this point");
    Type listType = new TypeToken<SortedSet<ObjectInfoImpl>>() {}.getType();

    try {
      SortedSet<ObjectInfoImpl> list = apply(stream, listType);
      SortedSet<ObjectInfo> returnVal =
          Sets.newTreeSet(
              Iterables.transform(
                  list,
                  new Function<ObjectInfoImpl, ObjectInfo>() {
                    public ObjectInfo apply(ObjectInfoImpl from) {
                      return from.toBuilder()
                          .container(container)
                          .uri(
                              uriBuilder(request.getEndpoint())
                                  .clearQuery()
                                  .appendPath(from.getName())
                                  .build())
                          .build();
                    }
                  }));
      boolean truncated = options.getMaxResults() == returnVal.size();
      String marker = truncated ? returnVal.last().getName() : null;
      return new PageSetImpl<ObjectInfo>(returnVal, marker);
    } catch (IOException e) {
      throw new RuntimeException("problem reading response from request: " + request, e);
    }
  }
  private void printImports(CompilationUnit node) {
    ImplementationImportCollector collector = new ImplementationImportCollector();
    collector.collect(node, getSourceFileName());
    Set<Import> imports = collector.getImports();

    if (!imports.isEmpty()) {
      Set<String> includeStmts = Sets.newTreeSet();
      for (Import imp : imports) {
        includeStmts.add(String.format("#include \"%s.h\"", imp.getImportFileName()));
      }
      for (String stmt : includeStmts) {
        println(stmt);
      }

      // Print native includes.
      int endOfImportText =
          node.types().isEmpty()
              ? node.getLength()
              : ((ASTNode) node.types().get(0)).getStartPosition();
      for (Comment c : ASTUtil.getCommentList(node)) {
        int start = c.getStartPosition();
        if (start >= endOfImportText) {
          break;
        }
        if (c instanceof BlockComment) {
          String nativeImport = extractNativeCode(start, c.getLength(), true);
          if (nativeImport != null) { // if it has a JSNI section
            println(nativeImport.trim());
          }
        }
      }

      newline();
    }
  }
  /**
   * Adds to the operations list all operations required to mutate {@code originalNode} to the state
   * of {@code newNode}.
   *
   * <p>The returned set of child product dimensions will only <em>potentially</em> be non-empty if
   * both {@code originalNode != null} and {@code newNode != null}.
   *
   * @param originalNode may be null
   * @param newNode may be null
   * @param ops the operations list to add to
   * @return the set of child product dimensions that require further processing
   */
  private Set<ProductDimension> addMutateOperations(
      @Nullable ProductPartitionNode originalNode,
      @Nullable ProductPartitionNode newNode,
      List<OperationPair> ops) {
    Set<ProductDimension> childDimensionsToProcess = Sets.newTreeSet(dimensionComparator);

    NodeDifference nodeDifference =
        ProductPartitionNodeDiffer.diff(originalNode, newNode, dimensionComparator);
    boolean isProcessChildren;
    switch (nodeDifference) {
      case NEW_NODE:
        ops.addAll(createAddOperations(newNode));
        // No need to further process children. The ADD operations above will include operations
        // for all children of newNode.
        isProcessChildren = false;
        break;
      case REMOVED_NODE:
        ops.add(createRemoveOperation(originalNode));
        // No need to further process children. The REMOVE operation above will perform a
        // cascading delete of all children of originalNode.
        isProcessChildren = false;
        break;
      case PARTITION_TYPE_CHANGE:
      case EXCLUDED_UNIT_CHANGE:
        ops.add(createRemoveOperation(originalNode));
        ops.addAll(createAddOperations(newNode));
        // No need to further process children. The ADD operations above will include operations
        // for all children of newNode.
        isProcessChildren = false;
        break;
      case BID_CHANGE:
        // Ensure that the new node has the proper ID (this may have been lost if the node
        // was removed and then re-added).
        newNode = newNode.setProductPartitionId(originalNode.getProductPartitionId());
        ops.add(createSetBidOperation(newNode));
        // Process the children of newNode. The SET operation above will only handle changes
        // made to newNode, not its children.
        isProcessChildren = true;
        break;
      case NONE:
        // Ensure that the new node has the proper ID (this may have been lost if the node
        // was removed and then re-added).
        newNode = newNode.setProductPartitionId(originalNode.getProductPartitionId());
        // This node does not have changes, but its children may.
        isProcessChildren = true;
        break;
      default:
        throw new IllegalStateException("Unrecognized difference: " + nodeDifference);
    }

    if (isProcessChildren) {
      for (ProductPartitionNode childNode :
          Iterables.concat(originalNode.getChildren(), newNode.getChildren())) {
        childDimensionsToProcess.add(childNode.getDimension());
      }
    }

    return childDimensionsToProcess;
  }
Example 21
 @Test(expectedExceptions = AWSResponseException.class)
 void testDescribeKeyPairsWithInvalidFilter() {
   for (String region : ec2Api.getConfiguredRegions()) {
     SortedSet<KeyPair> allResults = Sets.newTreeSet(client.describeKeyPairsInRegion(region));
     assertNotNull(allResults);
     if (!allResults.isEmpty()) {
       KeyPair pair = allResults.last();
       SortedSet<KeyPair> result =
           Sets.newTreeSet(
               client.describeKeyPairsInRegionWithFilter(
                   region,
                   ImmutableMultimap.<String, String>builder()
                       .put("invalid-filter", pair.getKeyName())
                       .build()));
     }
   }
 }
 private Set<IMethodName> rebaseMethodCalls(final ObjectUsage obj) {
   final Set<IMethodName> rebasedMethodCalls = Sets.newTreeSet();
   for (final IMethodName method : obj.calls) {
     final VmMethodName rebased = VmMethodName.rebase(obj.type, method);
     rebasedMethodCalls.add(rebased);
   }
   return rebasedMethodCalls;
 }
Example 23
 public void testNewTreeSetFromIterableNonGeneric() {
   Iterable<LegacyComparable> iterable =
       Arrays.asList(new LegacyComparable("foo"), new LegacyComparable("bar"));
   TreeSet<LegacyComparable> set = Sets.newTreeSet(iterable);
   assertThat(set)
       .containsExactly(new LegacyComparable("bar"), new LegacyComparable("foo"))
       .inOrder();
 }
Example 24
  /**
   * Return a manifest of what finalized edit logs are available. All available edit logs are
   * returned starting from the transaction id passed.
   *
   * @param fromTxId Starting transaction id to read the logs.
   * @return RemoteEditLogManifest object.
   */
  public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
    // Collect RemoteEditLogs available from each FileJournalManager
    List<RemoteEditLog> allLogs = Lists.newArrayList();
    for (JournalAndStream j : journals) {
      if (j.getManager() instanceof FileJournalManager) {
        FileJournalManager fjm = (FileJournalManager) j.getManager();
        try {
          allLogs.addAll(fjm.getRemoteEditLogs(fromTxId));
        } catch (Throwable t) {
          LOG.warn("Cannot list edit logs in " + fjm, t);
        }
      }
    }

    // Group logs by their starting txid
    ImmutableListMultimap<Long, RemoteEditLog> logsByStartTxId =
        Multimaps.index(allLogs, RemoteEditLog.GET_START_TXID);
    long curStartTxId = fromTxId;

    List<RemoteEditLog> logs = Lists.newArrayList();
    while (true) {
      ImmutableList<RemoteEditLog> logGroup = logsByStartTxId.get(curStartTxId);
      if (logGroup.isEmpty()) {
        // we have a gap in logs - for example because we recovered some old
        // storage directory with ancient logs. Clear out any logs we've
        // accumulated so far, and then skip to the next segment of logs
        // after the gap.
        SortedSet<Long> startTxIds = Sets.newTreeSet(logsByStartTxId.keySet());
        startTxIds = startTxIds.tailSet(curStartTxId);
        if (startTxIds.isEmpty()) {
          break;
        } else {
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "Found gap in logs at "
                    + curStartTxId
                    + ": "
                    + "not returning previous logs in manifest.");
          }
          logs.clear();
          curStartTxId = startTxIds.first();
          continue;
        }
      }

      // Find the one that extends the farthest forward
      RemoteEditLog bestLog = Collections.max(logGroup);
      logs.add(bestLog);
      // And then start looking from after that point
      curStartTxId = bestLog.getEndTxId() + 1;
    }
    RemoteEditLogManifest ret = new RemoteEditLogManifest(logs);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Generated manifest for logs since " + fromTxId + ":" + ret);
    }
    return ret;
  }
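The gap handling above relies on SortedSet.tailSet: when no log group starts at the expected txid, jump ahead to the smallest start txid at or after it. A minimal sketch with hypothetical txids.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import java.util.SortedSet;

public final class TailSetSketch {

  /** Returns the smallest start txid at or after curStartTxId, or -1 if none remain. */
  static long nextStartTxId(Iterable<Long> startTxIds, long curStartTxId) {
    SortedSet<Long> candidates = Sets.newTreeSet(startTxIds).tailSet(curStartTxId);
    return candidates.isEmpty() ? -1L : candidates.first();
  }

  public static void main(String[] args) {
    // Logs start at 1, 101, and 201; nothing starts at 150, so skip ahead to 201.
    System.out.println(nextStartTxId(ImmutableList.of(1L, 101L, 201L), 150L));
  }
}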
Example 25
  public int createIntellijProject(
      File jsonTempFile,
      ProcessExecutor processExecutor,
      boolean generateMinimalProject,
      PrintStream stdOut,
      PrintStream stdErr)
      throws IOException {
    List<Module> modules = createModulesForProjectConfigs();
    writeJsonConfig(jsonTempFile, modules);

    List<String> modifiedFiles = Lists.newArrayList();

    // Process the JSON config to generate the .xml and .iml files for IntelliJ.
    ExitCodeAndOutput result = processJsonConfig(jsonTempFile, generateMinimalProject);
    if (result.exitCode != 0) {
      return result.exitCode;
    } else {
      // intellij.py writes the list of modified files to stdout, so parse stdout and add the
      // resulting file paths to the modifiedFiles list.
      Iterable<String> paths =
          Splitter.on('\n').trimResults().omitEmptyStrings().split(result.stdOut);
      Iterables.addAll(modifiedFiles, paths);
    }

    // Write out the project.properties files.
    List<String> modifiedPropertiesFiles = generateProjectDotPropertiesFiles(modules);
    modifiedFiles.addAll(modifiedPropertiesFiles);

    // Write out the .idea/compiler.xml file (the .idea/ directory is guaranteed to exist).
    CompilerXml compilerXml = new CompilerXml(modules);
    final String pathToCompilerXml = ".idea/compiler.xml";
    File compilerXmlFile = projectFilesystem.getFileForRelativePath(pathToCompilerXml);
    if (compilerXml.write(compilerXmlFile)) {
      modifiedFiles.add(pathToCompilerXml);
    }

    // If the user specified a post-processing script, then run it.
    if (pathToPostProcessScript.isPresent()) {
      String pathToScript = pathToPostProcessScript.get();
      Process process = Runtime.getRuntime().exec(new String[] {pathToScript});
      ProcessExecutor.Result postProcessResult = processExecutor.execute(process);
      int postProcessExitCode = postProcessResult.getExitCode();
      if (postProcessExitCode != 0) {
        return postProcessExitCode;
      }
    }

    // If any files have been modified by `buck project`, then list them for the user.
    if (!modifiedFiles.isEmpty()) {
      SortedSet<String> modifiedFilesInSortedOrder = Sets.newTreeSet(modifiedFiles);
      stdOut.printf("MODIFIED FILES:\n%s\n", Joiner.on('\n').join(modifiedFilesInSortedOrder));
    }
    // Blit stderr from intellij.py to parent stderr.
    stdErr.print(result.stdErr);

    return 0;
  }
Example 26
 private String formatDeleteEquals(Connection connection, String separator) throws SQLException {
   // sorting is necessary to ensure keys and values match up
   SortedSet<String> keys = Sets.newTreeSet();
   String[] primaryKeys = getPrimaryKeyColumns(connection);
   for (String key : primaryKeys) {
     keys.add(key + " = ?");
   }
   return StringUtilities.implodeItems(Lists.newArrayList(keys), separator);
 }
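The sorted "column = ?" fragments above could equally be joined with Guava's Joiner instead of the project's StringUtilities helper. A minimal sketch with hypothetical primary-key column names.

import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import java.util.SortedSet;

public final class DeleteClauseSketch {

  // Sorting keeps the placeholders in a deterministic order, so the key values can be
  // bound in the same sorted order.
  static String deleteEquals(String[] primaryKeys, String separator) {
    SortedSet<String> keys = Sets.newTreeSet();
    for (String key : primaryKeys) {
      keys.add(key + " = ?");
    }
    return Joiner.on(separator).join(keys);
  }

  public static void main(String[] args) {
    // Prints: id = ? AND tenant_id = ?
    System.out.println(deleteEquals(new String[] {"tenant_id", "id"}, " AND "));
  }
}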
Example 27
 public void testNewTreeSetEmptyNonGeneric() {
   TreeSet<LegacyComparable> set = Sets.newTreeSet();
   assertTrue(set.isEmpty());
   set.add(new LegacyComparable("foo"));
   set.add(new LegacyComparable("bar"));
   assertThat(set)
       .containsExactly(new LegacyComparable("bar"), new LegacyComparable("foo"))
       .inOrder();
 }
 public static <T> SortedSet<T> sortTopN(Iterable<T> iterable, int n, Comparator<T> comparator) {
   TreeSet<T> r = Sets.newTreeSet(comparator);
   for (T t : iterable) {
     r.add(t);
     if (r.size() > n) {
       r.pollLast();
     }
   }
   return r;
 }
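Each insertion that pushes the set past n evicts the element that sorts last under the comparator, so sortTopN keeps the n smallest elements. A short hypothetical JUnit-style check; it assumes sortTopN above is in the same class and that ImmutableSortedSet, Ordering, Arrays, and assertEquals are imported.

@Test
public void testSortTopNKeepsSmallest() {
  // 7, 2, 9, 4, 1 with n = 3 keeps the three smallest values under natural ordering.
  assertEquals(
      ImmutableSortedSet.of(1, 2, 4),
      sortTopN(Arrays.asList(7, 2, 9, 4, 1), 3, Ordering.<Integer>natural()));
}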
 @Override
 public void initialize(UimaContext ctx) throws ResourceInitializationException {
   super.initialize(ctx);
   try {
     out = IoUtils.openPrintWriter(outputFile);
     outEntries = Sets.newTreeSet();
   } catch (IOException e) {
     throw new ResourceInitializationException(e);
   }
 }
 @Test(enabled = true, dependsOnMethods = "testCreateTwoNodesWithRunScript")
 public void testCreateAnotherNodeWithANewContextToEnsureSharedMemIsntRequired() throws Exception {
   initializeContextAndClient();
   TreeSet<NodeMetadata> nodes = Sets.newTreeSet(client.runNodesWithTag(tag, 1, template));
   checkNodes(nodes, tag);
   NodeMetadata node = nodes.first();
   this.nodes.add(node);
   assertEquals(nodes.size(), 1);
   assertLocationSameOrChild(node.getLocation(), template.getLocation());
   assertEquals(node.getImage(), template.getImage());
 }