Example #1
  public static void main(String[] args) {
    Card n = null;
    List<Optional<Card>> board = new ArrayList<Optional<Card>>();
    List<Optional<Card>> holeCards = new ArrayList<Optional<Card>>();
    List<Optional<Card>> opponentHoleCards = new ArrayList<Optional<Card>>();

    holeCards.add(
        Optional.of(new Card(Suit.fromFirstLetterLowerCase("s"), Rank.fromFirstLetter("14"))));
    holeCards.add(
        Optional.of(new Card(Suit.fromFirstLetterLowerCase("c"), Rank.fromFirstLetter("13"))));

    opponentHoleCards.add(
        Optional.of(new Card(Suit.fromFirstLetterLowerCase("d"), Rank.fromFirstLetter("2"))));
    opponentHoleCards.add(
        Optional.of(new Card(Suit.fromFirstLetterLowerCase("h"), Rank.fromFirstLetter("3"))));

    // board.add(Optional.of(new Card( Suit.fromFirstLetterLowerCase("s"),
    // Rank.fromFirstLetter("13"))));
    // board.add(Optional.of(new Card( Suit.fromFirstLetterLowerCase("h"),
    // Rank.fromFirstLetter("2"))));
    // board.add(Optional.of(new Card( Suit.fromFirstLetterLowerCase("h"),
    // Rank.fromFirstLetter("3"))));
    // board.add(Optional.of(new Card( Suit.fromFirstLetterLowerCase("s"),
    // Rank.fromFirstLetter("13"))));
    board.add(Optional.fromNullable(n));
    board.add(Optional.fromNullable(n));
    board.add(Optional.fromNullable(n));
    board.add(Optional.fromNullable(n));
    board.add(Optional.fromNullable(n));

    AIHelper ai = new AIHelper();
    System.out.println(ai.getWinningChance(board, holeCards, opponentHoleCards));
  }
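Every snippet on this page uses Guava's com.google.common.base.Optional. The following is a minimal standalone sketch (not taken from any of the projects above) of the fromNullable / or / orNull idioms the examples rely on; the class name and values are illustrative only.

import com.google.common.base.Optional;

public class FromNullableIdioms {
  public static void main(String[] args) {
    // System.getProperty returns null when the property is not set.
    String maybeNull = System.getProperty("some.unset.property");

    // fromNullable turns a possibly-null reference into an Optional; null becomes absent().
    Optional<String> value = Optional.fromNullable(maybeNull);

    String withDefault = value.or("default");              // fall back to a plain default value
    Optional<String> chained = value.or(Optional.of("x")); // fall back to another Optional
    String backToNullable = value.orNull();                // unwrap back to a nullable reference

    System.out.println(value.isPresent()); // false when the property is unset
    System.out.println(withDefault);       // "default"
    System.out.println(chained.get());     // "x"
    System.out.println(backToNullable);    // null
  }
}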
Example #2
  HeliosSoloDeployment(final Builder builder) {
    final String username = Optional.fromNullable(builder.heliosUsername).or(randomString());

    this.dockerClient = checkNotNull(builder.dockerClient, "dockerClient");
    this.dockerHost = Optional.fromNullable(builder.dockerHost).or(DockerHost.fromEnv());
    this.containerDockerHost =
        Optional.fromNullable(builder.containerDockerHost).or(containerDockerHostFromEnv());
    this.namespace = Optional.fromNullable(builder.namespace).or(randomString());
    this.env = containerEnv();
    this.binds = containerBinds();

    final String heliosHost;
    final String heliosPort;
    // TODO(negz): Determine and propagate NetworkManager DNS servers?
    try {
      assertDockerReachableFromContainer();
      if (dockerHost.address().equals("localhost") || dockerHost.address().equals("127.0.0.1")) {
        heliosHost = containerGateway();
      } else {
        heliosHost = dockerHost.address();
      }
      this.heliosContainerId = deploySolo(heliosHost);
      heliosPort = getHostPort(this.heliosContainerId, HELIOS_MASTER_PORT);
    } catch (HeliosDeploymentException e) {
      throw new AssertionError("Unable to deploy helios-solo container.", e);
    }

    // Running the String host:port through HostAndPort does some validation for us.
    this.heliosClient =
        HeliosClient.newBuilder()
            .setUser(username)
            .setEndpoints(
                "http://" + HostAndPort.fromString(dockerHost.address() + ":" + heliosPort))
            .build();
  }
    private ConversionConfig(Config config, Table table, String destinationFormat) {

      Preconditions.checkArgument(
          config.hasPath(DESTINATION_TABLE_KEY),
          String.format("Key %s.%s is not specified", destinationFormat, DESTINATION_TABLE_KEY));
      Preconditions.checkArgument(
          config.hasPath(DESTINATION_DB_KEY),
          String.format("Key %s.%s is not specified", destinationFormat, DESTINATION_DB_KEY));
      Preconditions.checkArgument(
          config.hasPath(DESTINATION_DATA_PATH_KEY),
          String.format(
              "Key %s.%s is not specified", destinationFormat, DESTINATION_DATA_PATH_KEY));

      // Required
      this.destinationFormat = destinationFormat;
      this.destinationTableName = resolveTemplate(config.getString(DESTINATION_TABLE_KEY), table);
      this.destinationStagingTableName =
          String.format(
              "%s_%s", this.destinationTableName, "staging"); // Fixed and non-configurable
      this.destinationDbName = resolveTemplate(config.getString(DESTINATION_DB_KEY), table);
      this.destinationDataPath =
          resolveTemplate(config.getString(DESTINATION_DATA_PATH_KEY), table);

      // Optional
      this.clusterBy = ConfigUtils.getStringList(config, CLUSTER_BY_KEY);
      this.numBuckets = Optional.fromNullable(ConfigUtils.getInt(config, NUM_BUCKETS_KEY, null));
      this.hiveRuntimeProperties =
          ConfigUtils.configToProperties(
              ConfigUtils.getConfig(
                  config, HIVE_RUNTIME_PROPERTIES_KEY_PREFIX, ConfigFactory.empty()));
      this.evolutionEnabled = ConfigUtils.getBoolean(config, EVOLUTION_ENABLED, false);
      this.rowLimit = Optional.fromNullable(ConfigUtils.getInt(config, ROW_LIMIT_KEY, null));
      this.sourceDataPathIdentifier =
          ConfigUtils.getStringList(config, SOURCE_DATA_PATH_IDENTIFIER_KEY);
    }
Example #4
  @Override
  public Optional<ContentHierarchyAndSummaries> process(
      ProgData progData, Channel channel, DateTimeZone zone, Timestamp updatedAt) {
    try {
      log.trace("Channel: {} ProgData: {} UpdatedAt: {}", channel, progData, updatedAt);
      if (shouldNotProcess(progData)) {
        return Optional.absent();
      }

      Optional<Brand> possibleBrand = getBrand(progData, channel, updatedAt);
      Brand brandSummary = null;
      if (possibleBrand.isPresent() && hasBrandSummary(progData)) {
        brandSummary = getBrandSummary(progData, possibleBrand.get(), updatedAt);
      }

      Optional<Series> possibleSeries = getSeries(progData, channel, updatedAt);
      Series seriesSummary = null;
      if (possibleSeries.isPresent() && hasSeriesSummary(progData)) {
        seriesSummary = getSeriesSummary(progData, possibleSeries.get(), updatedAt);
      }

      boolean isEpisode = possibleBrand.isPresent() || possibleSeries.isPresent();

      ItemAndBroadcast itemAndBroadcast =
          isClosedBrand(possibleBrand)
              ? getClosedEpisode(progData, channel, zone, updatedAt)
              : getFilmOrEpisode(progData, channel, zone, isEpisode, updatedAt);

      Item item = itemAndBroadcast.getItem();

      // TODO: there is an unknown bug preventing this from working (MBST-17174)
      if (!isEpisode) {
        item.setParentRef(null);
      }

      item.setGenericDescription(isGenericDescription(progData));
      item.addAlias(PaHelper.getProgIdAlias(progData.getProgId()));
      item.setLastUpdated(updatedAt.toDateTimeUTC());

      return Optional.of(
          new ContentHierarchyAndSummaries(
              possibleBrand,
              possibleSeries,
              item,
              itemAndBroadcast.getBroadcast().requireValue(),
              Optional.fromNullable(brandSummary),
              Optional.fromNullable(seriesSummary)));
    } catch (Exception e) {
      log.error("Failed to process PA programme data", e);
      adapterLog.record(
          new AdapterLogEntry(Severity.ERROR)
              .withCause(e)
              .withSource(PaProgrammeProcessor.class)
              .withDescription(e.getMessage()));
    }
    return Optional.absent();
  }
 @Override
 public TimestampFormatter get() {
   TimestamperConfig config = GlobalConfiguration.all().get(TimestamperConfig.class);
   // JENKINS-16778: The request can be null when the slave goes off-line.
   Optional<StaplerRequest> request = Optional.fromNullable(Stapler.getCurrentRequest());
   Optional<String> timeZoneId =
       Optional.fromNullable(System.getProperty(TIME_ZONE_PROPERTY));
   return new TimestampFormatter(
       config.getSystemTimeFormat(), config.getElapsedTimeFormat(), request, timeZoneId);
 }
Example #6
  private void enqueueHealthAndNewTaskChecks() {
    final long start = System.currentTimeMillis();

    final List<SingularityTask> activeTasks = taskManager.getActiveTasks();
    final Map<SingularityTaskId, SingularityTask> activeTaskMap =
        Maps.uniqueIndex(activeTasks, SingularityTaskIdHolder.getTaskIdFunction());

    final Map<SingularityTaskId, List<SingularityTaskHistoryUpdate>> taskUpdates =
        taskManager.getTaskHistoryUpdates(activeTaskMap.keySet());

    final Map<SingularityDeployKey, SingularityPendingDeploy> pendingDeploys =
        Maps.uniqueIndex(
            deployManager.getPendingDeploys(), SingularityDeployKey.FROM_PENDING_TO_DEPLOY_KEY);
    final Map<String, SingularityRequestWithState> idToRequest =
        Maps.uniqueIndex(
            requestManager.getRequests(), SingularityRequestWithState.REQUEST_STATE_TO_REQUEST_ID);

    requestManager.getActiveRequests();
    int enqueuedNewTaskChecks = 0;
    int enqueuedHealthchecks = 0;

    for (Map.Entry<SingularityTaskId, SingularityTask> entry : activeTaskMap.entrySet()) {
      SingularityTaskId taskId = entry.getKey();
      SingularityTask task = entry.getValue();
      SimplifiedTaskState simplifiedTaskState =
          SingularityTaskHistoryUpdate.getCurrentState(taskUpdates.get(taskId));

      if (simplifiedTaskState != SimplifiedTaskState.DONE) {
        SingularityDeployKey deployKey =
            new SingularityDeployKey(taskId.getRequestId(), taskId.getDeployId());
        Optional<SingularityPendingDeploy> pendingDeploy =
            Optional.fromNullable(pendingDeploys.get(deployKey));
        Optional<SingularityRequestWithState> request =
            Optional.fromNullable(idToRequest.get(taskId.getRequestId()));

        if (!pendingDeploy.isPresent()) {
          newTaskChecker.enqueueNewTaskCheck(task, request, healthchecker);
          enqueuedNewTaskChecks++;
        }
        if (simplifiedTaskState == SimplifiedTaskState.RUNNING) {
          if (healthchecker.enqueueHealthcheck(task, pendingDeploy, request)) {
            enqueuedHealthchecks++;
          }
        }
      }
    }

    LOG.info(
        "Enqueued {} health checks and {} new task checks (out of {} active tasks) in {}",
        enqueuedHealthchecks,
        enqueuedNewTaskChecks,
        activeTasks.size(),
        JavaUtils.duration(start));
  }
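Example #6 above wraps plain Map.get lookups in Optional.fromNullable so that a missing key surfaces as Optional.absent() rather than a raw null. A standalone sketch of that pattern, with illustrative names and values only:

import com.google.common.base.Optional;

import java.util.HashMap;
import java.util.Map;

public class MapLookupSketch {
  public static void main(String[] args) {
    Map<String, Integer> portsByService = new HashMap<String, Integer>();
    portsByService.put("web", 8080);

    // A hit yields a present Optional; a miss yields absent() instead of null.
    Optional<Integer> known = Optional.fromNullable(portsByService.get("web"));
    Optional<Integer> missing = Optional.fromNullable(portsByService.get("no-such-service"));

    System.out.println(known.get());         // 8080
    System.out.println(missing.isPresent()); // false
    System.out.println(missing.or(-1));      // supply a sentinel default for the missing key
  }
}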
Example #7
  @Override
  public PaginaDTO extract(Context context) {
    PaginaDTO pagina = new PaginaDTO();

    Optional<Integer> oNumero = Optional.fromNullable(context.getParameterAsInteger("numero"));
    Optional<Integer> oTamano = Optional.fromNullable(context.getParameterAsInteger("tamano"));
    pagina.setNumero(oNumero.or(numero));
    pagina.setTamano(oTamano.or(tamano));

    return pagina;
  }
Example #8
 public String toString() {
   switch (type) {
     case ADDED:
       return "[MISSING] -> "
           + TextValueSerializer.asString(Optional.fromNullable((Object) newGeometry.orNull()));
     case REMOVED:
       return TextValueSerializer.asString(Optional.fromNullable((Object) oldGeometry.orNull()))
           + " -> [MISSING]";
     case MODIFIED:
     default:
       return diff.toString();
   }
 }
  public File asAFile() {
    Optional<String> defaultPath = Optional.fromNullable(CommandLine.find(exeName));

    Optional<String> configuredBinaryPath =
        Optional.fromNullable(environmentVariables.getProperty(exeProperty));
    String exePath = configuredBinaryPath.or(defaultPath).orNull();

    File executableLocation = (exePath != null) ? new File(exePath) : null;

    if (reportMissingBinary) {
      checkForMissingBinaries(executableLocation);
    }
    return executableLocation;
  }
Example #10
File: Parser.java Project: h87kg/buck
  /**
   * @param buildTargets the build targets to generate a dependency graph for.
   * @param defaultIncludes the files to include before executing build files.
   * @param eventBus used to log events while parsing.
   * @return the dependency graph containing the build targets and their related targets.
   */
  public DependencyGraph parseBuildFilesForTargets(
      Iterable<BuildTarget> buildTargets, Iterable<String> defaultIncludes, BuckEventBus eventBus)
      throws BuildFileParseException, BuildTargetException, IOException {
    // Make sure that knownBuildTargets is initially populated with the BuildRuleBuilders for the
    // seed BuildTargets for the traversal.
    eventBus.post(ParseEvent.started(buildTargets));
    DependencyGraph graph = null;
    try (ProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createParser(defaultIncludes)) {
      if (!isCacheComplete(defaultIncludes)) {
        Set<File> buildTargetFiles = Sets.newHashSet();
        for (BuildTarget buildTarget : buildTargets) {
          File buildFile = buildTarget.getBuildFile(projectFilesystem);
          boolean isNewElement = buildTargetFiles.add(buildFile);
          if (isNewElement) {
            parseBuildFile(buildFile, defaultIncludes, buildFileParser);
          }
        }
      }

      graph = findAllTransitiveDependencies(buildTargets, defaultIncludes, buildFileParser);
      return graph;
    } finally {
      eventBus.post(ParseEvent.finished(buildTargets, Optional.fromNullable(graph)));
    }
  }
Example #11
 public ContentQueryBuilder withPublisher(Publisher publisher) {
   Preconditions.checkArgument(
       this.urls.isEmpty() && this.ids.isEmpty(),
       "Cannot set publisher if urls or ids are set, they're mutually exclusive");
   this.publisher = Optional.fromNullable(publisher);
   return this;
 }
  /**
   * Load an xml file and perform placeholder substitution
   *
   * @param manifestInfo the android manifest information, e.g. whether it is a library, an
   *     overlay or a main manifest file.
   * @param selectors all the libraries selectors
   * @param mergingReportBuilder the merging report to store events and errors.
   * @return a loaded manifest info.
   * @throws MergeFailureException if the xml document cannot be loaded
   */
  private LoadedManifestInfo load(
      ManifestInfo manifestInfo,
      KeyResolver<String> selectors,
      MergingReport.Builder mergingReportBuilder)
      throws MergeFailureException {

    XmlDocument xmlDocument;
    try {
      xmlDocument =
          XmlLoader.load(
              selectors,
              mSystemPropertyResolver,
              manifestInfo.mName,
              manifestInfo.mLocation,
              manifestInfo.getType(),
              manifestInfo.getMainManifestPackageName());
    } catch (Exception e) {
      throw new MergeFailureException(e);
    }

    String originalPackageName = xmlDocument.getPackageName();
    MergingReport.Builder builder =
        manifestInfo.getType() == XmlDocument.Type.MAIN
            ? mergingReportBuilder
            : new MergingReport.Builder(mergingReportBuilder.getLogger());

    builder.getActionRecorder().recordDefaultNodeAction(xmlDocument.getRootNode());

    // Perform placeholder substitution; this must be done early in case placeholders are used
    // in key attributes.
    performPlaceHolderSubstitution(manifestInfo, xmlDocument, builder);

    return new LoadedManifestInfo(
        manifestInfo, Optional.fromNullable(originalPackageName), xmlDocument);
  }
Example #13
 @Override
 /* @Nullable */
 protected Map<JvmIdentifiableElement, LightweightTypeReference> getFlattenedReassignedTypes() {
   if (flattenedReassignedTypes != null) {
     // already computed
     return flattenedReassignedTypes.orNull();
   }
   Map<JvmIdentifiableElement, LightweightTypeReference> result =
       parent.getFlattenedReassignedTypes();
   if (result == null) {
     // parent doesn't have reassigned types
     // use only locally reassigned types
     return (flattenedReassignedTypes = Optional.fromNullable(super.getFlattenedReassignedTypes()))
         .orNull();
   }
   Map<JvmIdentifiableElement, LightweightTypeReference> myReassignedTypes =
       basicGetReassignedTypes();
   if (myReassignedTypes.isEmpty()) {
     // no locally reassigned types, use result from parent which was already checked for null
     return (flattenedReassignedTypes = Optional.of(result)).orNull();
   }
   // merge parent's reassigned types and locally reassigned types
   result = Maps.newHashMap(result);
   result.putAll(myReassignedTypes);
   return (flattenedReassignedTypes = Optional.of(result)).orNull();
 }
Example #14
  // Dispatch a job for the given rule (if we haven't already) and return a future tracking its
  // result.
  private synchronized ListenableFuture<BuildResult> getBuildRuleResult(
      final BuildRule rule,
      final BuildContext context,
      final ConcurrentLinkedQueue<ListenableFuture<Void>> asyncCallbacks) {

    // If the rule is already executing, return its result future from the cache.
    Optional<ListenableFuture<BuildResult>> existingResult =
        Optional.fromNullable(results.get(rule.getBuildTarget()));
    if (existingResult.isPresent()) {
      return existingResult.get();
    }

    // Otherwise submit a new job for this rule, cache the future, and return it.
    ListenableFuture<RuleKey> ruleKey = calculateRuleKey(rule, context);
    ListenableFuture<BuildResult> result =
        Futures.transform(
            ruleKey,
            new AsyncFunction<RuleKey, BuildResult>() {
              @Override
              public ListenableFuture<BuildResult> apply(@Nonnull RuleKey input) throws Exception {
                return processBuildRule(rule, context, asyncCallbacks);
              }
            },
            service);
    results.put(rule.getBuildTarget(), result);
    return result;
  }
 /** @return The payment request from the payment session object */
 public Optional<Protos.PaymentRequest> getPaymentSessionPaymentRequest() {
   if (hasPaymentSession()) {
     return Optional.fromNullable(paymentSession.get().getPaymentRequest());
   } else {
     return Optional.absent();
   }
 }
 /** @return The value from the payment session object */
 public Optional<Coin> getPaymentSessionValue() {
   if (hasPaymentSession()) {
     return Optional.fromNullable(paymentSession.get().getValue());
   } else {
     return Optional.absent();
   }
 }
Example #17
 private TestRunningOptions getTestRunningOptions(CommandRunnerParams params) {
   return TestRunningOptions.builder()
       .setUsingOneTimeOutputDirectories(isUsingOneTimeOutput)
       .setCodeCoverageEnabled(isCodeCoverageEnabled)
       .setRunAllTests(isRunAllTests())
       .setTestSelectorList(testSelectorOptions.getTestSelectorList())
       .setShouldExplainTestSelectorList(testSelectorOptions.shouldExplain())
       .setResultsCacheEnabled(isResultsCacheEnabled(params.getBuckConfig()))
       .setDryRun(isDryRun)
       .setShufflingTests(isShufflingTests)
       .setPathToXmlTestOutput(Optional.fromNullable(pathToXmlTestOutput))
       .setPathToJavaAgent(Optional.fromNullable(pathToJavaAgent))
       .setCoverageReportFormat(coverageReportFormat)
       .setCoverageReportTitle(coverageReportTitle)
       .build();
 }
 /**
  * Return an Optional&lt;String&gt; representing the path to a web page for a TopLevelPage class.
  * The path is extracted using reflection from the {@link WebPagePath} annotation, if present.
  *
  * @param pageClass the page class to inspect for the {@link WebPagePath} annotation
  * @return - the Optional&lt;String&gt; that is present with a value if the given Page class was
  *     annotated with {@link com.jivesoftware.selenium.pagefactory.framework.pages.WebPagePath}
  */
 public Optional<String> getWebPagePathForClass(Class<? extends TopLevelPage> pageClass) {
   WebPagePath annotation = pageClass.getAnnotation(WebPagePath.class);
   if (annotation == null) {
     return Optional.absent();
   }
   return Optional.fromNullable(annotation.path());
 }
 /** @return The payment URL from the payment session object */
 public Optional<String> getPaymentSessionPaymentUrl() {
   if (hasPaymentSession()) {
     return Optional.fromNullable(paymentSession.get().getPaymentUrl());
   } else {
     return Optional.absent();
   }
 }
Example #20
 @Override
 public String asText() {
   switch (type) {
     case ADDED:
       return type.name().toCharArray()[0]
           + "\t"
           + TextValueSerializer.asString(Optional.fromNullable((Object) newGeometry.orNull()));
     case REMOVED:
       return type.name().toCharArray()[0]
           + "\t"
           + TextValueSerializer.asString(Optional.fromNullable((Object) oldGeometry.orNull()));
     case MODIFIED:
     default:
       return type.name().toCharArray()[0] + "\t" + diff.asText();
   }
 }
Example #21
 @Override
 public Optional<T> lookupComputedNode(Cell cell, BuildTarget target)
     throws BuildTargetException {
   try (AutoCloseableLock readLock = rawAndComputedNodesLock.readLock()) {
     return Optional.fromNullable(allComputedNodes.getIfPresent(target));
   }
 }
Example #22
 @Test
 public void testPartialApplication() throws Exception {
   insert(points1, points2);
   Patch patch = new Patch();
   String pathRemove = NodeRef.appendChild(pointsName, points2.getIdentifier().getID());
   patch.addRemovedFeature(pathRemove, points2, RevFeatureType.build(pointsType));
   String pathModify = NodeRef.appendChild(pointsName, points1B.getIdentifier().getID());
   Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
   Optional<?> oldValue = Optional.fromNullable(points1B.getProperty("extra").getValue());
   GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, null);
   map.put(modifiedPointsType.getDescriptor("extra"), diff);
   FeatureDiff featureDiff =
       new FeatureDiff(
           pathModify,
           map,
           RevFeatureType.build(modifiedPointsType),
           RevFeatureType.build(pointsType));
   patch.addModifiedFeature(featureDiff);
   Patch rejected =
       geogit.command(ApplyPatchOp.class).setPatch(patch).setApplyPartial(true).call();
   assertFalse(rejected.isEmpty());
   RevTree root = repo.getWorkingTree().getTree();
   assertNotNull(root);
   Optional<Node> featureBlobId = findTreeChild(root, pathRemove);
   assertFalse(featureBlobId.isPresent());
   // now we take the rejected patch and apply it, and the new rejected should be identical to
   // it
   Patch newRejected =
       geogit.command(ApplyPatchOp.class).setPatch(rejected).setApplyPartial(true).call();
   assertEquals(rejected, newRejected);
 }
Example #23
 @Override
 public <T extends DataManipulator<T, I>, I extends ImmutableDataManipulator<I, T>>
     Optional<DataManipulatorBuilder<T, I>> getBuilder(Class<T> manipulatorClass) {
   return Optional.fromNullable(
       (DataManipulatorBuilder<T, I>)
           (Object) this.builderMap.get(checkNotNull(manipulatorClass)));
 }
Example #24
 @Test
 public void testReversedPatch() throws Exception {
   insert(points1, points2);
   Patch patch = new Patch();
   String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
   Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
   Optional<?> oldValue = Optional.fromNullable(points1.getProperty("sp").getValue());
   GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, Optional.of("new"));
   map.put(pointsType.getDescriptor("sp"), diff);
   FeatureDiff featureDiff =
       new FeatureDiff(
           path, map, RevFeatureType.build(pointsType), RevFeatureType.build(pointsType));
   patch.addModifiedFeature(featureDiff);
   String removedPath = NodeRef.appendChild(pointsName, points2.getIdentifier().getID());
   patch.addRemovedFeature(removedPath, points2, RevFeatureType.build(pointsType));
   String addedPath = NodeRef.appendChild(pointsName, points3.getIdentifier().getID());
   patch.addAddedFeature(addedPath, points3, RevFeatureType.build(pointsType));
   geogit.command(ApplyPatchOp.class).setPatch(patch).call();
   geogit.command(ApplyPatchOp.class).setPatch(patch.reversed()).call();
   RevTree root = repo.getWorkingTree().getTree();
   Optional<Node> featureBlobId = findTreeChild(root, removedPath);
   assertTrue(featureBlobId.isPresent());
   featureBlobId = findTreeChild(root, addedPath);
   assertFalse(featureBlobId.isPresent());
   Optional<RevFeature> feature =
       geogit.command(RevObjectParse.class).setRefSpec("WORK_HEAD:" + path).call(RevFeature.class);
   assertTrue(feature.isPresent());
   assertEquals(oldValue, feature.get().getValues().get(0));
 }
  /**
   * Asynchronously connect to rabbitmq, and retry until successful
   *
   * @param env dropwizard environment
   * @param deliveryExecutor the executor used by rabbitmq client to deliver messages
   * @param name name of rabbitmq connection
   * @param callback callback when done - which may be after application start
   */
  public void buildRetryInitialConnect(
      final Environment env,
      final ExecutorService deliveryExecutor,
      final String name,
      final ConnectedCallback callback) {
    final com.rabbitmq.client.ConnectionFactory connectionFactory = makeConnectionFactory();
    final ScheduledExecutorService initialConnectExecutor =
        env.lifecycle()
            .scheduledExecutorService(name + "-initial-connect-thread")
            .threads(1)
            .build();

    final ConnectionMetrics connectionMetrics =
        Optional.fromNullable(metrics).or(() -> new DefaultConnectionMetrics(name, env.metrics()));
    final WrappedConnectionMetrics connectionMetricsWrapper =
        new WrappedConnectionMetrics(connectionMetrics);
    final ConnectedCallback callbackWithMetrics =
        connection -> {
          final Connection metricsConnection = connectionMetricsWrapper.wrap(connection);
          callback.connected(metricsConnection);
        };
    final ConnectAsync connectAsync =
        new ConnectAsync(
            connectionFactory, deliveryExecutor, name, initialConnectExecutor, callbackWithMetrics);
    registerWithEnvironment(env.healthChecks(), env.lifecycle(), connectAsync::getConnection, name);
    connectAsync.run();
  }
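The connectionMetrics line above uses the Supplier overload of or(), so the fallback is only constructed when the wrapped value is actually null. A standalone sketch of that lazy-default idiom (names and values here are illustrative, not from the source):

import com.google.common.base.Optional;

public class LazyDefaultSketch {
  private static String buildFallback() {
    System.out.println("building fallback"); // only runs when the configured value was null
    return "generated-name";
  }

  public static void main(String[] args) {
    String configured = null; // e.g. an optional setting the caller never provided

    // or(Supplier) defers creating the default until it is known to be needed.
    String name = Optional.fromNullable(configured).or(() -> buildFallback());

    System.out.println(name); // "generated-name"
  }
}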
Example #26
 @Test
 public void testModifyFeatureAttributePatch() throws Exception {
   insert(points1);
   Patch patch = new Patch();
   String path = NodeRef.appendChild(pointsName, points1.getIdentifier().getID());
   Map<PropertyDescriptor, AttributeDiff> map = Maps.newHashMap();
   Optional<?> oldValue = Optional.fromNullable(points1.getProperty("sp").getValue());
   GenericAttributeDiffImpl diff = new GenericAttributeDiffImpl(oldValue, Optional.of("new"));
   map.put(pointsType.getDescriptor("sp"), diff);
   FeatureDiff featureDiff =
       new FeatureDiff(
           path, map, RevFeatureType.build(pointsType), RevFeatureType.build(pointsType));
   patch.addModifiedFeature(featureDiff);
   geogit.command(ApplyPatchOp.class).setPatch(patch).call();
   RevTree root = repo.getWorkingTree().getTree();
   Optional<Node> featureBlobId = findTreeChild(root, path);
   assertTrue(featureBlobId.isPresent());
   Iterator<DiffEntry> unstaged = repo.getWorkingTree().getUnstaged(pointsName);
   ArrayList<DiffEntry> diffs = Lists.newArrayList(unstaged);
   assertEquals(2, diffs.size());
   Optional<RevFeature> feature =
       geogit.command(RevObjectParse.class).setRefSpec("WORK_HEAD:" + path).call(RevFeature.class);
   assertTrue(feature.isPresent());
   ImmutableList<Optional<Object>> values = feature.get().getValues();
   assertEquals("new", values.get(0).get());
 }
 private Optional<InetAddress> getRemoteAddress(byte[] addressAsBytes) {
   try {
     return Optional.fromNullable(InetAddress.getByAddress(addressAsBytes));
   } catch (Exception ex) {
     return Optional.absent();
   }
 }
  private static Optional<IScheduledTask> getActiveInstance(
      TaskStore taskStore, IJobKey job, int instanceId) {

    return Optional.fromNullable(
        Iterables.getOnlyElement(
            taskStore.fetchTasks(Query.instanceScoped(job, instanceId).active()), null));
  }
 @Override
 public ReconfigurationTaskStatus getReconfigurationStatus() throws IOException {
   GetReconfigurationStatusResponseProto response;
   Map<PropertyChange, Optional<String>> statusMap = null;
   long startTime;
   long endTime = 0;
   try {
     response = rpcProxy.getReconfigurationStatus(NULL_CONTROLLER, VOID_GET_RECONFIG_STATUS);
     startTime = response.getStartTime();
     if (response.hasEndTime()) {
       endTime = response.getEndTime();
     }
     if (response.getChangesCount() > 0) {
       statusMap = Maps.newHashMap();
       for (GetReconfigurationStatusConfigChangeProto change : response.getChangesList()) {
         PropertyChange pc =
             new PropertyChange(change.getName(), change.getNewValue(), change.getOldValue());
         String errorMessage = null;
         if (change.hasErrorMessage()) {
           errorMessage = change.getErrorMessage();
         }
         statusMap.put(pc, Optional.fromNullable(errorMessage));
       }
     }
   } catch (ServiceException e) {
     throw ProtobufHelper.getRemoteException(e);
   }
   return new ReconfigurationTaskStatus(startTime, endTime, statusMap);
 }
  @Override
  public void decorateContainerConfig(
      Job job, ImageInfo imageInfo, ContainerConfig.Builder containerConfig) {
    final ContainerConfig imageConfig = imageInfo.config();

    // Inject syslog-redirector in the entrypoint to capture std out/err
    final String syslogRedirectorPath =
        Optional.fromNullable(job.getEnv().get("SYSLOG_REDIRECTOR"))
            .or("/helios/syslog-redirector");

    final List<String> entrypoint =
        Lists.newArrayList(
            syslogRedirectorPath, "-h", syslogHostPort, "-n", job.getId().toString(), "--");
    if (imageConfig.entrypoint() != null) {
      entrypoint.addAll(imageConfig.entrypoint());
    }
    containerConfig.entrypoint(entrypoint);

    // If there's no explicit container cmd specified, copy over the one from the image.
    // Only setting the entrypoint causes dockerd to not use the image cmd.
    if ((containerConfig.cmd() == null || containerConfig.cmd().isEmpty())
        && imageConfig.cmd() != null) {
      containerConfig.cmd(imageConfig.cmd());
    }

    final Set<String> volumes = Sets.newHashSet();
    if (containerConfig.volumes() != null) {
      volumes.addAll(containerConfig.volumes());
    }
    volumes.add("/helios");
    containerConfig.volumes(volumes);
  }