Example #1
 @GET
 @Path("job/{cluster}/{jobId}")
 @Produces(MediaType.APPLICATION_JSON)
 public JobDetails getJobById(
     @PathParam("cluster") String cluster, @PathParam("jobId") String jobId) throws IOException {
   LOG.info("Fetching JobDetails for jobId=" + jobId);
   Stopwatch timer = new Stopwatch().start();
   serializationContext.set(new SerializationContext(SerializationContext.DetailLevel.EVERYTHING));
   JobDetails jobDetails = getJobHistoryService().getJobByJobID(cluster, jobId);
   timer.stop();
   if (jobDetails != null) {
     LOG.info(
         "For job/{cluster}/{jobId} with input query:"
             + " job/"
             + cluster
             + SLASH
             + jobId
             + " fetched jobDetails for "
             + jobDetails.getJobName()
             + " in "
             + timer);
   } else {
     LOG.info(
         "For job/{cluster}/{jobId} with input query:"
             + " job/"
             + cluster
             + SLASH
             + jobId
             + " No jobDetails found, but spent "
             + timer);
   }
   return jobDetails;
 }
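Note: this example (like #5, #17, #18, and several others below) uses the pre-Guava-15 new Stopwatch() constructor, which was deprecated in Guava 15 and later removed in favor of factory methods. A minimal self-contained sketch of the modern equivalent, assuming Guava 15+:

 import com.google.common.base.Stopwatch;
 import java.util.concurrent.TimeUnit;

 public class StopwatchSketch {
   public static void main(String[] args) throws InterruptedException {
     Stopwatch timer = Stopwatch.createStarted(); // or createUnstarted() then start()
     Thread.sleep(100);                           // stand-in for the work being measured
     timer.stop();
     System.out.println(timer);                                // human-readable, e.g. "100.1 ms"
     System.out.println(timer.elapsed(TimeUnit.MILLISECONDS)); // raw value for logs/metrics
   }
 }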
Example #2
  @Override
  public void execute() throws IOException, RecommenderBuildException {
    LenskitConfiguration dataConfig = input.getConfiguration();
    LenskitRecommenderEngineBuilder builder = LenskitRecommenderEngine.newBuilder();
    for (LenskitConfiguration config : environment.loadConfigurations(getConfigFiles())) {
      builder.addConfiguration(config);
    }
    builder.addConfiguration(dataConfig, ModelDisposition.EXCLUDED);

    Stopwatch timer = Stopwatch.createStarted();
    LenskitRecommenderEngine engine = builder.build();
    timer.stop();
    logger.info("built model in {}", timer);
    File output = getOutputFile();
    CompressionMode comp = CompressionMode.autodetect(output);
    logger.info("writing model to {}", output);
    Closer closer = Closer.create();
    try {
      OutputStream stream = closer.register(new FileOutputStream(output));
      stream = closer.register(comp.wrapOutput(stream));
      engine.write(stream);
    } catch (Throwable th) { // NOSONAR using a closer
      throw closer.rethrow(th);
    } finally {
      closer.close();
    }
  }
Example #3
  @SuppressWarnings("unchecked")
  public static void register() throws IOException {
    Stopwatch watch = Stopwatch.createStarted();

    InputStream in = PoreEventWrapper.class.getResourceAsStream("events.txt");
    if (in == null) {
      Pore.getLogger().warn("No registered events found, Bukkit events will not be called.");
      return;
    }

    try (BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
      String line;
      while ((line = reader.readLine()) != null) {
        line = line.trim();
        if (line.isEmpty() || line.charAt(0) == '#') {
          continue;
        }

        try {
          register((Class<? extends Event>) Class.forName(line));
        } catch (ClassNotFoundException e) {
          Pore.getLogger().warn("Failed to register class {} as an event", line, e);
        }
      }
    }

    Pore.getLogger().debug("Registered events in {}", watch.stop());
  }
Example #4
  @Override
  public void write(OntologyVersion o, Collection<Statement> statements)
      throws IOException, ParseException {
    LOG.debug(
        "Exporting to Meta Snomed Model in TriG format. \nGraph name is <" + GRAPH_NAME + ">");
    Stopwatch stopwatch = new Stopwatch().start();

    int counter = 1;
    for (Concept c : o.getConcepts()) {
      parse(c);
      counter++;
      if (counter % 10000 == 0) {
        LOG.info("Processed {} concepts", counter);
      }
    }

    counter = 1;
    for (Statement s : statements) {
      write(s);
      counter++;
      if (counter % 10000 == 0) {
        LOG.info("Processed {} statements", counter);
      }
    }
    footer();

    stopwatch.stop();
    LOG.info("Completed Meta Snomed export in " + stopwatch.elapsed(TimeUnit.SECONDS) + " seconds");
  }
Example #5
 @GET
 @Path("jobFlow/{cluster}/{jobId}")
 @Produces(MediaType.APPLICATION_JSON)
 public Flow getJobFlowById(@PathParam("cluster") String cluster, @PathParam("jobId") String jobId)
     throws IOException {
   LOG.info(String.format("Fetching Flow for cluster=%s, jobId=%s", cluster, jobId));
   Stopwatch timer = new Stopwatch().start();
   serializationContext.set(new SerializationContext(SerializationContext.DetailLevel.EVERYTHING));
   Flow flow = getJobHistoryService().getFlowByJobID(cluster, jobId, false);
   timer.stop();
   if (flow != null) {
     LOG.info(
         "For jobFlow/{cluster}/{jobId} with input query: "
             + "jobFlow/"
             + cluster
             + SLASH
             + jobId
             + " fetched flow "
             + flow.getFlowName()
             + " with #jobs "
             + flow.getJobCount()
             + " in "
             + timer);
   } else {
     LOG.info(
         "For jobFlow/{cluster}/{jobId} with input query: "
             + "jobFlow/"
             + cluster
             + SLASH
             + jobId
             + " No flow found, spent "
             + timer);
   }
   return flow;
 }
Example #6
  @Override
  protected void shutDown() throws Exception {
    LOG.debug("Stopping InputSetupService");
    eventBus.unregister(this);

    for (InputState state : inputRegistry.getRunningInputs()) {
      MessageInput input = state.getMessageInput();

      LOG.info(
          "Attempting to close input <{}> [{}].", input.getUniqueReadableId(), input.getName());

      Stopwatch s = Stopwatch.createStarted();
      try {
        input.stop();

        LOG.info(
            "Input <{}> closed. Took [{}ms]",
            input.getUniqueReadableId(),
            s.elapsed(TimeUnit.MILLISECONDS));
      } catch (Exception e) {
        LOG.error(
            "Unable to stop input <{}> [{}]: " + e.getMessage(),
            input.getUniqueReadableId(),
            input.getName());
      } finally {
        s.stop();
      }
    }
    LOG.debug("Stopped InputSetupService");
  }
Example #7
  public AggregateNumericMetric getSummaryAggregate(
      List<Integer> scheduleIds, long beginTime, long endTime) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
      DateTime begin = new DateTime(beginTime);

      if (dateTimeService.isInRawDataRange(new DateTime(beginTime))) {
        Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleIds, beginTime, endTime);
        return calculateAggregatedRaw(metrics, beginTime);
      }
      Bucket bucket = getBucket(begin);
      List<AggregateNumericMetric> metrics = loadMetrics(scheduleIds, beginTime, endTime, bucket);

      return calculateAggregate(metrics, beginTime, bucket);
    } finally {
      stopwatch.stop();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating group summary aggregate for [scheduleIds: "
                + scheduleIds
                + ", beginTime: "
                + beginTime
                + ", endTime: "
                + endTime
                + "] in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                + " ms");
      }
    }
  }
Example #8
 private synchronized Duration elapsedErrorDuration() {
   if (errorStopwatch.isRunning()) {
     errorStopwatch.stop();
   }
   long nanos = errorStopwatch.elapsed(TimeUnit.NANOSECONDS);
   return new Duration(nanos, TimeUnit.NANOSECONDS).convertTo(TimeUnit.SECONDS);
 }
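Note: Example #8 converts elapsed nanoseconds into an Airlift-style Duration by hand. Since Guava 22 on Java 8+, the no-argument Stopwatch.elapsed() returns a java.time.Duration directly, removing the manual unit bookkeeping. A sketch under those version assumptions:

 // uses com.google.common.base.Stopwatch and java.time.Duration
 private synchronized Duration elapsedErrorDuration() {
   if (errorStopwatch.isRunning()) {
     errorStopwatch.stop();
   }
   return errorStopwatch.elapsed(); // a whole Duration; callers choose units at the edge
 }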
Example #9
  public Iterable<MeasurementDataNumericHighLowComposite> findDataForResource(
      int scheduleId, long beginTime, long endTime, int numberOfBuckets) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
      DateTime begin = new DateTime(beginTime);

      if (dateTimeService.isInRawDataRange(begin)) {
        Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleId, beginTime, endTime);
        return createRawComposites(metrics, beginTime, endTime, numberOfBuckets);
      }

      List<AggregateNumericMetric> metrics = null;
      if (dateTimeService.isIn1HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.ONE_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else if (dateTimeService.isIn6HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.SIX_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else if (dateTimeService.isIn24HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.TWENTY_FOUR_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else {
        throw new IllegalArgumentException(
            "beginTime[" + beginTime + "] is outside the accepted range.");
      }
    } finally {
      stopwatch.stop();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating resource summary aggregate in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                + " ms");
      }
    }
  }
Example #10
  @Test
  public void ensureRecordsTest() {
    int empId = 11303;
    List<PayPeriod> payPeriods =
        periodService.getOpenPayPeriods(PayPeriodType.AF, empId, SortOrder.ASC);
    // Print existing records
    Set<TimeRecord> existingRecords =
        timeRecordService
            .getTimeRecords(Collections.singleton(empId), payPeriods, TimeRecordStatus.getAll())
            .stream()
            .map(TimeRecord::new)
            .collect(Collectors.toSet());
    logger.info("-------- EXISTING RECORDS --------");
    printRecords(existingRecords);

    Stopwatch sw = Stopwatch.createStarted();
    // Generate records
    manager.ensureRecords(empId);
    logger.info("generation took {} ms", sw.stop().elapsed(TimeUnit.MILLISECONDS));

    // Print difference
    Set<TimeRecord> newRecords =
        new TreeSet<>(
            timeRecordService.getTimeRecords(
                Collections.singleton(empId), payPeriods, TimeRecordStatus.getAll()));
    logger.info("-------- NEW RECORDS --------");
    printRecords(Sets.difference(newRecords, existingRecords));
  }
Example #11
 /**
  * DO NOT RUN!!!!
  *
  * @author Joshua Barlin (propoke24)
  * @version 1
  * @return Time between execution and interruption
  * @deprecated Test Code
  */
 @Deprecated
 public static long timer() {
   final Stopwatch stopwatch = Stopwatch.createUnstarted();
   stopwatch.start();
   stopwatch.stop();
   return stopwatch.elapsed(TimeUnit.SECONDS);
 }
Example #12
 public static void endTimer(String name) {
   Stopwatch stopwatch = timers.get(name + Thread.currentThread().getId());
   if (stopwatch != null) {
     stopwatch.stop();
     addMeasurementToTimer(stopwatch.elapsedTime(TimeUnit.NANOSECONDS), name);
   }
 }
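Note: Example #12 keys a shared timer map by name plus the current thread id, and reads the old elapsedTime(TimeUnit) API. A ThreadLocal registry gives the same per-thread isolation without composed keys; in this sketch, startTimer and addMeasurementToTimer are hypothetical counterparts of the original's helpers, and elapsed(TimeUnit) assumes a newer Guava:

 private static final ThreadLocal<Map<String, Stopwatch>> TIMERS =
     ThreadLocal.withInitial(HashMap::new); // java.util.Map / java.util.HashMap

 public static void startTimer(String name) {
   TIMERS.get().put(name, Stopwatch.createStarted());
 }

 public static void endTimer(String name) {
   Stopwatch stopwatch = TIMERS.get().remove(name); // remove so entries don't accumulate
   if (stopwatch != null) {
     stopwatch.stop();
     addMeasurementToTimer(stopwatch.elapsed(TimeUnit.NANOSECONDS), name);
   }
 }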
Example #13
  @Override
  public SOid fromString(final String from) throws Exception {
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug(String.format("fromString(from=%s)", from));
    }
    final SOid oid;
    final Stopwatch stopwatch = Stopwatch.createStarted();
    try {
      final String[] split = from.split(":");
      Assert.isTrue(
          split.length == 3,
          String.format("OID[%s] is invalid, it should be in format A:B:C", from));

      final String oidPrefix = split[TYPE_PREFIX_INDEX];
      final Class<?> oidClass = Class.forName(split[CLASS_NAME_INDEX]);
      final String oidId = split[ID_INDEX];

      oid = this.getOidObject(oidPrefix, oidClass, oidId);
    } catch (Exception exp) {
      LOGGER.error(
          String.format("fromString(from=%s) failed...", from), Throwables.getRootCause(exp));
      throw exp;
    }

    stopwatch.stop();

    if (LOGGER.isTraceEnabled()) {
      final long elapsed = stopwatch.elapsed(TimeUnit.MILLISECONDS);
      LOGGER.trace(
          String.format("fromString(from=%s) to SOid(oid=%s) took %d ms", from, oid, elapsed));
    }

    return oid;
  }
Example #14
 @Test
 public void testMillionsExeWithAnnotation() {
   Stopwatch stopwatch = Stopwatch.createStarted();
   int size = 1000 * 1000;
   for (int i = 0; i < size; i++) {
     cacheDemo.getUserMock(i);
   }
   stopwatch.stop();
   System.out.println(stopwatch.elapsed(TimeUnit.MILLISECONDS));
 }
Example #15
  public synchronized Long stop() {
    if (stopwatch == null) {
      return null;
    }

    try {
      return stopwatch.stop().elapsed(TimeUnit.MILLISECONDS);
    } finally {
      stopwatch = null;
    }
  }
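Note: Example #15 makes stop() safe to call more than once: the try/finally nulls the field, so a second call returns null rather than throwing IllegalStateException from Stopwatch.stop(). A hypothetical caller (the tracker name is illustrative):

   Long millis = tracker.stop();  // first call: elapsed milliseconds
   Long again = tracker.stop();   // second call: null, no exception
   if (millis != null) {
     logger.info("finished in {} ms", millis);
   }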
Example #16
  private void sendPackedObjects(
      final List<ObjectId> toSend,
      final Set<ObjectId> roots,
      Deduplicator deduplicator,
      final ProgressListener progress) {
    Set<ObjectId> sent = new HashSet<ObjectId>();
    while (!toSend.isEmpty()) {
      try {
        BinaryPackedObjects.Callback callback =
            new BinaryPackedObjects.Callback() {
              @Override
              public void callback(Supplier<RevObject> supplier) {
                RevObject object = supplier.get();
                progress.setProgress(progress.getProgress() + 1);
                if (object instanceof RevCommit) {
                  RevCommit commit = (RevCommit) object;
                  toSend.remove(commit.getId());
                  roots.removeAll(commit.getParentIds());
                  roots.add(commit.getId());
                }
              }
            };
        ObjectDatabase database = localRepository.objectDatabase();
        BinaryPackedObjects packer = new BinaryPackedObjects(database);

        ImmutableList<ObjectId> have = ImmutableList.copyOf(roots);
        final boolean traverseCommits = false;

        Stopwatch sw = Stopwatch.createStarted();
        ObjectSerializingFactory serializer = DataStreamSerializationFactoryV1.INSTANCE;
        SendObjectsConnectionFactory outFactory;
        ObjectFunnel objectFunnel;

        outFactory = new SendObjectsConnectionFactory(repositoryURL);
        int pushBytesLimit = parsePushLimit();
        objectFunnel = ObjectFunnels.newFunnel(outFactory, serializer, pushBytesLimit);
        final long writtenObjectsCount =
            packer.write(objectFunnel, toSend, have, sent, callback, traverseCommits, deduplicator);
        objectFunnel.close();
        sw.stop();

        long compressedSize = outFactory.compressedSize;
        long uncompressedSize = outFactory.uncompressedSize;
        LOGGER.info(
            String.format(
                "HttpRemoteRepo: Written %,d objects."
                    + " Time to process: %s."
                    + " Compressed size: %,d bytes. Uncompressed size: %,d bytes.",
                writtenObjectsCount, sw, compressedSize, uncompressedSize));
      } catch (IOException e) {
        Throwables.propagate(e);
      }
    }
  }
Example #17
  @GET
  @Path("hdfs/{cluster}/")
  @Produces(MediaType.APPLICATION_JSON)
  public List<HdfsStats> getHdfsStats(
      @PathParam("cluster") String cluster,
      // run Id is timestamp in seconds
      @QueryParam("timestamp") long runid,
      @QueryParam("path") String pathPrefix,
      @QueryParam("limit") int limit)
      throws IOException {
    if (limit == 0) {
      limit = HdfsConstants.RECORDS_RETURNED_LIMIT;
    }

    boolean noRunId = false;
    if (runid == 0L) {
      // default it to 2 hours back
      long lastHour = System.currentTimeMillis() - 2 * 3600000L;
      // convert milliseconds to seconds
      runid = lastHour / 1000L;
      noRunId = true;
    }

    LOG.info(
        String.format(
            "Fetching hdfs stats for cluster=%s, path=%s limit=%d, runId=%d",
            cluster, pathPrefix, limit, runid));
    Stopwatch timer = new Stopwatch().start();
    serializationContext.set(new SerializationContext(SerializationContext.DetailLevel.EVERYTHING));
    List<HdfsStats> hdfsStats = getHdfsStatsService().getAllDirs(cluster, pathPrefix, limit, runid);
    timer.stop();
    /*
     * If we find no HDFS stats for the default timestamp, consider the case where no runId was
     * passed in: the user expects a default response, so we default the runId to 2 hours back, as
     * above. But there may have been a collection error at that time, hence we try looking back
     * for some older runIds.
     */
    if (hdfsStats == null || hdfsStats.size() == 0L) {
      if (noRunId) {
        // consider reading the daily aggregation table instead of hourly
        // or consider reading older data since runId was a default timestamp
        int retryCount = 0;
        while (retryCount < HdfsConstants.ageMult.length) {
          runid = HdfsStatsService.getOlderRunId(retryCount, runid);
          hdfsStats = getHdfsStatsService().getAllDirs(cluster, pathPrefix, limit, runid);
          if ((hdfsStats != null) && (hdfsStats.size() != 0L)) {
            break;
          }
          retryCount++;
        }
      }
    }
    return hdfsStats;
  }
Example #18
 @Test
 public void testAppend() {
   MemTable mt = new MemTable();
   Stopwatch sw = new Stopwatch();
   sw.start();
   for (int i = 0; i < 200000; i++) {
     Message msg = getMessage(i);
     mt.append(msg);
   }
   sw.stop();
   System.out.println("ex time set:" + sw);
   mt.getSnapShot();
 }
Example #19
  /**
   * List input directories. Subclasses may override to, e.g., select only files matching a regular
   * expression.
   *
   * @param job the job to list input paths for
   * @return a list of FileStatus objects
   * @throws IOException if no input paths are specified
   */
  protected List<FileStatus> listStatus(JobContext job) throws IOException {
    Path[] dirs = getInputPaths(job);
    if (dirs.length == 0) {
      throw new IOException("No input paths specified in job");
    }

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs, job.getConfiguration());

    // Whether we need to recursive look into the directory structure
    boolean recursive = getInputDirRecursive(job);

    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
      filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);

    List<FileStatus> result = null;

    int numThreads =
        job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS, DEFAULT_LIST_STATUS_NUM_THREADS);
    Stopwatch sw = new Stopwatch().start();
    if (numThreads == 1) {
      result = singleThreadedListStatus(job, dirs, inputFilter, recursive);
    } else {
      Iterable<FileStatus> locatedFiles = null;
      try {
        LocatedFileStatusFetcher locatedFileStatusFetcher =
            new LocatedFileStatusFetcher(
                job.getConfiguration(), dirs, recursive, inputFilter, true);
        locatedFiles = locatedFileStatusFetcher.getFileStatuses();
      } catch (InterruptedException e) {
        throw new IOException("Interrupted while getting file statuses");
      }
      result = Lists.newArrayList(locatedFiles);
    }

    sw.stop();
    if (LogGlobal.isDebugEnabled()) {
      /* LOG.debug("Time taken to get FileStatuses: "+sw.elapsedMillis()) */
      LOG.time_taken_get_filestatuses(String.valueOf(sw.elapsedMillis())).tag("methodCall").debug();
    }
    /* LOG.info("Total input paths to process : "+result.size()) */
    LOG.total_input_paths_process(String.valueOf(result.size())).tag("methodCall").info();
    return result;
  }
Example #20
  /**
   * Performs the initial phases 0-2 of the build: Setup, Loading and Analysis.
   *
   * <p>Postcondition: On success, populates the BuildRequest's set of targets to build.
   *
   * @return null if loading / analysis phases were successful; a useful error message if loading or
   *     analysis phase errors were encountered and request.keepGoing.
   * @throws InterruptedException if the current thread was interrupted.
   * @throws ViewCreationFailedException if analysis failed for any reason.
   */
  private AnalysisResult runAnalysisPhase(
      BuildRequest request,
      LoadingResult loadingResult,
      BuildConfigurationCollection configurations)
      throws InterruptedException, ViewCreationFailedException {
    Stopwatch timer = Stopwatch.createStarted();
    if (!request.getBuildOptions().performAnalysisPhase) {
      getReporter().handle(Event.progress("Loading complete."));
      LOG.info("No analysis requested, so finished");
      return AnalysisResult.EMPTY;
    }

    getReporter().handle(Event.progress("Loading complete.  Analyzing..."));
    Profiler.instance().markPhase(ProfilePhase.ANALYZE);

    AnalysisResult analysisResult =
        getView()
            .update(
                loadingResult,
                configurations,
                request.getAspects(),
                request.getViewOptions(),
                request.getTopLevelArtifactContext(),
                getReporter(),
                getEventBus());

    // TODO(bazel-team): Merge these into one event.
    getEventBus()
        .post(
            new AnalysisPhaseCompleteEvent(
                analysisResult.getTargetsToBuild(),
                getView().getTargetsVisited(),
                timer.stop().elapsed(TimeUnit.MILLISECONDS)));
    getEventBus()
        .post(
            new TestFilteringCompleteEvent(
                analysisResult.getTargetsToBuild(), analysisResult.getTargetsToTest()));

    // Check licenses.
    // We check licenses if the first target configuration has license checking enabled. Right now,
    // it is not possible to have multiple target configurations with different settings for this
    // flag, which allows us to take this short cut.
    boolean checkLicenses = configurations.getTargetConfigurations().get(0).checkLicenses();
    if (checkLicenses) {
      Profiler.instance().markPhase(ProfilePhase.LICENSE);
      validateLicensingForTargets(
          analysisResult.getTargetsToBuild(), request.getViewOptions().keepGoing);
    }

    return analysisResult;
  }
Example #21
  private static List<int[]> getPopularTags(BookmarkReader reader, int sampleSize, int limit) {
    timeString = "";
    List<int[]> tags = new ArrayList<int[]>();
    Stopwatch timer = new Stopwatch();
    timer.start();

    int[] tagIDs = getPopularTagList(reader, limit);

    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timer = new Stopwatch();
    timer.start();
    for (int j = 0; j < sampleSize; j++) {
      tags.add(tagIDs);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timeString += ("Full training time: " + trainingTime + "\n");
    timeString += ("Full test time: " + testTime + "\n");
    timeString += ("Average test time: " + testTime / sampleSize) + "\n";
    timeString += ("Total time: " + (trainingTime + testTime) + "\n");
    return tags;
  }
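Note: Example #21 throws away the first Stopwatch and allocates a second one for the test phase; reset() reuses the same instance. A minimal sketch of the same two-phase timing, assuming the Guava 15+ factory methods:

   Stopwatch timer = Stopwatch.createStarted();
   int[] tagIDs = getPopularTagList(reader, limit);
   long trainingTime = timer.stop().elapsed(TimeUnit.MILLISECONDS);

   timer.reset().start(); // reuse the instance instead of new Stopwatch()
   for (int j = 0; j < sampleSize; j++) {
     tags.add(tagIDs);
   }
   long testTime = timer.stop().elapsed(TimeUnit.MILLISECONDS);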
Example #22
 private LenskitRecommenderEngine loadEngine() throws RecommenderBuildException, IOException {
   File modelFile = options.get("model_file");
   if (modelFile == null) {
     logger.info("creating fresh recommender");
     LenskitRecommenderEngineBuilder builder = LenskitRecommenderEngine.newBuilder();
     for (LenskitConfiguration config : environment.loadConfigurations(getConfigFiles())) {
       builder.addConfiguration(config);
     }
     builder.addConfiguration(input.getConfiguration());
     Stopwatch timer = Stopwatch.createStarted();
     LenskitRecommenderEngine engine = builder.build();
     timer.stop();
     logger.info("built recommender in {}", timer);
     return engine;
   } else {
     logger.info("loading recommender from {}", modelFile);
     LenskitRecommenderEngineLoader loader = LenskitRecommenderEngine.newLoader();
     for (LenskitConfiguration config : environment.loadConfigurations(getConfigFiles())) {
       loader.addConfiguration(config);
     }
     loader.addConfiguration(input.getConfiguration());
     Stopwatch timer = Stopwatch.createStarted();
     LenskitRecommenderEngine engine;
     CompressionMode comp = CompressionMode.autodetect(modelFile);
     InputStream input = new FileInputStream(modelFile);
     try {
       input = comp.wrapInput(input);
       engine = loader.load(input);
     } finally {
       input.close();
     }
     timer.stop();
     logger.info("loaded recommender in {}", timer);
     return engine;
   }
 }
Example #23
  public static void main(String[] args) throws Exception {
    // Thread.sleep(5000);
    parserArgs(args);
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.start();

    startupProducer(fileName);

    printResult(getEntryOrdering(), top, statisticsWord());

    stopwatch.stop();

    System.out.println("task elapsed time\t" + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    executor.shutdown();
  }
Example #24
  @Override
  protected Collection<Commitment> getCommitments(String dep, String line) throws Exception {
    Query dq = new TermQuery(new Term("responsibleDepartment", dep));
    Query lq = new TermQuery(new Term("budgetLine", line));
    BooleanQuery fq = new BooleanQuery();

    if (departmentFirst) {
      fq.add(dq, BooleanClause.Occur.MUST);
      fq.add(lq, BooleanClause.Occur.MUST);
    } else {
      fq.add(lq, BooleanClause.Occur.MUST);
      fq.add(dq, BooleanClause.Occur.MUST);
    }

    keySearch.start();
    List<Object> keys = indexSearcher.search(commitmentRegion.getFullPath(), fq);
    keySearch.stop();

    hashGet.start();
    Map<String, Commitment> result = commitmentRegion.getAll(keys);
    hashGet.stop();

    return result.values();
  }
Example #25
 @Test
 public void testAggregate() throws Exception {
   logger.debug("Run test: " + XMLLineAggregatorTest.class);
   Stopwatch timer = Stopwatch.createStarted();
   Sentence sentence = new Sentence.Builder(1).addWord("b").addWord("a").addWord("c").build();
   String aggregated = lineAggregator.aggregate(sentence);
   timer.stop();
   Assert.assertEquals(
       "<sentence><word>b</word><word>a</word><word>c</word></sentence>", aggregated);
   logger.debug(
       "Test: "
           + XMLLineAggregatorTest.class
           + " succeed in "
           + timer.elapsed(TimeUnit.MILLISECONDS)
           + " milliseconds.");
 }
Example #26
    void transitionService(Service service, Service.State from, Service.State to) {
      Preconditions.checkNotNull(service);
      Preconditions.checkArgument(from != to);
      this.monitor.enter();
      try {
        this.transitioned = true;
        if (!this.ready) {
          return;
        }
        Preconditions.checkState(
            this.servicesByState.remove(from, service),
            "Service %s not at the expected location in the state map %s",
            service, from);

        Preconditions.checkState(
            this.servicesByState.put(to, service),
            "Service %s in the state map unexpectedly at %s",
            service, to);

        Stopwatch stopwatch = (Stopwatch) this.startupTimers.get(service);
        if (from == Service.State.NEW) {
          stopwatch.start();
        }
        if ((to.compareTo(Service.State.RUNNING) >= 0) && (stopwatch.isRunning())) {
          stopwatch.stop();
          if (!(service instanceof ServiceManager.NoOpService)) {
            ServiceManager.logger.log(
                Level.FINE, "Started {0} in {1}.", new Object[] {service, stopwatch});
          }
        }
        if (to == Service.State.FAILED) {
          fireFailedListeners(service);
        }
        if (this.states.count(Service.State.RUNNING) == this.numberOfServices) {
          fireHealthyListeners();
        } else if (this.states.count(Service.State.TERMINATED)
                + this.states.count(Service.State.FAILED)
            == this.numberOfServices) {
          fireStoppedListeners();
        }
      } finally {
        this.monitor.leave();

        executeListeners();
      }
    }
Example #27
  private void test(String description, int iterations, Runnable task) {
    LOGGER.info("Running test: " + description);

    long best = Long.MAX_VALUE;
    Stopwatch stopwatch = new Stopwatch();

    for (int i = 0; i < iterations; i++) {
      stopwatch.start();
      task.run();
      stopwatch.stop();
      long elapsed = stopwatch.elapsed(TimeUnit.MICROSECONDS);
      best = Math.min(best, elapsed);
      stopwatch.reset();
    }

    LOGGER.info("Finished test " + description + " in " + best + "µs");
  }
Example #28
  public static void main(String[] args) {
    Stopwatch watch = new Stopwatch();
    watch.start();

    GlydarBootstrap bootstrap = new GlydarBootstrap(args);
    server = new GServer(bootstrap);
    ParaGlydar.setServer(server);
    serverThread = new Thread(server);

    serverBootstrap = new ServerBootstrap();
    serverBootstrap
        .childHandler(new ProtocolInitializer())
        .option(ChannelOption.TCP_NODELAY, true)
        .option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
        .option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 64 * 1024)
        .group(new NioEventLoopGroup())
        .channelFactory(
            new ChannelFactory<ServerChannel>() {
              @Override
              public ServerChannel newChannel() {
                return new NioServerSocketChannel();
              }
            })
        .bind(new InetSocketAddress(server.getConfig().getPort()));

    server.setUpWorlds();

    try {
      server.getPluginLoader().loadPlugins();
    } catch (Exception exc) {
      server.getLogger().warning(exc, "Error while loading plugins");
    }

    server.getLogger().info("Server ready on port {0}", server.getConfig().getPort());
    server
        .getLogger()
        .info("This server is running {0} version {1}", server.getName(), server.getVersion());

    watch.stop();
    server.getLogger().info("Server started in {0}ms", watch.elapsed(TimeUnit.MILLISECONDS));

    server.getCommandReader().start();
    serverThread.start();
  }
Example #29
  private void narrowByRule(ConstrainedTerm constrainedTerm, Rule rule) {
    stopwatch.reset();
    stopwatch.start();

    constrainedTermResults = new ArrayList<ConstrainedTerm>();

    SymbolicConstraint leftHandSideConstraint =
        new SymbolicConstraint(constrainedTerm.termContext());
    leftHandSideConstraint.addAll(rule.requires());
    for (Variable variable : rule.freshVariables()) {
      leftHandSideConstraint.add(variable, IntToken.fresh());
    }

    ConstrainedTerm leftHandSide =
        new ConstrainedTerm(
            rule.leftHandSide(),
            rule.lookups().getSymbolicConstraint(constrainedTerm.termContext()),
            leftHandSideConstraint,
            constrainedTerm.termContext());

    for (SymbolicConstraint constraint : constrainedTerm.unify(leftHandSide)) {
      constraint.addAll(rule.ensures());
      /* rename rule variables in the constraints */
      Map<Variable, Variable> freshSubstitution = constraint.rename(rule.variableSet());

      Term result = rule.rightHandSide();
      /* rename rule variables in the rule RHS */
      result = result.substituteWithBinders(freshSubstitution, constrainedTerm.termContext());
      /* apply the constraints substitution on the rule RHS */
      result =
          result.substituteWithBinders(constraint.substitution(), constrainedTerm.termContext());
      /* evaluate pending functions in the rule RHS */
      result = result.evaluate(constrainedTerm.termContext());
      /* eliminate anonymous variables */
      constraint.eliminateAnonymousVariables();

      /* compute all results */
      constrainedTermResults.add(
          new ConstrainedTerm(result, constraint, constrainedTerm.termContext()));
    }

    stopwatch.stop();
  }
Example #30
  @Test
  public void testAllRowsReaderConcurrency12() throws Exception {
    final AtomicLong counter = new AtomicLong(0);

    final Map<Long, AtomicLong> threadIds = Maps.newHashMap();

    AllRowsReader<Integer, Integer> reader =
        new AllRowsReader.Builder<Integer, Integer>(keyspace, CF_ALL_ROWS)
            .withPageSize(100)
            .withConcurrencyLevel(12)
            .withColumnSlice(0)
            .forEachRow(
                new Function<Row<Integer, Integer>, Boolean>() {
                  @Override
                  public synchronized Boolean apply(Row<Integer, Integer> row) {
                    long threadId = Thread.currentThread().getId();
                    AtomicLong threadCounter = threadIds.get(threadId);
                    if (threadCounter == null) {
                      threadCounter = new AtomicLong(0);
                      threadIds.put(threadId, threadCounter);
                    }
                    threadCounter.incrementAndGet();
                    counter.incrementAndGet();
                    return true;
                  }
                })
            .build();

    try {
      Stopwatch sw = new Stopwatch().start();
      boolean result = reader.call();
      long runtimeMillis = sw.stop().elapsedMillis();

      LOG.info("Count = " + counter.get() + " runtime=" + runtimeMillis);
      LOG.info("ThreadIds (" + threadIds.size() + ") " + threadIds);
      Assert.assertEquals(threadIds.size(), 12);
      Assert.assertEquals(counter.get(), ALL_ROWS_COUNT);
      Assert.assertTrue(result);
    } catch (Exception e) {
      LOG.info(e.getMessage(), e);
      Assert.fail(e.getMessage());
    }
  }
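Note: Example #30's new Stopwatch().start() and elapsedMillis() are the old Guava APIs; both were deprecated and later removed in favor of the factory methods and elapsed(TimeUnit). Under current Guava the timing lines would read roughly:

      Stopwatch sw = Stopwatch.createStarted();
      boolean result = reader.call();
      long runtimeMillis = sw.stop().elapsed(TimeUnit.MILLISECONDS);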