Example No. 1
 public void start() {
   if (watch.isRunning()) {
     watch.reset().start();
   } else {
     watch.start();
   }
 }
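The method above restarts a shared Guava Stopwatch: if it is already running it is zeroed and started again, otherwise it is simply started. A minimal, self-contained sketch of the same restart idiom using the factory methods from current Guava releases (the StopwatchHolder class and its method names are illustrative, not from the original project):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

// Illustrative holder class; restart() mirrors the reset-or-start logic above.
public class StopwatchHolder {
  private final Stopwatch watch = Stopwatch.createUnstarted();

  public void restart() {
    if (watch.isRunning()) {
      // reset() stops and zeroes the stopwatch; start() can be chained because reset() returns this
      watch.reset().start();
    } else {
      watch.start();
    }
  }

  public long elapsedMillis() {
    return watch.elapsed(TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) throws InterruptedException {
    StopwatchHolder holder = new StopwatchHolder();
    holder.restart();
    Thread.sleep(50);
    System.out.println("elapsed ~" + holder.elapsedMillis() + " ms");
    holder.restart(); // already running, so it is zeroed and started again
    System.out.println("after restart: " + holder.elapsedMillis() + " ms");
  }
}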
Example No. 2
 /**
  * DO NOT RUN!!!!
  *
  * @author Joshua Barlin (propoke24)
  * @version 1
  * @return Time between execution and interruption
  * @deprecated Test Code
  */
 @Deprecated
 public static long timer() {
   final Stopwatch stopwatch = Stopwatch.createUnstarted();
   stopwatch.start();
   stopwatch.stop();
   return stopwatch.elapsed(TimeUnit.SECONDS);
 }
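As the javadoc warns, this method starts and stops the stopwatch back to back, so elapsed(TimeUnit.SECONDS) truncates to 0 in practice. A small standalone sketch of that truncation behaviour (the demo class and sleep duration are illustrative; timings depend on the machine):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class ElapsedTruncationDemo {
  public static void main(String[] args) throws InterruptedException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    Thread.sleep(500); // half a second of "work"
    stopwatch.stop();

    // elapsed(SECONDS) truncates toward zero, so a 500 ms run reports 0 seconds
    System.out.println("seconds: " + stopwatch.elapsed(TimeUnit.SECONDS));
    // finer-grained units keep the information
    System.out.println("millis:  " + stopwatch.elapsed(TimeUnit.MILLISECONDS));
  }
}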
Example No. 3
  @Override
  public List<EndpointAffinity> getOperatorAffinity() {
    watch.reset();
    watch.start();
    Map<String, DrillbitEndpoint> endpointMap = new HashMap<String, DrillbitEndpoint>();
    for (DrillbitEndpoint ep : storagePlugin.getContext().getBits()) {
      endpointMap.put(ep.getAddress(), ep);
    }

    Map<DrillbitEndpoint, EndpointAffinity> affinityMap =
        new HashMap<DrillbitEndpoint, EndpointAffinity>();
    for (ServerName sn : regionsToScan.values()) {
      DrillbitEndpoint ep = endpointMap.get(sn.getHostname());
      if (ep != null) {
        EndpointAffinity affinity = affinityMap.get(ep);
        if (affinity == null) {
          affinityMap.put(ep, new EndpointAffinity(ep, 1));
        } else {
          affinity.addAffinity(1);
        }
      }
    }
    logger.debug("Took {} µs to get operator affinity", watch.elapsed(TimeUnit.NANOSECONDS) / 1000);
    return Lists.newArrayList(affinityMap.values());
  }
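The debug line converts nanoseconds to microseconds by hand; Stopwatch can also report microseconds directly. A minimal sketch of timing a block and printing it in microseconds (the demo class and doWork method are placeholders, not part of the Drill code above):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class MicrosTimingDemo {
  public static void main(String[] args) {
    Stopwatch watch = Stopwatch.createStarted();
    doWork();
    // equivalent to elapsed(TimeUnit.NANOSECONDS) / 1000 in the example above
    long micros = watch.elapsed(TimeUnit.MICROSECONDS);
    System.out.println("Took " + micros + " µs to do work");
  }

  private static void doWork() {
    long sum = 0;
    for (int i = 0; i < 1_000_000; i++) {
      sum += i;
    }
    System.out.println("checksum: " + sum);
  }
}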
Example No. 4
 private void schedule(int delay) {
   currentDelay = delay;
   watch = new Stopwatch();
   watch.start();
   taskId =
       Bukkit.getScheduler()
           .scheduleSyncDelayedTask(plugin, this, (long) delay * TICKS_PER_SECONDS);
 }
Example No. 5
 @Test
 public void testMillionsExe() {
    Stopwatch stopwatch = Stopwatch.createStarted(); // already running; calling start() again would throw IllegalStateException
   int size = 1000 * 1000;
   for (int i = 0; i < size; i++) {
     cacheDemo.getUserWithoutAnno(i);
   }
   stopwatch.stop();
   System.out.println(stopwatch.elapsed(TimeUnit.MILLISECONDS));
 }
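Stopwatch.createStarted() returns an already-running stopwatch, so an extra start() call on it throws IllegalStateException. A small standalone sketch contrasting the two factory methods (the demo class is an assumption, not part of the original test):

import com.google.common.base.Stopwatch;

public class CreateStartedVsUnstarted {
  public static void main(String[] args) {
    // createStarted(): already running, do not call start() again
    Stopwatch started = Stopwatch.createStarted();
    System.out.println("running? " + started.isRunning()); // true

    // createUnstarted(): must be started explicitly
    Stopwatch unstarted = Stopwatch.createUnstarted();
    System.out.println("running? " + unstarted.isRunning()); // false
    unstarted.start();

    try {
      started.start(); // starting a running stopwatch is an error
    } catch (IllegalStateException expected) {
      System.out.println("second start() rejected: " + expected.getMessage());
    }
  }
}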
Example No. 6
 @Test
 public void testAppend() {
   MemTable mt = new MemTable();
   Stopwatch sw = new Stopwatch();
   sw.start();
   for (int i = 0; i < 200000; i++) {
     Message msg = getMessage(i);
     mt.append(msg);
   }
   sw.stop();
   System.out.println("ex time set:" + sw);
   mt.getSnapShot();
 }
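This test relies on two older Guava behaviours: the public Stopwatch() constructor (deprecated and removed in later Guava releases in favour of createStarted/createUnstarted) and the human-readable toString() output used in the println. A hedged sketch of the equivalent with the factory API (the demo class is illustrative; MemTable and getMessage from the original test are not reproduced):

import com.google.common.base.Stopwatch;

public class ToStringTimingDemo {
  public static void main(String[] args) {
    Stopwatch sw = Stopwatch.createStarted(); // replaces "new Stopwatch(); sw.start();"
    long sum = 0;
    for (int i = 0; i < 200_000; i++) {
      sum += i; // stand-in for mt.append(getMessage(i))
    }
    sw.stop();
    // Stopwatch.toString() picks a readable unit automatically, e.g. "12.34 ms"
    System.out.println("ex time set:" + sw + " (checksum " + sum + ")");
  }
}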
Example No. 7
 /**
  * Code for each 'client' to run.
  *
  * @param id
  * @param c
  * @param sharedConnection
  * @throws IOException
  */
 static void cycle(int id, final Configuration c, final HConnection sharedConnection)
     throws IOException {
   HTableInterface table = sharedConnection.getTable(BIG_USER_TABLE);
   table.setAutoFlushTo(false);
   long namespaceSpan = c.getLong("hbase.test.namespace.span", 1000000);
   long startTime = System.currentTimeMillis();
   final int printInterval = 100000;
   Random rd = new Random(id);
   boolean get = c.getBoolean("hbase.test.do.gets", false);
   try {
     Stopwatch stopWatch = new Stopwatch();
     stopWatch.start();
     for (int i = 0; i < namespaceSpan; i++) {
       byte[] b = format(rd.nextLong());
       if (get) {
         Get g = new Get(b);
         table.get(g);
       } else {
         Put p = new Put(b);
         p.add(HConstants.CATALOG_FAMILY, b, b);
         table.put(p);
       }
       if (i % printInterval == 0) {
         LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis());
         stopWatch.reset();
         stopWatch.start();
       }
     }
     LOG.info(
         "Finished a cycle putting "
             + namespaceSpan
             + " in "
             + (System.currentTimeMillis() - startTime)
             + "ms");
   } finally {
     table.close();
   }
 }
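The loop above measures each batch of printInterval operations separately by calling reset() and start() after every report; elapsedMillis() is an older Guava spelling of elapsed(TimeUnit.MILLISECONDS). A simplified sketch of the same per-interval pattern with the current API (the demo class and doOperation are placeholders for the HBase get/put calls):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class IntervalTimingDemo {
  private static final int PRINT_INTERVAL = 100_000;
  private static double sink; // keeps the loop body from being optimised away

  public static void main(String[] args) {
    Stopwatch stopWatch = Stopwatch.createStarted();
    for (int i = 1; i <= 1_000_000; i++) {
      doOperation(i);
      if (i % PRINT_INTERVAL == 0) {
        System.out.println(PRINT_INTERVAL + " ops in " + stopWatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        stopWatch.reset().start(); // restart the measurement window for the next batch
      }
    }
    System.out.println("sink=" + sink);
  }

  private static void doOperation(int i) {
    sink += Math.sqrt(i); // stand-in for table.get(...) / table.put(...)
  }
}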
Example No. 8
 /**
  * Attempt to kill a running task. If the task has not started running, it will not start. If it's
  * already running, a kill request will be sent to it.
  *
  * <p>The AM will be informed about the task kill.
  */
 public void killTask() {
   if (!isCompleted.get()) {
     if (!killInvoked.getAndSet(true)) {
       synchronized (this) {
         LOG.info(
             "Kill task requested for id={}, taskRunnerSetup={}",
             taskSpec.getTaskAttemptID(),
             (taskRunner != null));
         if (taskRunner != null) {
           killtimerWatch.start();
           LOG.info("Issuing kill to task {}", taskSpec.getTaskAttemptID());
           boolean killed = taskRunner.killTask();
           if (killed) {
             // Sending a kill message to the AM right here. Don't need to wait for the task to
             // complete.
             LOG.info(
                 "Kill request for task {} completed. Informing AM", taskSpec.getTaskAttemptID());
             reportTaskKilled();
           } else {
             LOG.info(
                 "Kill request for task {} did not complete because the task is already complete",
                 taskSpec.getTaskAttemptID());
           }
           shouldRunTask = false;
         } else {
           // If the task hasn't started, and it is killed - report back to the AM that the task
           // has been killed.
           LOG.debug("Reporting taskKilled for non-started fragment {}", getRequestId());
           reportTaskKilled();
         }
         if (!isStarted.get()) {
            // If the task hasn't started - inform about fragment completion immediately.
            // It's possible for the callable to never run.
           fragmentCompletionHanler.fragmentComplete(fragmentInfo);
           this.amReporter.unregisterTask(request.getAmHost(), request.getAmPort());
         }
       }
     } else {
       // This should not happen.
       LOG.warn(
           "Ignoring kill request for task {} since a previous kill request was processed",
           taskSpec.getTaskAttemptID());
     }
   } else {
     LOG.info(
         "Ignoring kill request for task {} since it's already complete",
         taskSpec.getTaskAttemptID());
   }
 }
Example No. 9
  private static List<int[]> getPopularTags(BookmarkReader reader, int sampleSize, int limit) {
    timeString = "";
    List<int[]> tags = new ArrayList<int[]>();
    Stopwatch timer = new Stopwatch();
    timer.start();

    int[] tagIDs = getPopularTagList(reader, limit);

    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timer = new Stopwatch();
    timer.start();
    for (int j = 0; j < sampleSize; j++) {
      tags.add(tagIDs);
    }
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);
    timeString += ("Full training time: " + trainingTime + "\n");
    timeString += ("Full test time: " + testTime + "\n");
    timeString += ("Average test time: " + testTime / sampleSize) + "\n";
    timeString += ("Total time: " + (trainingTime + testTime) + "\n");
    return tags;
  }
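Rather than allocating a second Stopwatch for the test phase, the same instance can be reset between phases. A minimal sketch of timing two phases with one stopwatch (the demo class and the sleeps are placeholders for the training and test phases above):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class TwoPhaseTimingDemo {
  public static void main(String[] args) throws InterruptedException {
    Stopwatch timer = Stopwatch.createStarted();
    Thread.sleep(100); // "training" phase placeholder
    timer.stop();
    long trainingTime = timer.elapsed(TimeUnit.MILLISECONDS);

    timer.reset().start(); // reuse the instance instead of creating a second one
    Thread.sleep(50); // "test" phase placeholder
    timer.stop();
    long testTime = timer.elapsed(TimeUnit.MILLISECONDS);

    System.out.println("Full training time: " + trainingTime);
    System.out.println("Full test time: " + testTime);
    System.out.println("Total time: " + (trainingTime + testTime));
  }
}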
Example No. 10
  public static void main(String[] args) throws Exception {
    // Thread.sleep(5000);
    parserArgs(args);
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.start();

    startupProducer(fileName);

    printResult(getEntryOrdering(), top, statisticsWord());

    stopwatch.stop();

    System.out.println("task elapsed time\t" + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    executor.shutdown();
  }
Example No. 11
  @Override
  protected Collection<Commitment> getCommitments(String dep, String line) throws Exception {
    Query dq = new TermQuery(new Term("responsibleDepartment", dep));
    Query lq = new TermQuery(new Term("budgetLine", line));
    BooleanQuery fq = new BooleanQuery();

    if (departmentFirst) {
      fq.add(dq, BooleanClause.Occur.MUST);
      fq.add(lq, BooleanClause.Occur.MUST);
    } else {
      fq.add(lq, BooleanClause.Occur.MUST);
      fq.add(dq, BooleanClause.Occur.MUST);
    }

    keySearch.start();
    List<Object> keys = indexSearcher.search(commitmentRegion.getFullPath(), fq);
    keySearch.stop();

    hashGet.start();
    Map<String, Commitment> result = commitmentRegion.getAll(keys);
    hashGet.stop();

    return result.values();
  }
Example No. 12
    void transitionService(Service service, Service.State from, Service.State to) {
      Preconditions.checkNotNull(service);
      Preconditions.checkArgument(from != to);
      this.monitor.enter();
      try {
        this.transitioned = true;
        if (!this.ready) {
          return;
        }
        Preconditions.checkState(
            this.servicesByState.remove(from, service),
            "Service %s not at the expected location in the state map %s",
            new Object[] {service, from});

        Preconditions.checkState(
            this.servicesByState.put(to, service),
            "Service %s in the state map unexpectedly at %s",
            new Object[] {service, to});

        Stopwatch stopwatch = (Stopwatch) this.startupTimers.get(service);
        if (from == Service.State.NEW) {
          stopwatch.start();
        }
        if ((to.compareTo(Service.State.RUNNING) >= 0) && (stopwatch.isRunning())) {
          stopwatch.stop();
          if (!(service instanceof ServiceManager.NoOpService)) {
            ServiceManager.logger.log(
                Level.FINE, "Started {0} in {1}.", new Object[] {service, stopwatch});
          }
        }
        if (to == Service.State.FAILED) {
          fireFailedListeners(service);
        }
        if (this.states.count(Service.State.RUNNING) == this.numberOfServices) {
          fireHealthyListeners();
        } else if (this.states.count(Service.State.TERMINATED)
                + this.states.count(Service.State.FAILED)
            == this.numberOfServices) {
          fireStoppedListeners();
        }
      } finally {
        this.monitor.leave();

        executeListeners();
      }
    }
Example No. 13
  private void test(String description, int iterations, Runnable task) {
    LOGGER.info("Running test: " + description);

    long best = Long.MAX_VALUE;
    Stopwatch stopwatch = new Stopwatch();

    for (int i = 0; i < iterations; i++) {
      stopwatch.start();
      task.run();
      stopwatch.stop();
      long elapsed = stopwatch.elapsed(TimeUnit.MICROSECONDS);
      best = Math.min(best, elapsed);
      stopwatch.reset();
    }

    LOGGER.info("Finished test " + description + " in " + best + "µs");
  }
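The start/stop/reset sequence inside the loop keeps each iteration's measurement independent so that only the best (lowest) time is reported. A compact, self-contained sketch of the same best-of-N harness (the demo class, the sample task, and the use of System.out in place of LOGGER are assumptions):

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class BestOfNDemo {
  static void test(String description, int iterations, Runnable task) {
    long best = Long.MAX_VALUE;
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    for (int i = 0; i < iterations; i++) {
      stopwatch.start();
      task.run();
      stopwatch.stop();
      best = Math.min(best, stopwatch.elapsed(TimeUnit.MICROSECONDS));
      stopwatch.reset(); // zero the counter before the next iteration
    }
    System.out.println("Finished test " + description + " in " + best + " µs");
  }

  public static void main(String[] args) {
    test("sum of 1..100000", 20, () -> {
      long sum = 0;
      for (int i = 1; i <= 100_000; i++) {
        sum += i;
      }
      if (sum == 0) {
        throw new AssertionError(); // keep the work from being optimised away
      }
    });
  }
}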
Example No. 14
  public static void main(String[] args) {
    Stopwatch watch = new Stopwatch();
    watch.start();

    GlydarBootstrap bootstrap = new GlydarBootstrap(args);
    server = new GServer(bootstrap);
    ParaGlydar.setServer(server);
    serverThread = new Thread(server);

    serverBootstrap = new ServerBootstrap();
    serverBootstrap
        .childHandler(new ProtocolInitializer())
        .option(ChannelOption.TCP_NODELAY, true)
        .option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
        .option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 64 * 1024)
        .group(new NioEventLoopGroup())
        .channelFactory(
            new ChannelFactory<ServerChannel>() {
              @Override
              public ServerChannel newChannel() {
                return new NioServerSocketChannel();
              }
            })
        .bind(new InetSocketAddress(server.getConfig().getPort()));

    server.setUpWorlds();

    try {
      server.getPluginLoader().loadPlugins();
    } catch (Exception exc) {
      server.getLogger().warning(exc, "Error while loading plugins");
    }

    server.getLogger().info("Server ready on port {0}", server.getConfig().getPort());
    server
        .getLogger()
        .info("This server is running {0} version {1}", server.getName(), server.getVersion());

    watch.stop();
    server.getLogger().info("Server started in {0}ms", watch.elapsed(TimeUnit.MILLISECONDS));

    server.getCommandReader().start();
    serverThread.start();
  }
Example No. 15
  private void narrowByRule(ConstrainedTerm constrainedTerm, Rule rule) {
    stopwatch.reset();
    stopwatch.start();

    constrainedTermResults = new ArrayList<ConstrainedTerm>();

    SymbolicConstraint leftHandSideConstraint =
        new SymbolicConstraint(constrainedTerm.termContext());
    leftHandSideConstraint.addAll(rule.requires());
    for (Variable variable : rule.freshVariables()) {
      leftHandSideConstraint.add(variable, IntToken.fresh());
    }

    ConstrainedTerm leftHandSide =
        new ConstrainedTerm(
            rule.leftHandSide(),
            rule.lookups().getSymbolicConstraint(constrainedTerm.termContext()),
            leftHandSideConstraint,
            constrainedTerm.termContext());

    for (SymbolicConstraint constraint : constrainedTerm.unify(leftHandSide)) {
      constraint.addAll(rule.ensures());
      /* rename rule variables in the constraints */
      Map<Variable, Variable> freshSubstitution = constraint.rename(rule.variableSet());

      Term result = rule.rightHandSide();
      /* rename rule variables in the rule RHS */
      result = result.substituteWithBinders(freshSubstitution, constrainedTerm.termContext());
      /* apply the constraints substitution on the rule RHS */
      result =
          result.substituteWithBinders(constraint.substitution(), constrainedTerm.termContext());
      /* evaluate pending functions in the rule RHS */
      result = result.evaluate(constrainedTerm.termContext());
      /* eliminate anonymous variables */
      constraint.eliminateAnonymousVariables();

      /* compute all results */
      constrainedTermResults.add(
          new ConstrainedTerm(result, constraint, constrainedTerm.termContext()));
    }

    stopwatch.stop();
  }
Example No. 16
  private void testIteration() throws IOException {
    for (int number = 1; number < 10_001; number *= 10) {
      String jsonString = generateJajascriptJSON(number);
      HttpRequest request =
          HttpRequest.postBuilder("/jajascript/optimize")
              .contentType(ContentTypes.APPLICATION_JSON, StandardCharsets.US_ASCII)
              .body(jsonString)
              .build();
      Action action1 = new JajascriptAction();

      Stopwatch stopwatch = new Stopwatch();
      stopwatch.start();
      HttpResponse response = action1.execute(request);
      stopwatch.stop();
      System.out.println(
          "For " + number + ", result = " + stopwatch.elapsed(TimeUnit.MICROSECONDS) + "µs.");
      System.out.println("Result = " + response.getBodyAsString(StandardCharsets.US_ASCII));
    }
  }
Example No. 17
  public void start() {
    Stopwatch sw = new Stopwatch();
    sw.start();

    ServerCodecConfig codeConfig = new ServerCodecConfig(name, addr);
    ThriftServerFramedCodec codec =
        new ThriftServerFramedCodec(codeConfig, new TCompactProtocol.Factory());

    server =
        ServerBuilder.safeBuild(
            svc,
            ServerBuilder.get()
                .codec(codec)
                .name(name)
                .maxConcurrentRequests(maxConcurrentRequests)
                .bindTo(addr));
    sw.stop();
    logger.info(
        String.format("building finagle server took %s ms", sw.elapsed(TimeUnit.MILLISECONDS)));
  }
Example No. 18
  @Test
  @Ignore
  public void testParseParquetPhysicalPlanRemote() throws Exception {
    DrillConfig config = DrillConfig.create();

    try (DrillClient client = new DrillClient(config); ) {
      client.connect();
      ParquetResultsListener listener = new ParquetResultsListener();
      Stopwatch watch = new Stopwatch();
      watch.start();
      client.runQuery(
          org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
          Resources.toString(Resources.getResource(fileName), Charsets.UTF_8),
          listener);
      System.out.println(
          String.format(
              "Got %d total records in %d seconds",
              listener.await(), watch.elapsed(TimeUnit.SECONDS)));
      client.close();
    }
  }
Example No. 19
  // apply rule by matching
  private void rewriteByRule(Term term, Rule rule) {
    stopwatch.reset();
    stopwatch.start();

    termResults = new ArrayList<Term>();

    TermContext context = TermContext.of(definition);
    ConstrainedTerm constrainedTerm = new ConstrainedTerm(term, context);

    SymbolicConstraint leftHandSideConstraint = new SymbolicConstraint(context);
    leftHandSideConstraint.addAll(rule.requires());
    for (Variable variable : rule.freshVariables()) {
      leftHandSideConstraint.add(variable, IntToken.fresh());
    }

    ConstrainedTerm leftHandSide =
        new ConstrainedTerm(
            rule.leftHandSide(),
            rule.lookups().getSymbolicConstraint(context),
            leftHandSideConstraint,
            context);

    for (SymbolicConstraint constraint : constrainedTerm.unify(leftHandSide)) {
      if (!constraint.isMatching(leftHandSide)) {
        continue;
      }

      constraint.orientSubstitution(leftHandSide.variableSet());

      Term result = rule.rightHandSide();
      /* apply the constraints substitution on the rule RHS */
      result = result.substituteAndEvaluate(constraint.substitution(), context);

      /* compute all results */
      termResults.add(result);
    }

    stopwatch.stop();
  }
Example No. 20
  @Override
  public void doFilterInternal(
      final HttpServletRequest request,
      final HttpServletResponse response,
      final FilterChain filterChain)
      throws IOException, ServletException {
    if (LOG.isDebugEnabled()) {
      final String requestDetails = buildRequestDetails(request);

      if (LOG.isDebugEnabled()) {
        LOG.debug(requestDetails + "Begin");
      }

      logCookies(request);

      final ResponseWrapper wrappedResponse = new ResponseWrapper(response);

      final Stopwatch stopwatch = Stopwatch.createUnstarted();
      stopwatch.start();
      try {
        filterChain.doFilter(request, wrappedResponse);
      } finally {
        stopwatch.stop();
        final int status = wrappedResponse.getStatus();

        if (status != 0) {
          LOG.debug(requestDetails + stopwatch.toString() + " (" + status + ")");
        } else {
          LOG.debug(requestDetails + stopwatch.toString());
        }
      }

      return;
    }

    filterChain.doFilter(request, response);
  }
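Starting the stopwatch before doFilter and stopping it in a finally block guarantees a timing line even when the downstream filter throws. A stripped-down sketch of the same try/finally timing pattern outside the servlet API (the demo class and runRequest are stand-ins for filterChain.doFilter and the wrapped response):

import com.google.common.base.Stopwatch;

public class TryFinallyTimingDemo {
  public static void main(String[] args) {
    Stopwatch stopwatch = Stopwatch.createStarted();
    try {
      runRequest(); // may throw; the timing is still reported
    } finally {
      stopwatch.stop();
      System.out.println("request took " + stopwatch); // toString(), as in the filter above
    }
  }

  private static void runRequest() {
    try {
      Thread.sleep(120); // placeholder for filterChain.doFilter(request, wrappedResponse)
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}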
Example No. 21
  public synchronized void scheduleRequest() {
    if (closed || (future != null) || scheduled) {
      return;
    }
    scheduled = true;

    // start before scheduling to include error delay
    errorStopwatch.start();

    executor.schedule(
        () -> {
          try {
            initiateRequest();
          } catch (Throwable t) {
            // should not happen, but be safe and fail the operator
            clientCallback.clientFailed(HttpPageBufferClient.this, t);
          }
        },
        errorDelayMillis,
        TimeUnit.MILLISECONDS);

    lastUpdate = DateTime.now();
    requestsScheduled.incrementAndGet();
  }
Example No. 22
 public static void startTimer(String name) {
   Stopwatch stopwatch = Stopwatch.createUnstarted();
   timers.put(name + Thread.currentThread().getId(), stopwatch);
   stopwatch.start();
 }
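startTimer registers a per-thread stopwatch in a shared timers map; a matching stop method would look it up by the same name-plus-thread-id key. A hedged sketch of the full pair, assuming a ConcurrentHashMap registry (the stopTimer method and the map declaration are assumptions, not shown in the original snippet):

import com.google.common.base.Stopwatch;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

public class ThreadTimers {
  private static final Map<String, Stopwatch> timers = new ConcurrentHashMap<>();

  public static void startTimer(String name) {
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    timers.put(name + Thread.currentThread().getId(), stopwatch);
    stopwatch.start();
  }

  // Assumed counterpart: stops the timer for this thread and returns elapsed millis.
  public static long stopTimer(String name) {
    Stopwatch stopwatch = timers.remove(name + Thread.currentThread().getId());
    if (stopwatch == null) {
      return -1; // no timer was started under this name on this thread
    }
    stopwatch.stop();
    return stopwatch.elapsed(TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) throws InterruptedException {
    startTimer("load");
    Thread.sleep(30);
    System.out.println("load took " + stopTimer("load") + " ms");
  }
}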
Example No. 23
 @Override
 public void methodStarting() {
   startNanos = componentStopwatch.elapsed(NANOSECONDS);
   stopwatch.start();
 }
Example No. 24
  private Integer mergeTranslations(
      final Long sourceVersionId,
      final Long targetVersionId,
      final int batchStart,
      final int batchLength,
      final boolean useNewerTranslation,
      final List<HLocale> supportedLocales)
      throws Exception {

    final Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    List<HTextFlow[]> matches =
        textFlowDAO.getSourceByMatchedContext(
            sourceVersionId, targetVersionId, batchStart, batchLength);

    Multimap<DocumentLocaleKey, TextFlowTargetStateChange> eventMap = HashMultimap.create();

    Map<DocumentLocaleKey, Map<ContentState, Long>> docStatsMap = Maps.newHashMap();

    Map<DocumentLocaleKey, Long> lastUpdatedTargetId = Maps.newHashMap();

    for (HTextFlow[] results : matches) {
      HTextFlow sourceTf = results[0];
      HTextFlow targetTf = results[1];
      boolean foundChange = false;
      Map<Long, ContentState> localeContentStateMap = Maps.newHashMap();

      for (HLocale hLocale : supportedLocales) {
        HTextFlowTarget sourceTft = sourceTf.getTargets().get(hLocale.getId());
        // only process translated state
        if (sourceTft == null || !sourceTft.getState().isTranslated()) {
          continue;
        }

        HTextFlowTarget targetTft = targetTf.getTargets().get(hLocale.getId());
        if (targetTft == null) {
          targetTft = new HTextFlowTarget(targetTf, hLocale);
          targetTft.setVersionNum(0);
          targetTf.getTargets().put(hLocale.getId(), targetTft);
        }

        if (MergeTranslationsServiceImpl.shouldMerge(sourceTft, targetTft, useNewerTranslation)) {
          foundChange = true;

          ContentState oldState = targetTft.getState();
          localeContentStateMap.put(hLocale.getId(), oldState);
          mergeTextFlowTarget(sourceTft, targetTft);
        }
      }
      if (foundChange) {
        translationStateCacheImpl.clearDocumentStatistics(targetTf.getDocument().getId());
        textFlowDAO.makePersistent(targetTf);
        textFlowDAO.flush();

        for (Map.Entry<Long, ContentState> entry : localeContentStateMap.entrySet()) {
          HTextFlowTarget updatedTarget = targetTf.getTargets().get(entry.getKey());

          DocumentLocaleKey key =
              new DocumentLocaleKey(
                  targetTf.getDocument().getId(), updatedTarget.getLocale().getLocaleId());

          eventMap.put(
              key,
              new TextFlowTargetStateEvent.TextFlowTargetStateChange(
                  targetTf.getId(),
                  updatedTarget.getId(),
                  updatedTarget.getState(),
                  entry.getValue()));

          lastUpdatedTargetId.put(key, updatedTarget.getId());

          Map<ContentState, Long> contentStateDeltas =
              docStatsMap.get(key) == null ? Maps.newHashMap() : docStatsMap.get(key);

          DocStatsEvent.updateContentStateDeltas(
              contentStateDeltas,
              updatedTarget.getState(),
              entry.getValue(),
              targetTf.getWordCount());

          docStatsMap.put(key, contentStateDeltas);
        }
      }
    }
    Long actorId = authenticatedAccount.getPerson().getId();
    for (Map.Entry<DocumentLocaleKey, Collection<TextFlowTargetStateChange>> entry :
        eventMap.asMap().entrySet()) {
      TextFlowTargetStateEvent tftUpdatedEvent =
          new TextFlowTargetStateEvent(
              entry.getKey(), targetVersionId, actorId, ImmutableList.copyOf(entry.getValue()));
      textFlowTargetStateEvent.fire(tftUpdatedEvent);
    }
    for (Map.Entry<DocumentLocaleKey, Map<ContentState, Long>> entry : docStatsMap.entrySet()) {
      DocStatsEvent docEvent =
          new DocStatsEvent(
              entry.getKey(),
              targetVersionId,
              entry.getValue(),
              lastUpdatedTargetId.get(entry.getKey()));
      docStatsEvent.fire(docEvent);
    }
    stopwatch.stop();
    log.info(
        "Complete merge translations of {} in {}",
        matches.size() * supportedLocales.size(),
        stopwatch);
    return matches.size() * supportedLocales.size();
  }
Example No. 25
  /**
   * Get all the countries
   *
   * <p>http://api.geonames.org/countryInfo?lang=es&username=gsantosgo&style=full
   * http://api.geonames.org/search?featureCode=PCLI&featureCode=PCL&lang=es&username=gsantosgo&style=full
   *
   * @param args
   */
  public static void main(String args[]) {
    MyWebService.setUserName("gsantosgo");

    System.out.println("======================================");
    System.out.println(" Geonames Server: " + MyWebService.getGeoNamesServer());
    System.out.println(" Geonames Server: " + MyWebService.getUserName());

    /*
    // PROXY configuration. To connect correctly it is necessary to configure the proxy.
          System.setProperty("http.proxySet","true");
          System.setProperty("http.proxyHost","10.14.79.204");
          System.setProperty("http.proxyPort","8080");
          System.setProperty("http.proxyUser","");
          System.setProperty("http.proxyPassword","");
     */

    Stopwatch stopwatch = new Stopwatch();

    // Step 1. CountryInfo ======
    stopwatch.start();
    ToponymSearchCriteria toponymSearchCriteria = new ToponymSearchCriteria();
    toponymSearchCriteria.setLanguage("es");
    toponymSearchCriteria.setStyle(Style.FULL);

    ToponymSearchResult toponymSearchResult = new ToponymSearchResult();
    try {
      toponymSearchResult = MyWebService.countryInfo(toponymSearchCriteria);
    } catch (Exception e) {
      e.printStackTrace();
    }
    System.out.println("Número de Paises: " + toponymSearchResult.getTotalResultsCount());
    List<Toponym> toponymList = toponymSearchResult.getToponyms();
    // for (Toponym toponym : toponymList) {
    // System.out.println(toponym);
    //	}
    stopwatch.stop();
    System.out.println(
        String.format(
            "Tiempo transcurrido en %d miliseconds: ",
            stopwatch.elapsedTime(TimeUnit.MILLISECONDS)));

    stopwatch.reset();
    stopwatch.start();

    // Step 2. Country Search
    ToponymSearchCriteria toponymCountrySearchCriteria = new ToponymSearchCriteria();
    toponymCountrySearchCriteria.setLanguage("es");
    toponymCountrySearchCriteria.setStyle(Style.FULL);
    toponymCountrySearchCriteria.setFeatureClass(FeatureClass.A);
    toponymCountrySearchCriteria.setFeatureCodes(
        new String[] {"PCL", "PCLD", "PCLF", "PCLI", "PCLIX", "PCLS"});

    ToponymSearchResult toponymCountrySearchResult = new ToponymSearchResult();
    try {
      toponymCountrySearchResult = MyWebService.search(toponymCountrySearchCriteria);
    } catch (Exception e) {
      e.printStackTrace();
    }
    System.out.println("Número de Paises: " + toponymCountrySearchResult.getTotalResultsCount());
    List<Toponym> toponymCountryList = toponymCountrySearchResult.getToponyms();
    Map<Integer, Toponym> mapToponyms = Maps.newHashMap();
    for (Toponym toponym : toponymCountryList) {
      mapToponyms.put(toponym.getGeoNameId(), toponym);
    }

    int count = 0;
    for (Toponym toponym : toponymList) {
      Toponym mapToponym = mapToponyms.get(toponym.getGeoNameId());
      if (mapToponym == null) {
        count++;
      }
    }
    System.out.println(" ==>" + count);

    stopwatch.stop();
    System.out.println(
        String.format(
            "Tiempo transcurrido en %d miliseconds: ",
            stopwatch.elapsedTime(TimeUnit.MILLISECONDS)));
  }
Example No. 26
  /** @param incomingEndpoints */
  @Override
  public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) {
    watch.reset();
    watch.start();

    final int numSlots = incomingEndpoints.size();
    Preconditions.checkArgument(
        numSlots <= regionsToScan.size(),
        String.format(
            "Incoming endpoints %d is greater than number of scan regions %d",
            numSlots, regionsToScan.size()));

    /*
     * Minimum/Maximum number of assignment per slot
     */
    final int minPerEndpointSlot = (int) Math.floor((double) regionsToScan.size() / numSlots);
    final int maxPerEndpointSlot = (int) Math.ceil((double) regionsToScan.size() / numSlots);

    /*
     * initialize (endpoint index => HBaseSubScanSpec list) map
     */
    endpointFragmentMapping = Maps.newHashMapWithExpectedSize(numSlots);

    /*
     * another map with endpoint (hostname => corresponding index list) in 'incomingEndpoints' list
     */
    Map<String, Queue<Integer>> endpointHostIndexListMap = Maps.newHashMap();

    /*
     * Initialize these two maps
     */
    for (int i = 0; i < numSlots; ++i) {
      endpointFragmentMapping.put(i, new ArrayList<HBaseSubScanSpec>(maxPerEndpointSlot));
      String hostname = incomingEndpoints.get(i).getAddress();
      Queue<Integer> hostIndexQueue = endpointHostIndexListMap.get(hostname);
      if (hostIndexQueue == null) {
        hostIndexQueue = Lists.newLinkedList();
        endpointHostIndexListMap.put(hostname, hostIndexQueue);
      }
      hostIndexQueue.add(i);
    }

    Set<Entry<HRegionInfo, ServerName>> regionsToAssignSet =
        Sets.newHashSet(regionsToScan.entrySet());

    /*
     * First, we assign regions which are hosted on region servers running on drillbit endpoints
     */
    for (Iterator<Entry<HRegionInfo, ServerName>> regionsIterator = regionsToAssignSet.iterator();
        regionsIterator.hasNext(); /*nothing*/ ) {
      Entry<HRegionInfo, ServerName> regionEntry = regionsIterator.next();
      /*
       * Test if there is a drillbit endpoint which is also an HBase RegionServer that hosts the current HBase region
       */
      Queue<Integer> endpointIndexlist =
          endpointHostIndexListMap.get(regionEntry.getValue().getHostname());
      if (endpointIndexlist != null) {
        Integer slotIndex = endpointIndexlist.poll();
        List<HBaseSubScanSpec> endpointSlotScanList = endpointFragmentMapping.get(slotIndex);
        endpointSlotScanList.add(regionInfoToSubScanSpec(regionEntry.getKey()));
        // add to the tail of the slot list, to add more later in round robin fashion
        endpointIndexlist.offer(slotIndex);
        // this region has been assigned
        regionsIterator.remove();
      }
    }

    /*
     * Build priority queues of slots, with ones which has tasks lesser than 'minPerEndpointSlot' and another which have more.
     */
    PriorityQueue<List<HBaseSubScanSpec>> minHeap =
        new PriorityQueue<List<HBaseSubScanSpec>>(numSlots, LIST_SIZE_COMPARATOR);
    PriorityQueue<List<HBaseSubScanSpec>> maxHeap =
        new PriorityQueue<List<HBaseSubScanSpec>>(numSlots, LIST_SIZE_COMPARATOR_REV);
    for (List<HBaseSubScanSpec> listOfScan : endpointFragmentMapping.values()) {
      if (listOfScan.size() < minPerEndpointSlot) {
        minHeap.offer(listOfScan);
      } else if (listOfScan.size() > minPerEndpointSlot) {
        maxHeap.offer(listOfScan);
      }
    }

    /*
     * Now, let's process any regions which remain unassigned and assign them to slots with minimum number of assignments.
     */
    if (regionsToAssignSet.size() > 0) {
      for (Entry<HRegionInfo, ServerName> regionEntry : regionsToAssignSet) {
        List<HBaseSubScanSpec> smallestList = minHeap.poll();
        smallestList.add(regionInfoToSubScanSpec(regionEntry.getKey()));
        if (smallestList.size() < minPerEndpointSlot) {
          minHeap.offer(smallestList);
        }
      }
    }

    /*
     * While there are slots with lesser than 'minPerEndpointSlot' unit work, balance from those with more.
     */
    while (minHeap.peek() != null && minHeap.peek().size() < minPerEndpointSlot) {
      List<HBaseSubScanSpec> smallestList = minHeap.poll();
      List<HBaseSubScanSpec> largestList = maxHeap.poll();
      smallestList.add(largestList.remove(largestList.size() - 1));
      if (largestList.size() > minPerEndpointSlot) {
        maxHeap.offer(largestList);
      }
      if (smallestList.size() < minPerEndpointSlot) {
        minHeap.offer(smallestList);
      }
    }

    /* no slot should be empty at this point */
    assert (minHeap.peek() == null || minHeap.peek().size() > 0)
        : String.format(
            "Unable to assign tasks to some endpoints.\nEndpoints: {}.\nAssignment Map: {}.",
            incomingEndpoints,
            endpointFragmentMapping.toString());

    logger.debug(
        "Built assignment map in {} µs.\nEndpoints: {}.\nAssignment Map: {}",
        watch.elapsed(TimeUnit.NANOSECONDS) / 1000,
        incomingEndpoints,
        endpointFragmentMapping.toString());
  }
Example No. 27
  /**
   * Runs a filter.
   *
   * @param pipelineConfigurationFile Pipeline configuration file.
   * @param duration Timeout period.
   * @param variables Substitution key-value pairs into pipeline configuration file.
   * @throws IOException if configuration cannot be loaded.
   */
  protected void benchmark(
      final String pipelineConfigurationFile,
      final Duration duration,
      final ImmutableMap<String, String> variables)
      throws IOException {
    // Replace any variables in the configuration file
    String configuration =
        Resources.toString(Resources.getResource(pipelineConfigurationFile), Charsets.UTF_8);
    for (final Map.Entry<String, String> entry : variables.entrySet()) {
      configuration = configuration.replace(entry.getKey(), entry.getValue());
    }

    // Load the specified stock configuration
    final PipelineConfiguration stockPipelineConfiguration =
        new StaticConfiguration.Builder()
            .addSource(new JsonNodeLiteralSource.Builder().setSource(configuration).build())
            .setObjectMapper(PipelineConfiguration.createObjectMapper(_injector))
            .build()
            .getRequiredAs(PipelineConfiguration.class);

    // Canary tracking
    LOGGER.info(
        String.format("Expected canaries; periods=%s", stockPipelineConfiguration.getPeriods()));
    final CountDownLatch latch = new CountDownLatch(stockPipelineConfiguration.getPeriods().size());
    final Set<Period> periods = Sets.newConcurrentHashSet();

    // Create custom "canary" sink
    final ListeningSink sink =
        new ListeningSink(
            (periodicData) -> {
              if (periodicData != null) {
                for (final String metricName : periodicData.getData().keys()) {
                  if (TestFileGenerator.CANARY.equals(metricName)) {
                    if (periods.add(periodicData.getPeriod())) {
                      LOGGER.info(
                          String.format(
                              "Canary flew; filter=%s, period=%s",
                              this.getClass(), periodicData.getPeriod()));
                      latch.countDown();
                    }
                  }
                }
              }
              return null;
            });

    // Add the custom "canary" sink
    final List<Sink> benchmarkSinks = Lists.newArrayList(stockPipelineConfiguration.getSinks());
    benchmarkSinks.add(sink);

    // Create the custom configuration
    final PipelineConfiguration benchmarkPipelineConfiguration =
        OvalBuilder.<PipelineConfiguration, PipelineConfiguration.Builder>clone(
                stockPipelineConfiguration)
            .setSinks(benchmarkSinks)
            .build();

    // Instantiate the pipeline
    final Pipeline pipeline = new Pipeline(benchmarkPipelineConfiguration);

    // Execute the pipeline until the canary flies the coop
    try {
      LOGGER.debug(
          String.format("Launching pipeline; configuration=%s", pipelineConfigurationFile));
      final Stopwatch timer = Stopwatch.createUnstarted();
      timer.start();
      pipeline.launch();

      if (!latch.await(duration.getMillis(), TimeUnit.MILLISECONDS)) {
        LOGGER.error("Test timed out");
        throw new RuntimeException("Test timed out");
      }

      timer.stop();
      LOGGER.info(
          String.format(
              "Performance filter result; filter=%s, seconds=%s",
              this.getClass(), timer.elapsed(TimeUnit.SECONDS)));

    } catch (final InterruptedException e) {
      Thread.interrupted();
      throw new RuntimeException("Test interrupted");
    } finally {
      pipeline.shutdown();
    }
  }
Example No. 28
 /** starts the timer, either initially or if {@link #pause()}d; no-op if already running */
 public synchronized CountdownTimer start() {
   if (!stopwatch.isRunning()) stopwatch.start();
   return this;
 }
Example No. 29
  @Test
  @Ignore
  public void testPerformance(
      @Injectable final DrillbitContext bitContext,
      @Injectable UserServer.UserClientConnection connection)
      throws Exception {
    DrillConfig c = DrillConfig.create();
    FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
    FragmentContext context =
        new FragmentContext(
            bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);

    //    new NonStrictExpectations() {
    //      {
    //        context.getAllocator(); result = BufferAllocator.getAllocator(DrillConfig.create());
    //      }
    //    };

    final String fileName = "/tmp/parquet_test_performance.parquet";
    HashMap<String, FieldInfo> fields = new HashMap<>();
    ParquetTestProperties props =
        new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
    populateFieldInfoMap(props);
    // generateParquetFile(fileName, props);

    Configuration dfsConfig = new Configuration();
    List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
    Footer f = footers.iterator().next();

    List<SchemaPath> columns = Lists.newArrayList();
    columns.add(new SchemaPath("_MAP.integer", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.bigInt", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.f", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.d", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.b", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.bin", ExpressionPosition.UNKNOWN));
    columns.add(new SchemaPath("_MAP.bin2", ExpressionPosition.UNKNOWN));
    int totalRowCount = 0;

    FileSystem fs = new CachedSingleFileSystem(fileName);
    BufferAllocator allocator = new TopLevelAllocator();
    for (int i = 0; i < 25; i++) {
      ParquetRecordReader rr =
          new ParquetRecordReader(
              context,
              256000,
              fileName,
              0,
              fs,
              new CodecFactoryExposer(dfsConfig),
              f.getParquetMetadata(),
              columns);
      TestOutputMutator mutator = new TestOutputMutator(allocator);
      rr.setup(mutator);
      Stopwatch watch = new Stopwatch();
      watch.start();

      int rowCount = 0;
      while ((rowCount = rr.next()) > 0) {
        totalRowCount += rowCount;
      }
      System.out.println(
          String.format("Time completed: %s. ", watch.elapsed(TimeUnit.MILLISECONDS)));
      rr.cleanup();
    }

    allocator.close();
    System.out.println(String.format("Total row count %s", totalRowCount));
  }
Example No. 30
 @Override
 public void queryIdArrived(QueryId queryId) {
   w.start();
   this.queryId = queryId;
 }