Example No. 1
    @Override
    public void run() {
      try {
        StopWatch stp2 = new StopWatch();
        stp2.start();
        JSONObject json = new JSONObject();
        job.setStatus("running");
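        // a progress of 100% means every batch has been processed: finalize the job and stop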
        if (job.progress() == 100.0) {

          finalizeJob(job);
          return;
        }
        Vector<String> ids = new Vector<String>();
        Vector<String> original_names = new Vector<String>();

        String data = job.getNextDataBatch();

        if (data == null || data.equals("")) return;

        String[] lines = data.split("\n");

        if (job.containsId()) {
          for (int i = 0; i < lines.length; i++) {
            if (lines[i].trim().equals("")) continue;
            ids.add(NameUtil.getNameId(lines[i]));
          }
        }

        for (int i = 0; i < lines.length; i++) {
          original_names.add(NameUtil.processName(lines[i], job.containsId()));
        }

        String names = NameUtil.CleanNames(lines, job);

        if (names.equals("")) return;

        if (job.getType() == TnrsJob.NAME_MATCH_JOB) {

          TaxamatchInterface taxa_match = new TaxamatchInterface(tnrsBaseUrl);
          String result = taxa_match.queryTaxamatch(names, job);
          json = (JSONObject) JSONSerializer.toJSON(result);

        } else if (job.getType() == TnrsJob.PARSING_JOB) {

          json = gni_interface.parseNames(names);
        }
        if (job.outstandingNames() == 0) {
          JobHelper.persistJobInfo(baseFolder, job);
        }
        saveResults(job, json, ids, original_names, "");

        job.setStatus("idle");
        stp2.stop();
        log.info("overall :" + stp2.toString());
      } catch (Exception ex) {
        log.error(ExceptionUtils.getFullStackTrace(ex));
        job.setStatus("failed");
        ex.printStackTrace();
      }
    }
Example No. 2
 /**
  * Tests Redis Sentinel mode.
  *
  * <p>one master (one sentinel) - one slave
  */
 @Test
 public void testSentinel() {
   // initialize the sentinel pool
   HostAndPort sentinelAddr = new HostAndPort("192.168.1.201", 26379);
   Set<String> sentinels = new HashSet<String>();
   sentinels.add(sentinelAddr.toString());
   JedisSentinelPool sentinelPool =
       new JedisSentinelPool("mymaster", sentinels, new GenericObjectPoolConfig());
   // thread initialization
   StopWatch stopWatch = new StopWatch();
   stopWatch.start();
   AtomicInteger calcCount = new AtomicInteger(0);
   AtomicInteger failCount = new AtomicInteger(0);
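   // each round creates a THREADS-sized pool, submits one SentinelThread, then waits SECONDS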
   for (int t = 0; t < TIMES; t++) {
     ThreadPool threadPool = new ThreadPool(THREADS);
     SentinelThread sentinelThread = new SentinelThread(sentinelPool, calcCount, failCount);
     threadPool.executeThread(sentinelThread);
     try {
       TimeUnit.SECONDS.sleep(SECONDS);
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
       System.err.println("error !!!");
     }
   }
   sentinelPool.close();
   stopWatch.stop();
   // print the results
   System.out.println(
       String.format(
           "redis sentinel test finished, elapsed: %d ms, failures: %d",
           stopWatch.getTime(), failCount.get()));
 }
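SentinelThread itself is not part of the snippet above. The following is a minimal sketch of what such a worker might look like, assuming the standard Jedis API (getResource() on the pool, Jedis implementing Closeable); the class name and counters mirror the test, but the body is illustrative, not the original source.

import java.util.concurrent.atomic.AtomicInteger;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisSentinelPool;

public class SentinelThread implements Runnable {

  private final JedisSentinelPool sentinelPool;
  private final AtomicInteger calcCount;
  private final AtomicInteger failCount;

  public SentinelThread(
      JedisSentinelPool sentinelPool, AtomicInteger calcCount, AtomicInteger failCount) {
    this.sentinelPool = sentinelPool;
    this.calcCount = calcCount;
    this.failCount = failCount;
  }

  @Override
  public void run() {
    // borrow a connection from the pool; the sentinel decides which master it points at
    try (Jedis jedis = sentinelPool.getResource()) {
      jedis.set("counter", String.valueOf(calcCount.incrementAndGet()));
    } catch (Exception e) {
      // count any connection or command failure
      failCount.incrementAndGet();
    }
  }
}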
Example No. 3
  protected void testOutgoingConnection() throws MailException {
    StopWatch stopWatch = null;

    if (_log.isDebugEnabled()) {
      stopWatch = new StopWatch();

      stopWatch.start();
    }

    try {
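      // opening and closing the transport is enough to verify the outgoing mail connection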
      Transport transport = getTransport();

      transport.isConnected();

      transport.close();
    } catch (Exception e) {
      throw new MailException(MailException.ACCOUNT_OUTGOING_CONNECTION_FAILED, e);
    } finally {
      if (_log.isDebugEnabled()) {
        stopWatch.stop();

        _log.debug("Testing outgoing connection completed in " + stopWatch.getTime() + " ms");
      }
    }
  }
  @Test
  public void testListCalendarNames() {
    initializeCredentials();
    FindFolder request = constructFindFolderRequest();
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    FindFolderResponse response = ewsClient.findFolder(request);
    String captured = capture(response);
    log.info("testListCalendarNames response: " + captured);
    stopWatch.stop();
    log.debug("FindFolder request completed in " + stopWatch);
    Assert.assertNotNull(response);

    // now generate a Map?
    Map<String, String> msolCalendars = new LinkedHashMap<String, String>();
    ArrayOfResponseMessagesType responses = response.getResponseMessages();
    List<JAXBElement<? extends ResponseMessageType>> responseList =
        responses
            .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages();
    // iterate over responses
    for (JAXBElement<? extends ResponseMessageType> rm : responseList) {
      FindFolderResponseMessageType itemType = (FindFolderResponseMessageType) rm.getValue();
      FindFolderParentType rootFolder = itemType.getRootFolder();
      ArrayOfFoldersType folders = rootFolder.getFolders();
      List<BaseFolderType> folderList = folders.getFoldersAndCalendarFoldersAndContactsFolders();
      for (BaseFolderType baseFolder : folderList) {
        String displayName = baseFolder.getDisplayName();
        String folderId = baseFolder.getFolderId().getId();
        String changeKey = baseFolder.getFolderId().getChangeKey();
        log.debug(displayName + "(id=" + folderId + " : changeKey=" + changeKey + " )");
      }
    }
  }
Example No. 5
  @GET
  @Transactional
  @Path("/user/{userName}/accountBalance")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getAccountBalance(
      @PathParam("apiVersion") String apiVersion,
      @PathParam("userName") String userName,
      @DefaultValue("true") @QueryParam("cache") boolean cache)
      throws JsonProcessingException {
    StopWatch watch = new StopWatch();
    watch.start();

    EntityManager em = emf.createEntityManager();
    if (log.isDebugEnabled()) {
      log.trace("Time to execute getTree Service : {}ms", watch.getTime());
    }
    //		if (!accountValidator.validateAccount(accountId)) {
    //			return
    // Response.status(Response.Status.UNAUTHORIZED).entity(accountValidator.getResponse(accountId)).build();
    //		}
    long accNo = 746353;
    // BossUserRepository userRepo = new BossUserRepository();
    double accountBalance = userRepo.account(accNo);

    watch.stop();
    if (log.isDebugEnabled()) {
      log.trace("Time to execute ACCOUNTDETAILS for a USER : {}ms", watch.getTime());
    }
    return Response.status(Response.Status.OK).entity(accountBalance).build();
  }
  @Override
  protected Client createClient(Settings.Builder builder) {
    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    if (_log.isDebugEnabled()) {
      _log.debug(
          "Starting embedded Elasticsearch cluster " + elasticsearchConfiguration.clusterName());
    }

    NodeBuilder nodeBuilder = NodeBuilder.nodeBuilder();

    nodeBuilder.settings(builder);

    _node = nodeBuilder.node();

    _node.start();

    Client client = _node.client();

    if (_log.isDebugEnabled()) {
      stopWatch.stop();

      _log.debug(
          "Finished starting "
              + elasticsearchConfiguration.clusterName()
              + " in "
              + stopWatch.getTime()
              + " ms");
    }

    return client;
  }
Example No. 7
  protected void performWarmUp(SessionFactory sf) {
    log("starting warm up phase");

    warmupStopWatch.start();

    TestContext ctx = new TestContext(this, sf);
    scheduleTasksAndStart(ctx, warmupCyclesCount);

    warmupStopWatch.stop();
  }
  private void waitFor(Id id, Pattern pattern) {
    if (pattern == null) {
      return;
    }

    final StopWatch watch = new StopWatch();
    watch.start();
    logger.info(String.format("Waiting for '%s' to appear in output", pattern.toString()));

    final Container container;

    try {
      container = findContainer(id);
    } catch (DockerException e) {
      throw new OrchestrationException(e);
    }

    if (container == null) {
      logger.warn(String.format("Can not find container %s, not waiting", id));
      return;
    }

    try {
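      // follow the container's stdout/stderr and scan each log line for the pattern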
      final LogContainerCmd logContainerCmd =
          docker
              .logContainerCmd(container.getId())
              .withStdErr()
              .withStdOut()
              .withFollowStream()
              .withTimestamps();

      final InputStream stream = logContainerCmd.exec();

      try (final BufferedReader reader = new BufferedReader(new InputStreamReader(stream))) {
        String line;
        while ((line = reader.readLine()) != null) {
          if (pattern.matcher(line).find()) {
            watch.stop();
            logger.info(String.format("Waited for %s", watch.toString()));
            return;
          }
        }
        throw new OrchestrationException("Container log ended before line appeared in output");
      }
    } catch (Exception e) {
      logger.warn(
          "Unable to obtain logs from container "
              + container.getId()
              + ", will continue without waiting: ",
          e);
    }
  }
Example No. 9
 @SyncThread(level = 1000)
 private void createVm(
     VmInstanceInventory vm, String rootDiskUuid, List<String> nws, List<String> disks)
     throws ApiSenderException {
   StopWatch watch = new StopWatch();
   watch.start();
   try {
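      // time the synchronous create call; the finally block records the duration and releases the latch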
     api.createVmByFullConfig(vm, rootDiskUuid, nws, disks);
   } finally {
     watch.stop();
     timeCost.add(watch.getTime());
     latch.countDown();
   }
 }
 /**
  * Issues a {@link GetUserAvailabilityRequest} for the configured emailAddress, startDate and
  * endDate. Verifies a response, and that the freebusy responses match expectedEventCount.
  */
 @Test
 public void testGetUserAvailability() {
   initializeCredentials();
   GetUserAvailabilityRequest request =
       constructAvailabilityRequest(
           DateHelp.makeDate(startDate), DateHelp.makeDate(endDate), emailAddress);
   StopWatch stopWatch = new StopWatch();
   stopWatch.start();
   GetUserAvailabilityResponse response = ewsClient.getUserAvailability(request);
   stopWatch.stop();
   log.debug("GetUserAvailability request completed in " + stopWatch);
   Assert.assertNotNull(response);
   Assert.assertEquals(
       expectedEventCount, response.getFreeBusyResponseArray().getFreeBusyResponses().size());
 }
Example No. 11
  @GET
  @Path("/")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getTree(
      @PathParam("apiVersion") String apiVersion,
      @DefaultValue("true") @QueryParam("cache") boolean cache)
      throws JsonProcessingException {
    StopWatch watch = new StopWatch();
    watch.start();

    if (log.isDebugEnabled()) {
      log.trace("Time to execute getTree Service : {}ms", watch.getTime());
    }
    return Response.status(Response.Status.OK).build();
  }
  /**
   * Import data from a data-file or a data-package located on the server.
   *
   * @return a JSON success payload, or the error payload produced by handleException
   */
  @RequestMapping(method = RequestMethod.POST, value = "/" + Constants.DS_ACTION_IMPORT)
  @ResponseBody
  public String importData(
      @RequestParam(value = "dataPackage", required = false) String dataPackage,
      @RequestParam(value = "dataFile", required = false) String dataFile,
      @RequestParam(value = "dsName", required = false) String dsName,
      @RequestParam(value = "ukFieldName", required = false) String ukFieldName,
      HttpServletRequest request,
      HttpServletResponse response)
      throws Exception {

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    if (logger.isInfoEnabled()) {
      logger.info("Processing request: import data from file(s) located on server");
    }

    if (logger.isDebugEnabled()) {
      if (dataPackage != null) {
        logger.debug("  --> dataPackage: {} ", new Object[] {dataPackage});
      } else {
        logger.debug(
            "  --> dataFile: {}, dsName: {}, ukFieldName: {} ",
            new Object[] {dataFile, dsName, ukFieldName});
      }
    }

    this.prepareRequest(request, response);
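    // a non-empty dataPackage takes precedence over a single dataFile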
    try {

      if (dataPackage != null && !"".equals(dataPackage)) {
        this.getApplicationContext()
            .getBean(IImportDataPackageService.class)
            .doImportDataPackage(dataPackage);
      } else if (dataFile != null && !"".equals(dataFile)) {
        this.getApplicationContext()
            .getBean(IImportDataPackageService.class)
            .doImportDataFile(dataFile, dsName, ukFieldName);
      }

      return "{\"success\":true}";
    } catch (Exception e) {
      return this.handleException(e, response);
    } finally {
      this.finishRequest();
    }
  }
 @PostConstruct
 public void createIndex() {
   StopWatch stopWatch = new StopWatch();
   stopWatch.start();
   try {
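      // rebuild the full-text index for CLASSES_TO_BE_INDEXED using Hibernate Search's mass indexer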
     getFullTextEntityManager(entityManager) //
         .createIndexer(CLASSES_TO_BE_INDEXED) //
         .batchSizeToLoadObjects(batchSizeToLoadObjects) //
         .threadsToLoadObjects(threadsToLoadObjects) //
         .threadsForSubsequentFetching(threadsForSubsequentFetching) //
         .start();
   } finally {
     stopWatch.stop();
     log.info("Indexed {} in {}", Arrays.toString(CLASSES_TO_BE_INDEXED), stopWatch.toString());
   }
 }
Example No. 14
  /** @param orgIn the org whose errata cache should be refreshed */
  private void publishUpdateErrataCacheEvent(Org orgIn) {
    StopWatch sw = new StopWatch();
    if (log.isDebugEnabled()) {
      log.debug("Updating errata cache");
      sw.start();
    }

    UpdateErrataCacheEvent uece = new UpdateErrataCacheEvent(UpdateErrataCacheEvent.TYPE_ORG);
    uece.setOrgId(orgIn.getId());
    MessageQueue.publish(uece);

    if (log.isDebugEnabled()) {
      sw.stop();
      log.debug("Finished Updating errata cache. Took [" + sw.getTime() + "]");
    }
  }
Example No. 15
  protected void initIndex(SessionFactory sf) {
    log("starting initialize index");

    initIndexStopWatch.start();

    FullTextSession s = Search.getFullTextSession(sf.openSession());
    try {
      s.createIndexer().startAndWait();
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    } finally {
      s.close();
    }

    initIndexStopWatch.stop();
  }
  @Override
  protected Client createClient(Settings.Builder builder) {
    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    if (_log.isWarnEnabled()) {
      StringBundler sb = new StringBundler(6);

      sb.append("Liferay is configured to use embedded Elasticsearch ");
      sb.append("as its search engine. Do NOT use embedded ");
      sb.append("Elasticsearch in production. Embedded Elasticsearch ");
      sb.append("is useful for development and demonstration purposes. ");
      sb.append("Remote Elasticsearch connections can be configured in ");
      sb.append("the Control Panel.");

      _log.warn(sb);
    }

    if (_log.isDebugEnabled()) {
      _log.debug(
          "Starting embedded Elasticsearch cluster " + elasticsearchConfiguration.clusterName());
    }

    NodeBuilder nodeBuilder = NodeBuilder.nodeBuilder();

    nodeBuilder.settings(builder);

    _node = nodeBuilder.node();

    _node.start();

    Client client = _node.client();

    if (_log.isDebugEnabled()) {
      stopWatch.stop();

      _log.debug(
          "Finished starting "
              + elasticsearchConfiguration.clusterName()
              + " in "
              + stopWatch.getTime()
              + " ms");
    }

    return client;
  }
Example No. 17
  @Test
  public void rdfsEntailQuery() throws EngineException, MalformedURLException, IOException {

    Graph gRes = Graph.create(false);
    QueryProcessDQP exec = QueryProcessDQP.create(gRes);
    exec.addRemote(new URL("http://localhost:" + port + "/kgram/sparql"), WSImplem.REST);

    StopWatch sw = new StopWatch();
    sw.start();
    Mappings res = exec.query(sparqlEntailQueryPerson);

    System.out.println("--------");
    System.out.println("Results in " + sw.getTime() + "ms");
    System.out.println(res);

    assertEquals(17, res.size());
  }
Example No. 18
  /**
   * Transforms an EDGE request into a simple SPARQL query pushed to the remote producer. Results
   * are returned through standard web services protocol.
   *
   * @param gNode graph variable if it exists, null otherwise
   * @param from "from named <g>" list
   * @param qEdge edge searched for
   * @param env query execution context (current variable values, etc.)
   * @return an iterator over graph entities
   */
  @Override
  public Iterable<Entity> getEdges(Node gNode, List<Node> from, Edge qEdge, Environment env) {
    // if gNode != null and from is non-empty, then "from named"
    // if gNode == null and from is non-empty, then "from"

    String query = getSparqlQuery(qEdge, env);
    Graph resGraph = Graph.create();
    Graph g = Graph.create();

    StopWatch sw = new StopWatch();
    sw.start();

    InputStream is = null;
    try {
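      // evaluate the generated SPARQL query locally, serialize the mappings as RDF, and load them into g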
      QueryProcess exec = QueryProcess.create(resGraph);

      if (query != null) {
        Mappings map = exec.query(query);

        //            logger.info("Received results in " + sw.getTime());

        String sparqlRes = RDFFormat.create(map).toString();
        //            System.out.println(XMLFormat.create(map));

        if (sparqlRes != null) {
          Load l = Load.create(g);
          is = new ByteArrayInputStream(sparqlRes.getBytes());
          l.load(is);
          //                logger.info("Results (cardinality " + g.size() + ") merged in  " +
          // sw.getTime() + " ms.");
        }
      }

    } catch (LoadException ex) {
      ex.printStackTrace();
    } catch (EngineException ex) {
      ex.printStackTrace();
    }
    //        for (Iterator<Entity> it = g.getEdges().iterator(); it.hasNext();) {
    //            Edge e = (Edge) it.next();
    //            System.out.println(e);
    //        }
    //
    return g.getEdges();
  }
  /**
   * Returns <code>true</code> if the roles have permission to perform the action on the resources.
   *
   * @param userId the primary key of the user performing the permission check
   * @param resourceId the primary key of the resource, typically the scope group ID representing
   *     the scope in which the permission check is being performed
   * @param resources the resources for which permissions are to be checked
   * @param actionId the primary key of the action to be performed on the resources
   * @param roleIds the primary keys of the roles
   * @return <code>true</code> if the roles have permission to perform the action on the resources;
   *     <code>false</code> otherwise
   * @throws PortalException if any one of the roles with the primary keys could not be found or if
   *     a resource action with the action ID could not be found
   * @throws SystemException if a system exception occurred
   */
  @Override
  public boolean hasUserPermissions(
      long userId, long resourceId, List<Resource> resources, String actionId, long[] roleIds)
      throws PortalException, SystemException {

    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    int block = 1;

    boolean hasUserPermissions =
        resourcePermissionLocalService.hasResourcePermission(resources, roleIds, actionId);

    logHasUserPermissions(userId, resourceId, actionId, stopWatch, block++);

    return hasUserPermissions;
  }
Example No. 20
  /** @param args args[0] is the filesystem path of the Lucene index to optimize */
  public static void main(String[] args) {
    String idxLocation = args[0];
    try {
      IndexWriter iw =
          new IndexWriter(idxLocation, new StandardAnalyzer(), MaxFieldLength.UNLIMITED);
      LOG.info("opened index " + idxLocation + " starting optimization...");
      StopWatch watch = new StopWatch();
      watch.start();

      iw.optimize();
      iw.close();

      watch.stop();
      LOG.info("done. took " + watch);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  @Override
  public Observable<Void> unpause() {
    if (stopWatchState == STATE_PAUSED) {
      stopWatch.resume();
      stopWatchState = STATE_RUNNING;
    }

    return Observable.just(null);
  }
  @Override
  public Observable<Void> pause() {
    if (stopWatchState == STATE_RUNNING) {
      stopWatch.suspend();
      stopWatchState = STATE_PAUSED;
    }

    return Observable.just(null);
  }
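The pause() and unpause() overrides above rely on StopWatch's suspend/resume support. As a standalone illustration (a minimal sketch assuming org.apache.commons.lang3.time.StopWatch; the older org.apache.commons.lang version behaves the same way), time accumulated before suspend() is kept and accumulation continues after resume():

import org.apache.commons.lang3.time.StopWatch;

public class StopWatchSuspendDemo {

  public static void main(String[] args) throws InterruptedException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    Thread.sleep(100); // measured

    stopWatch.suspend(); // corresponds to pause() above
    Thread.sleep(100); // not measured while suspended
    stopWatch.resume(); // corresponds to unpause() above

    Thread.sleep(100); // measured again
    stopWatch.stop();

    System.out.println(stopWatch.getTime() + " ms"); // roughly 200 ms
  }
}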
Example No. 23
  @Test
  public void localEntailments() throws EngineException {
    Graph localGraph = Graph.create(true);
    Load ld = Load.create(localGraph);
    ld.load(humanData.getAbsolutePath());
    ld.load(humanOnt.getAbsolutePath());

    QueryProcess exec = QueryProcess.create(localGraph);
    StopWatch sw = new StopWatch();
    sw.start();
    Mappings res = exec.query(sparqlEntailQueryPerson);

    System.out.println("--------");
    System.out.println("Results in " + sw.getTime() + "ms");
    System.out.println(res);

    assertEquals(17, res.size());
  }
Example No. 24
  private void _generateImagesGS(FileVersion fileVersion, File file) throws Exception {

    if (_isGeneratePreview(fileVersion)) {
      StopWatch stopWatch = null;

      if (_log.isInfoEnabled()) {
        stopWatch = new StopWatch();

        stopWatch.start();
      }

      _generateImagesGS(fileVersion, file, false);

      if (_log.isInfoEnabled()) {
        int previewFileCount = getPreviewFileCount(fileVersion);

        _log.info(
            "Ghostscript generated "
                + previewFileCount
                + " preview pages for "
                + fileVersion.getTitle()
                + " in "
                + stopWatch);
      }
    }

    if (_isGenerateThumbnail(fileVersion)) {
      StopWatch stopWatch = null;

      if (_log.isInfoEnabled()) {
        stopWatch = new StopWatch();

        stopWatch.start();
      }

      _generateImagesGS(fileVersion, file, true);

      if (_log.isInfoEnabled()) {
        _log.info(
            "Ghostscript generated a thumbnail for " + fileVersion.getTitle() + " in " + stopWatch);
      }
    }
  }
Example No. 25
  protected void initDatabase(SessionFactory sf) {
    log("starting initialize database");

    initDatabaseStopWatch.start();

    BatchSupport batchSupport = new BatchSupport(sf, initialOffset);
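    // seed the author, book, and book_author tables in JDBC batches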
    batchSupport.execute(
        "insert into author(id, name) values(?, ?)",
        initialAutorCount,
        new BatchCallback() {
          @Override
          public void initStatement(PreparedStatement ps, long id) throws SQLException {
            ps.setLong(1, id);
            ps.setString(2, "autor" + id);
          }
        });
    batchSupport.execute(
        "insert into book(id, title, summary, rating, totalSold, publicationDate) values(?, ?, ?, ?, ?, ?)",
        initialBookCount,
        new BatchCallback() {
          @Override
          public void initStatement(PreparedStatement ps, long id) throws SQLException {
            ps.setLong(1, id);
            ps.setString(2, "title" + id);
            ps.setString(3, reverse(SUMMARIES[(int) (id % SUMMARIES.length)]));
            ps.setLong(4, -1);
            ps.setLong(5, -1);
            ps.setDate(6, new Date(PUBLICATION_DATE_ZERO.getTime()));
          }
        });
    batchSupport.execute(
        "insert into book_author(book_id, authors_id) values(?, ?)",
        initialBookCount,
        new BatchCallback() {
          @Override
          public void initStatement(PreparedStatement ps, long id) throws SQLException {
            ps.setLong(1, id);
            ps.setLong(2, initialOffset + (id % initialAutorCount));
          }
        });

    initDatabaseStopWatch.stop();
  }
 /**
  * Similar to {@link #testGetUserAvailability()}, but uses {@link FindItem}.
  *
  * @throws JAXBException
  */
 @Test
 public void testFindItemCalendarType() throws JAXBException {
   initializeCredentials();
   FindItem request =
       constructFindItemRequest(
           DateHelp.makeDate(startDate), DateHelp.makeDate(endDate), emailAddress);
   StopWatch stopWatch = new StopWatch();
   stopWatch.start();
   FindItemResponse response = ewsClient.findItem(request);
   stopWatch.stop();
   log.debug("FindItem request completed in " + stopWatch);
   Assert.assertNotNull(response);
   Assert.assertEquals(
       expectedEventCount,
       response
           .getResponseMessages()
           .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages()
           .size());
 }
Example No. 27
  public void testLotsOfLoanRequests() throws Exception {
    final MuleClient client = new MuleClient();
    Customer c = new Customer("Ross Mason", 1234);
    CustomerQuoteRequest[] requests = new CustomerQuoteRequest[3];
    requests[0] = new CustomerQuoteRequest(c, 100000, 48);
    requests[1] = new CustomerQuoteRequest(c, 1000, 12);
    requests[2] = new CustomerQuoteRequest(c, 10, 24);

    final StopWatch stopWatch = new StopWatch();

    final int numRequests = getNumberOfRequests() + getWarmUpMessages();
    int i = 0;

    int numberOfThreads = 1;

    CountDownLatch latch = new CountDownLatch(numberOfThreads);
    ExceptionHolder exceptionHolder = new ExceptionHolder();
    try {
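      // receiver threads consume the responses; timing starts only after the warm-up messages are dispatched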
      for (int x = 0; x < numberOfThreads; x++) {
        Thread thread =
            new Thread(new ClientReceiver(latch, numRequests / numberOfThreads, exceptionHolder));
        thread.start();
      }

      for (i = 0; i < numRequests; i++) {
        if (i == getWarmUpMessages()) {
          stopWatch.start();
        }
        client.dispatch("CustomerRequests", requests[i % 3], null);
      }
    } finally {
      latch.await();
      stopWatch.stop();
      System.out.println("Total running time was: " + stopWatch.getTime() + "ms");
      System.out.println("Requests processed was: " + i);
      int mps = (int) (numRequests / ((double) stopWatch.getTime() / (double) 1000));
      System.out.println("Msg/sec: " + mps + " (warm up msgs = " + getWarmUpMessages() + ")");
      if (exceptionHolder.isExceptionThrown()) {
        exceptionHolder.print();
        fail("Exceptions thrown during async processing");
      }
    }
  }
Example No. 28
  @Override
  public void loadIndex(long companyId, InputStream inputStream) throws IOException {

    if (!isLoadIndexFromClusterEnabled()) {
      return;
    }

    IndexAccessor indexAccessor = _indexAccessors.get(companyId);

    if (indexAccessor == null) {
      if (_log.isInfoEnabled()) {
        _log.info(
            "Skip loading Lucene index files for company "
                + companyId
                + " in favor of lazy loading");
      }

      return;
    }

    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    if (_log.isInfoEnabled()) {
      _log.info("Start loading Lucene index files for company " + companyId);
    }

    indexAccessor.loadIndex(inputStream);

    if (_log.isInfoEnabled()) {
      _log.info(
          "Finished loading index files for company "
              + companyId
              + " in "
              + stopWatch.getTime()
              + " ms");
    }
  }
  protected void reindex(Indexer indexer) throws Exception {
    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    if (_log.isInfoEnabled()) {
      _log.info("Reindexing with " + indexer.getClass() + " started");
    }

    indexer.reindex(new String[] {String.valueOf(_companyId)});

    _usedSearchEngineIds.add(indexer.getSearchEngineId());

    if (_log.isInfoEnabled()) {
      _log.info(
          "Reindexing with "
              + indexer.getClass()
              + " completed in "
              + (stopWatch.getTime() / Time.SECOND)
              + " seconds");
    }
  }
  public MetadataSegment fetchMetadataOld(String type, String[] levels) throws RetsReplyException {
    // Always need system to get version and date
    MSystem system = findSystemFromHibernate();

    MetadataFinder finder = (MetadataFinder) sMetadataFinders.get(type);
    if (finder != null) {
      StopWatch stopWatch = new StopWatch();
      LOG.debug("Using finder for type: " + type);
      stopWatch.start();
      List metadata = finder.findMetadata(levels, mSessions);
      stopWatch.stop();
      LOG.debug("End finder: " + stopWatch.getTime());
      return new MetadataSegment(metadata, levels, system.getVersionString(), system.getDate());
    } else {
      LOG.warn(
          "Recieved query for unknown metadataResults type: "
              + type
              + ", level="
              + StringUtils.join(levels, ":"));
      throw new RetsReplyException(ReplyCode.INVALID_TYPE, type);
    }
  }