Example 1
  private List<Future<Info>> getBatches(DataLoadThreadTime dataLoadThreadTime, Scenario scenario)
      throws Exception {
    RowCalculator rowCalculator = new RowCalculator(getThreadPoolSize(), scenario.getRowCount());
    List<Future<Info>> writeBatches = new ArrayList<>();

    for (int i = 0; i < getThreadPoolSize(); i++) {
      List<Column> phxMetaCols =
          pUtil.getColumnsFromPhoenix(
              scenario.getSchemaName(),
              scenario.getTableNameWithoutSchemaName(),
              pUtil.getConnection());
      int threadRowCount = rowCalculator.getNext();
      logger.info("Kick off thread (#" + i + ") for upsert with (" + threadRowCount + ") rows.");
      Future<Info> write =
          upsertData(
              scenario, phxMetaCols, scenario.getTableName(), threadRowCount, dataLoadThreadTime);
      writeBatches.add(write);
    }
    if (writeBatches.isEmpty()) {
      throw new PherfException(
          "Holy shit snacks! Throwing up hands in disbelief and exiting. Could not write data for some unknown reason.");
    }

    return writeBatches;
  }
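
Example 1 relies on RowCalculator.getNext() to hand each writer thread its share of scenario.getRowCount(). That class is not shown above; the sketch below is a hypothetical even-split-with-remainder stand-in (EvenRowSplitter is an illustrative name, not the actual Pherf implementation):

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for RowCalculator: distributes rowCount rows across
// `buckets` callers, spreading any remainder over the first few buckets.
class EvenRowSplitter {
  private final List<Integer> shares = new ArrayList<>();

  EvenRowSplitter(int buckets, int rowCount) {
    int base = rowCount / buckets;
    int remainder = rowCount % buckets;
    for (int i = 0; i < buckets; i++) {
      // The first `remainder` buckets receive one extra row each.
      shares.add(base + (i < remainder ? 1 : 0));
    }
  }

  // Returns the next bucket's row count, mirroring how getBatches() calls
  // rowCalculator.getNext() once per writer thread.
  int getNext() {
    return shares.remove(shares.size() - 1);
  }
}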
Example 2
 public ScenarioResult(Scenario scenario) {
   this.setDataOverride(scenario.getDataOverride());
   this.setPhoenixProperties(scenario.getPhoenixProperties());
   this.setRowCount(scenario.getRowCount());
   this.setTableName(scenario.getTableName());
   this.setName(scenario.getName());
 }
Example 3
  private synchronized void exec(
      DataLoadTimeSummary dataLoadTimeSummary,
      DataLoadThreadTime dataLoadThreadTime,
      Scenario scenario)
      throws Exception {
    logger.info("\nLoading " + scenario.getRowCount() + " rows for " + scenario.getTableName());
    long start = System.currentTimeMillis();

    List<Future<Info>> writeBatches = getBatches(dataLoadThreadTime, scenario);

    waitForBatches(dataLoadTimeSummary, scenario, start, writeBatches);

    // always update stats for Phoenix base tables
    updatePhoenixStats(scenario.getTableName());
  }
Example 4
  /**
   * Default the writers to use up all available cores for threads. If writeParams are used in the
   * config files, they will override the defaults. writeParams are used for read/write mixed
   * workloads. TODO extract notion of the scenario list and have 1 write workload per scenario
   *
   * @param phoenixUtil {@link org.apache.phoenix.pherf.util.PhoenixUtil} Query helper
   * @param properties {@link java.util.Properties} default properties to use
   * @param parser {@link org.apache.phoenix.pherf.configuration.XMLConfigParser}
   * @param scenario {@link org.apache.phoenix.pherf.configuration.Scenario} If null is passed it
   *     will run against all scenarios in the parsers list.
   * @throws Exception
   */
  public WriteWorkload(
      PhoenixUtil phoenixUtil, Properties properties, XMLConfigParser parser, Scenario scenario)
      throws Exception {
    this.pUtil = phoenixUtil;
    this.parser = parser;
    this.rulesApplier = new RulesApplier(parser);
    this.resultUtil = new ResultUtil();

    // Overwrite default properties with those given in the configuration. This indicates the
    // scenario is an R/W mixed workload.
    if (scenario != null) {
      this.scenario = scenario;
      writeParams = scenario.getWriteParams();
      threadSleepDuration = writeParams.getThreadSleepDuration();
    } else {
      writeParams = null;
      this.scenario = null;
      threadSleepDuration = 0;
    }

    int size = Integer.parseInt(properties.getProperty("pherf.default.dataloader.threadpool"));

    this.threadPoolSize = (size == 0) ? Runtime.getRuntime().availableProcessors() : size;

    // TODO Move pool management up to WorkloadExecutor
    this.pool = Executors.newFixedThreadPool(this.threadPoolSize);

    String bSize =
        (writeParams == null) || (writeParams.getBatchSize() == Long.MIN_VALUE)
            ? properties.getProperty("pherf.default.dataloader.batchsize")
            : String.valueOf(writeParams.getBatchSize());
    this.batchSize = (bSize == null) ? PherfConstants.DEFAULT_BATCH_SIZE : Integer.parseInt(bSize);
  }
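
A minimal construction sketch for the constructor above, assuming it runs inside a method that declares throws Exception. The two property keys are taken verbatim from the code; the XMLConfigParser pattern argument, the PhoenixUtil.create() factory, and passing null to run every scenario in the parser's list are assumptions about the surrounding Pherf API rather than facts shown in this example:

Properties properties = new Properties();
// 0 makes the constructor size the pool to Runtime.getRuntime().availableProcessors().
properties.setProperty("pherf.default.dataloader.threadpool", "0");
properties.setProperty("pherf.default.dataloader.batchsize", "1000");

// Assumed helpers; the scenario file pattern and factory method are illustrative only.
XMLConfigParser parser = new XMLConfigParser("scenario/my_scenario.xml");
PhoenixUtil phoenixUtil = PhoenixUtil.create();

// Per the javadoc, passing null for the scenario runs against all scenarios in the parser's list.
WriteWorkload workload = new WriteWorkload(phoenixUtil, properties, parser, null);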
Example 5
 private void waitForBatches(
     DataLoadTimeSummary dataLoadTimeSummary,
     Scenario scenario,
     long start,
     List<Future<Info>> writeBatches)
     throws InterruptedException, java.util.concurrent.ExecutionException {
   int sumRows = 0, sumDuration = 0;
   // Wait for all the batch threads to complete
   for (Future<Info> write : writeBatches) {
     Info writeInfo = write.get();
     sumRows += writeInfo.getRowCount();
     sumDuration += writeInfo.getDuration();
     logger.info(
         "Executor ("
             + this.hashCode()
             + ") writes complete with row count ("
             + writeInfo.getRowCount()
             + ") in Ms ("
             + writeInfo.getDuration()
             + ")");
   }
   logger.info(
       "Writes completed with total row count ("
           + sumRows
            + ") with total time of ("
           + sumDuration
           + ") Ms");
   dataLoadTimeSummary.add(
       scenario.getTableName(), sumRows, (int) (System.currentTimeMillis() - start));
 }
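
Taken together, Examples 1, 3, and 5 implement a standard fan-out/fan-in over an ExecutorService: submit one Callable per writer thread, then block on each Future and aggregate the results. A self-contained, JDK-only sketch of the same pattern (hypothetical names, no Pherf types) looks like this:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class FanOutFanInSketch {
  public static void main(String[] args) throws InterruptedException, ExecutionException {
    int threads = Runtime.getRuntime().availableProcessors();
    ExecutorService pool = Executors.newFixedThreadPool(threads);

    // Fan out: one task per worker, analogous to getBatches() submitting upsertData() calls.
    List<Future<Integer>> batches = new ArrayList<>();
    for (int i = 0; i < threads; i++) {
      int rowsForThisThread = 1000; // stand-in for rowCalculator.getNext()
      Callable<Integer> writer = () -> {
        // Simulated work; a real writer would upsert rowsForThisThread rows here.
        return rowsForThisThread;
      };
      batches.add(pool.submit(writer));
    }

    // Fan in: block on each Future and sum the results, analogous to waitForBatches().
    int totalRows = 0;
    for (Future<Integer> batch : batches) {
      totalRows += batch.get();
    }
    System.out.println("Wrote " + totalRows + " rows");
    pool.shutdown();
  }
}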