Code Example #1
 private void validatePercentiles(Map<Double, Double> percentiles) {
   int i = 0;
   double last = Double.NEGATIVE_INFINITY;
   for (Map.Entry<Double, Double> entry : percentiles.entrySet()) {
     assertThat(entry.getKey(), equalTo(BenchmarkSettings.DEFAULT_PERCENTILES[i++]));
     // This is a hedge against rounding errors. Sometimes two adjacent percentile values will
     // be nearly equivalent except for some insignificant decimal places. In such cases we
     // want the two values to compare as equal.
     assertThat(entry.getValue(), greaterThanOrEqualTo(last - 1e-6));
     last = entry.getValue();
   }
 }
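
The helper above leans on Hamcrest-style matchers and the suite's BenchmarkSettings.DEFAULT_PERCENTILES constant. A self-contained sketch of the same check, with a hypothetical EXPECTED_PERCENTILES array standing in for that constant and plain AssertionErrors in place of the matchers:

import java.util.LinkedHashMap;
import java.util.Map;

public class PercentileValidation {
  // Hypothetical stand-in for BenchmarkSettings.DEFAULT_PERCENTILES.
  private static final double[] EXPECTED_PERCENTILES = {25.0, 50.0, 75.0, 99.0};

  static void validatePercentiles(Map<Double, Double> percentiles) {
    int i = 0;
    double last = Double.NEGATIVE_INFINITY;
    for (Map.Entry<Double, Double> entry : percentiles.entrySet()) {
      // The map is expected to preserve insertion order (e.g. LinkedHashMap),
      // so the keys must line up with the expected percentiles one by one.
      if (entry.getKey() != EXPECTED_PERCENTILES[i++]) {
        throw new AssertionError("unexpected percentile: " + entry.getKey());
      }
      // Allow a 1e-6 tolerance so values that differ only by rounding still pass.
      if (entry.getValue() < last - 1e-6) {
        throw new AssertionError("percentile values decreased at " + entry.getKey());
      }
      last = entry.getValue();
    }
  }

  public static void main(String[] args) {
    Map<Double, Double> percentiles = new LinkedHashMap<>();
    percentiles.put(25.0, 10.0);
    percentiles.put(50.0, 20.0);
    percentiles.put(75.0, 20.0);
    percentiles.put(99.0, 35.0);
    validatePercentiles(percentiles); // passes: keys in order, values non-decreasing
  }
}
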
Code Example #2
 private static Map<String, String> randomHeadersFrom(Map<String, String> headers) {
   Map<String, String> newHeaders = new HashMap<>();
   if (headers.isEmpty()) {
     return newHeaders;
   }
   int i = randomInt(headers.size() - 1);
   for (Map.Entry<String, String> entry : headers.entrySet()) {
     if (randomInt(i) == 0) {
       newHeaders.put(entry.getKey(), entry.getValue());
     }
   }
   return newHeaders;
 }
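
randomInt here is the randomized-testing helper, which returns a value between 0 and its argument, inclusive. A self-contained sketch of the same subset selection using ThreadLocalRandom instead of that helper (an assumption, not the original API):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

public class RandomHeaders {
  static Map<String, String> randomHeadersFrom(Map<String, String> headers) {
    Map<String, String> newHeaders = new HashMap<>();
    if (headers.isEmpty()) {
      return newHeaders;
    }
    // nextInt(bound) is exclusive, so the bound is not reduced by one as in randomInt(size - 1).
    int i = ThreadLocalRandom.current().nextInt(headers.size());
    for (Map.Entry<String, String> entry : headers.entrySet()) {
      // Keep each header independently with probability 1 / (i + 1).
      if (ThreadLocalRandom.current().nextInt(i + 1) == 0) {
        newHeaders.put(entry.getKey(), entry.getValue());
      }
    }
    return newHeaders;
  }

  public static void main(String[] args) {
    Map<String, String> headers = new HashMap<>();
    headers.put("X-Foo", "1");
    headers.put("X-Bar", "2");
    headers.put("X-Baz", "3");
    System.out.println(randomHeadersFrom(headers)); // a random, possibly empty, subset
  }
}
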
Code Example #3
  public void testDuelGlobalOrdinals() throws Exception {
    Random random = getRandom();
    final int numDocs = scaledRandomIntBetween(10, 1000);
    final int numValues = scaledRandomIntBetween(10, 500);
    final String[] values = new String[numValues];
    for (int i = 0; i < numValues; ++i) {
      values[i] = RandomStrings.randomAsciiOfLength(random, 10);
    }
    // Index numDocs documents, each with up to three values drawn from the random value pool.
    for (int i = 0; i < numDocs; i++) {
      Document d = new Document();
      final int numVals = randomInt(3);
      for (int j = 0; j < numVals; ++j) {
        final String value = RandomPicks.randomFrom(random, Arrays.asList(values));
        d.add(new StringField("string", value, Field.Store.NO));
        d.add(new SortedSetDocValuesField("bytes", new BytesRef(value)));
      }
      writer.addDocument(d);
      if (randomInt(10) == 0) {
        refreshReader();
      }
    }
    refreshReader();

    // The string field data formats to exercise.
    Map<FieldDataType, Type> typeMap = new HashMap<>();
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
        Type.Bytes);
    typeMap.put(
        new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
        Type.Bytes);

    // For each format, load per-segment ordinals and global ordinals and compare them term by term.
    for (Map.Entry<FieldDataType, Type> entry : typeMap.entrySet()) {
      ifdService.clear();
      IndexOrdinalsFieldData fieldData =
          getForField(entry.getKey(), entry.getValue().name().toLowerCase(Locale.ROOT));
      RandomAccessOrds left = fieldData.load(readerContext).getOrdinalsValues();
      fieldData.clear();
      RandomAccessOrds right =
          fieldData
              .loadGlobal(topLevelReader)
              .load(topLevelReader.leaves().get(0))
              .getOrdinalsValues();
      assertEquals(left.getValueCount(), right.getValueCount());
      for (long ord = 0; ord < left.getValueCount(); ++ord) {
        assertEquals(left.lookupOrd(ord), right.lookupOrd(ord));
      }
    }
  }
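
The duel asserts that the per-segment and global loading paths resolve every ordinal to the same term. Under Lucene's sorted-set doc values contract, ordinals are dense and assigned in term sort order, so a toy, plain-Java model of the dictionary being compared looks like this:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

// Toy model of an ordinals dictionary: unique terms in sorted order, ordinal = position.
public class OrdinalsToyModel {
  public static void main(String[] args) {
    List<String> indexedValues = Arrays.asList("ford", "arthur", "ford", "zaphod", "arthur");
    List<String> dictionary = new ArrayList<>(new TreeSet<>(indexedValues));
    for (int ord = 0; ord < dictionary.size(); ord++) {
      // lookupOrd(ord) in the test above returns the term for a given ordinal,
      // which is what this positional lookup mimics.
      System.out.println(ord + " -> " + dictionary.get(ord));
    }
  }
}
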
Code Example #4
  @Test
  public void testFetchAction() throws Exception {
    setUpCharacters();

    Analysis analysis = analyze("select id, name from characters");
    QueryThenFetchConsumer queryThenFetchConsumer =
        internalCluster().getInstance(QueryThenFetchConsumer.class);
    Planner.Context plannerContext = new Planner.Context(clusterService(), UUID.randomUUID());
    ConsumerContext consumerContext = new ConsumerContext(analysis.rootRelation(), plannerContext);
    QueryThenFetch plan =
        (QueryThenFetch)
            queryThenFetchConsumer.consume(analysis.rootRelation(), consumerContext).plan();

    List<Bucket> results = getBuckets(plan.collectNode());

    TransportFetchNodeAction transportFetchNodeAction =
        internalCluster().getInstance(TransportFetchNodeAction.class);

    // extract docIds by nodeId and jobSearchContextId
    Map<String, LongArrayList> jobSearchContextDocIds = new HashMap<>();
    for (Bucket rows : results) {
      long docId = (long) rows.iterator().next().get(0);
      // unpack the jobSearchContextId from the upper 32 bits of the packed docId
      // (the lower 32 bits hold the reader doc id)
      int jobSearchContextId = (int) (docId >> 32);
      String nodeId = plannerContext.nodeId(jobSearchContextId);
      LongArrayList docIdsPerNode = jobSearchContextDocIds.get(nodeId);
      if (docIdsPerNode == null) {
        docIdsPerNode = new LongArrayList();
        jobSearchContextDocIds.put(nodeId, docIdsPerNode);
      }
      docIdsPerNode.add(docId);
    }

    Iterable<Projection> projections =
        Iterables.filter(
            plan.mergeNode().projections(), Predicates.instanceOf(FetchProjection.class));
    FetchProjection fetchProjection = (FetchProjection) Iterables.getOnlyElement(projections);
    RowInputSymbolVisitor rowInputSymbolVisitor =
        new RowInputSymbolVisitor(internalCluster().getInstance(Functions.class));
    RowInputSymbolVisitor.Context context =
        rowInputSymbolVisitor.extractImplementations(fetchProjection.outputSymbols());

    final CountDownLatch latch = new CountDownLatch(jobSearchContextDocIds.size());
    // rows is filled from listener callbacks on transport threads, so use a synchronized list
    final List<Row> rows = Collections.synchronizedList(new ArrayList<Row>());
    for (Map.Entry<String, LongArrayList> nodeEntry : jobSearchContextDocIds.entrySet()) {
      NodeFetchRequest nodeFetchRequest = new NodeFetchRequest();
      nodeFetchRequest.jobId(plan.collectNode().jobId());
      nodeFetchRequest.executionPhaseId(plan.collectNode().executionPhaseId());
      nodeFetchRequest.toFetchReferences(context.references());
      nodeFetchRequest.closeContext(true);
      nodeFetchRequest.jobSearchContextDocIds(nodeEntry.getValue());

      transportFetchNodeAction.execute(
          nodeEntry.getKey(),
          nodeFetchRequest,
          new ActionListener<NodeFetchResponse>() {
            @Override
            public void onResponse(NodeFetchResponse nodeFetchResponse) {
              for (Row row : nodeFetchResponse.rows()) {
                rows.add(row);
              }
              latch.countDown();
            }

            @Override
            public void onFailure(Throwable e) {
              latch.countDown();
              fail(e.getMessage());
            }
          });
    }
    latch.await();

    assertThat(rows.size(), is(2));
    for (Row row : rows) {
      assertThat((Integer) row.get(0), anyOf(is(1), is(2)));
      assertThat(
          (BytesRef) row.get(1), anyOf(is(new BytesRef("Arthur")), is(new BytesRef("Ford"))));
    }
  }
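
The docId handled above is a packed long: it combines the jobSearchContextId (upper 32 bits, recovered by the right shift) with the reader doc id (lower 32 bits). A minimal sketch of that packing and unpacking; the pack helper is illustrative, not part of the original code:

public class PackedDocId {
  // Illustrative packing: jobSearchContextId in the upper 32 bits, reader doc id in the lower 32.
  static long pack(int jobSearchContextId, int readerDocId) {
    return (((long) jobSearchContextId) << 32) | (readerDocId & 0xFFFFFFFFL);
  }

  public static void main(String[] args) {
    long docId = pack(7, 42);
    int jobSearchContextId = (int) (docId >> 32); // same unpacking as in the test
    int readerDocId = (int) docId;                // lower 32 bits
    System.out.println(jobSearchContextId + " / " + readerDocId); // prints 7 / 42
  }
}
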
Code Example #5
 private static void putContext(ActionRequest<?> request, Map<String, String> context) {
   for (Map.Entry<String, String> header : context.entrySet()) {
     request.putInContext(header.getKey(), header.getValue());
   }
 }
Code Example #6
 private static void putHeaders(ActionRequest<?> request, Map<String, String> headers) {
   for (Map.Entry<String, String> header : headers.entrySet()) {
     request.putHeader(header.getKey(), header.getValue());
   }
 }
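
Examples #5 and #6 are the same copy-one-entry-at-a-time pattern. On Java 8+ the explicit entrySet() loop can be written as Map.forEach with a method reference; the HeaderSink interface below is a stand-in for the request type, assumed only to expose a two-argument (String, String) setter like putHeader:

import java.util.HashMap;
import java.util.Map;

public class HeaderCopy {
  // Minimal stand-in for the request type in the snippets above; only the setter matters here.
  interface HeaderSink {
    void putHeader(String key, String value);
  }

  // Equivalent to the explicit entrySet() loop: forEach hands each key/value pair
  // to the two-argument method reference.
  static void putHeaders(HeaderSink request, Map<String, String> headers) {
    headers.forEach(request::putHeader);
  }

  public static void main(String[] args) {
    Map<String, String> headers = new HashMap<>();
    headers.put("X-Foo", "1");
    headers.put("X-Bar", "2");
    Map<String, String> copied = new HashMap<>();
    putHeaders(copied::put, headers); // copied::put matches the (String, String) signature
    System.out.println(copied);
  }
}
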
Code Example #7
  private BuildResult doRun(
      final OutputListenerImpl outputListener,
      OutputListenerImpl errorListener,
      BuildListenerImpl listener) {
    // Capture the current state of things that we will change during execution
    InputStream originalStdIn = System.in;
    Properties originalSysProperties = new Properties();
    originalSysProperties.putAll(System.getProperties());
    File originalUserDir = new File(originalSysProperties.getProperty("user.dir"));
    Map<String, String> originalEnv = new HashMap<String, String>(System.getenv());

    // Augment the environment for the execution
    System.setIn(getStdin());
    processEnvironment.maybeSetProcessDir(getWorkingDir());
    for (Map.Entry<String, String> entry : getEnvironmentVars().entrySet()) {
      processEnvironment.maybeSetEnvironmentVariable(entry.getKey(), entry.getValue());
    }
    Map<String, String> implicitJvmSystemProperties = getImplicitJvmSystemProperties();
    System.getProperties().putAll(implicitJvmSystemProperties);

    DefaultStartParameter parameter = new DefaultStartParameter();
    parameter.setCurrentDir(getWorkingDir());
    parameter.setShowStacktrace(ShowStacktrace.ALWAYS);

    CommandLineParser parser = new CommandLineParser();
    DefaultCommandLineConverter converter = new DefaultCommandLineConverter();
    converter.configure(parser);
    ParsedCommandLine parsedCommandLine = parser.parse(getAllArgs());

    BuildLayoutParameters layout = converter.getLayoutConverter().convert(parsedCommandLine);

    Map<String, String> properties = new HashMap<String, String>();
    new LayoutToPropertiesConverter().convert(layout, properties);
    converter.getSystemPropertiesConverter().convert(parsedCommandLine, properties);

    new PropertiesToStartParameterConverter().convert(properties, parameter);
    converter.convert(parsedCommandLine, parameter);

    DefaultGradleLauncherFactory factory =
        DeprecationLogger.whileDisabled(
            new Factory<DefaultGradleLauncherFactory>() {
              public DefaultGradleLauncherFactory create() {
                return (DefaultGradleLauncherFactory) GradleLauncher.getFactory();
              }
            });
    factory.addListener(listener);
    GradleLauncher gradleLauncher = factory.newInstance(parameter);
    gradleLauncher.addStandardOutputListener(outputListener);
    gradleLauncher.addStandardErrorListener(errorListener);
    try {
      return gradleLauncher.run();
    } finally {
      // Restore the environment
      System.setProperties(originalSysProperties);
      processEnvironment.maybeSetProcessDir(originalUserDir);
      for (String envVar : getEnvironmentVars().keySet()) {
        String oldValue = originalEnv.get(envVar);
        if (oldValue != null) {
          processEnvironment.maybeSetEnvironmentVariable(envVar, oldValue);
        } else {
          processEnvironment.maybeRemoveEnvironmentVariable(envVar);
        }
      }
      factory.removeListener(listener);
      System.setIn(originalStdIn);
    }
  }
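
doRun wraps the launcher call in a capture/mutate/restore cycle so stdin, system properties, the working directory, and environment variables are put back even if the build fails. A stripped-down sketch of that pattern covering only system properties and stdin, with a placeholder Runnable standing in for the Gradle launcher:

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.Properties;

public class RestoreStateSketch {
  static void runWithIsolatedState(Runnable build) {
    // Capture the current state of things that will change during execution.
    InputStream originalStdIn = System.in;
    Properties originalSysProperties = new Properties();
    originalSysProperties.putAll(System.getProperties());
    try {
      // Augment the environment for the execution.
      System.setIn(new ByteArrayInputStream(new byte[0]));
      System.setProperty("example.flag", "true");
      build.run();
    } finally {
      // Restore the environment regardless of how the build finished.
      System.setProperties(originalSysProperties);
      System.setIn(originalStdIn);
    }
  }

  public static void main(String[] args) {
    runWithIsolatedState(() -> System.out.println("flag=" + System.getProperty("example.flag")));
    System.out.println("after: " + System.getProperty("example.flag")); // null again
  }
}
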