/**
 * Combines the per-interval result sequences (cached and uncached alike) into a
 * single sequence via the tool chest's unordered merge.
 *
 * @param sequencesByInterval one result sequence per interval; may be empty
 * @param toolChest tool chest that knows how to merge this query type's results
 * @return the merged sequence, or an empty sequence when there is nothing to merge
 */
protected Sequence<T> mergeCachedAndUncachedSequences(
    List<Sequence<T>> sequencesByInterval, QueryToolChest<T, Query<T>> toolChest) {
  return sequencesByInterval.isEmpty()
      ? Sequences.<T>empty()
      : toolChest.mergeSequencesUnordered(Sequences.simple(sequencesByInterval));
}
  // Verifies that a groupBy query applies both a query-level dim filter and a
  // RegexFilteredDimensionSpec on the multi-value "tags" dimension, merging results
  // from a persisted (QueryableIndex) segment and an in-memory (IncrementalIndex)
  // segment.
  @Test
  public void testGroupByWithDimFilterAndWithFilteredDimSpec() throws Exception {
    GroupByQuery query =
        GroupByQuery.builder()
            .setDataSource("xx")
            .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
            .setGranularity(QueryGranularity.ALL)
            .setDimensions(
                Lists.<DimensionSpec>newArrayList(
                    // only dimension values matching the regex "t3" survive grouping
                    new RegexFilteredDimensionSpec(new DefaultDimensionSpec("tags", "tags"), "t3")))
            .setAggregatorSpecs(
                Arrays.asList(new AggregatorFactory[] {new CountAggregatorFactory("count")}))
            .setDimFilter(new SelectorDimFilter("tags", "t3"))
            .build();

    Sequence<Row> result =
        helper.runQueryOnSegmentsObjs(
            ImmutableList.of(
                new QueryableIndexSegment("sid1", queryableIndex),
                new IncrementalIndexSegment(incrementalIndex, "sid2")),
            query);

    // count == 4: presumably two matching rows per segment — confirm against the fixture data
    List<Row> expectedResults =
        Arrays.asList(
            GroupByQueryRunnerTestHelper.createExpectedRow(
                "1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L));

    TestHelper.assertExpectedObjects(
        expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
  }
Example #3
0
  // Verifies that a select query whose AND filter can never match (market == "spot"
  // AND market == "foo" simultaneously) still returns a single result holding an
  // empty event list and empty paging identifiers, rather than no result at all.
  @Test
  public void testFullSelectNoResults() {
    SelectQuery query =
        newTestQuery()
            .intervals(I_0112_0114)
            .filters(
                new AndDimFilter(
                    Arrays.<DimFilter>asList(
                        new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null),
                        new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo", null))))
            .build();

    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(
            runner.run(query, Maps.newHashMap()), Lists.<Result<SelectResultValue>>newArrayList());

    // Single result at the interval start, with no events and no paging identifiers.
    List<Result<SelectResultValue>> expectedResults =
        Arrays.asList(
            new Result<SelectResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.<String, Integer>of(), Lists.<EventHolder>newArrayList())));

    verify(expectedResults, results);
  }
  // Ingests the sample TSV with a hyperUnique metric on "market" and verifies that a
  // groupBy query reports the expected approximate cardinality (3 distinct markets)
  // both via the raw HLL aggregate and via the hyperUniqueCardinality post-aggregator.
  @Test
  public void testIngestAndQuery() throws Exception {
    AggregationTestHelper helper =
        AggregationTestHelper.createGroupByQueryAggregationTestHelper(
            Lists.newArrayList(new AggregatorsModule()), tempFolder);

    // Roll "market" up into an HLL sketch named "index_hll" at ingest time.
    String metricSpec =
        "[{"
            + "\"type\": \"hyperUnique\","
            + "\"name\": \"index_hll\","
            + "\"fieldName\": \"market\""
            + "}]";

    String parseSpec =
        "{"
            + "\"type\" : \"string\","
            + "\"parseSpec\" : {"
            + "    \"format\" : \"tsv\","
            + "    \"timestampSpec\" : {"
            + "        \"column\" : \"timestamp\","
            + "        \"format\" : \"auto\""
            + "},"
            + "    \"dimensionsSpec\" : {"
            + "        \"dimensions\": [],"
            + "        \"dimensionExclusions\" : [],"
            + "        \"spatialDimensions\" : []"
            + "    },"
            + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
            + "  }"
            + "}";

    String query =
        "{"
            + "\"queryType\": \"groupBy\","
            + "\"dataSource\": \"test_datasource\","
            + "\"granularity\": \"ALL\","
            + "\"dimensions\": [],"
            + "\"aggregations\": ["
            + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
            + "],"
            + "\"postAggregations\": ["
            + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
            + "],"
            + "\"intervals\": [ \"1970/2050\" ]"
            + "}";

    // Typed as Sequence<Row> (groupBy results are Rows) instead of the raw Sequence
    // the original used, so the toList call below needs no unchecked operations.
    Sequence<Row> seq =
        helper.createIndexAndRunQueryOnSegment(
            new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
            parseSpec,
            metricSpec,
            0,
            QueryGranularities.NONE,
            50000,
            query);

    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.<Row>newArrayList()).get(0);
    // HLL is approximate: allow 0.1 tolerance around the true cardinality of 3.
    Assert.assertEquals(3.0, row.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(3.0, row.getFloatMetric("index_unique_count"), 0.1);
  }
  // JMH benchmark: runs the timeseries query fanned out across all queryable-index
  // segments — one pre-merge-decorated runner per segment, merged on the executor,
  // finalized, then post-merge-decorated — and materializes the full result list.
  // Results are fed to the Blackhole so the JIT cannot dead-code-eliminate the work.
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<TimeseriesResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
      String segmentName = "qIndex" + i;
      QueryRunner<Result<TimeseriesResultValue>> runner =
          QueryBenchmarkUtil.makeQueryRunner(
              factory, segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
      singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }

    // Merge per-segment results on the shared executor, then finalize and decorate.
    QueryRunner theRunner =
        toolChest.postMergeQueryDecoration(
            new FinalizeResultsQueryRunner<>(
                toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)),
                toolChest));

    Sequence<Result<TimeseriesResultValue>> queryResult =
        theRunner.run(query, Maps.<String, Object>newHashMap());
    List<Result<TimeseriesResultValue>> results =
        Sequences.toList(queryResult, Lists.<Result<TimeseriesResultValue>>newArrayList());

    for (Result<TimeseriesResultValue> result : results) {
      blackhole.consume(result);
    }
  }
Example #6
0
  // Runs a search query over the given datasource/interval and asserts that the
  // segments the server manager consulted match the expected (version, interval)
  // pairs, in order. Clears the factory's recorded adapters afterwards so the
  // fixture can be reused by subsequent assertions.
  // NOTE(review): the <T> type parameter is unused.
  private <T> void assertQueryable(
      QueryGranularity granularity,
      String dataSource,
      Interval interval,
      List<Pair<String, Interval>> expected) {
    Iterator<Pair<String, Interval>> expectedIter = expected.iterator();
    final List<Interval> intervals = Arrays.asList(interval);
    final SearchQuery query =
        Druids.newSearchQueryBuilder()
            .dataSource(dataSource)
            .intervals(intervals)
            .granularity(granularity)
            .limit(10000)
            .query("wow")
            .build();
    QueryRunner<Result<SearchResultValue>> runner =
        serverManager.getQueryRunnerForIntervals(query, intervals);
    final Sequence<Result<SearchResultValue>> seq = runner.run(query);
    // Materialized purely for the side effect of actually touching the segments.
    Sequences.toList(seq, Lists.<Result<SearchResultValue>>newArrayList());
    Iterator<SegmentForTesting> adaptersIter = factory.getAdapters().iterator();

    while (expectedIter.hasNext() && adaptersIter.hasNext()) {
      Pair<String, Interval> expectedVals = expectedIter.next();
      SegmentForTesting value = adaptersIter.next();

      Assert.assertEquals(expectedVals.lhs, value.getVersion());
      Assert.assertEquals(expectedVals.rhs, value.getInterval());
    }

    // Both iterators must be exhausted: exactly as many segments as expected.
    Assert.assertFalse(expectedIter.hasNext());
    Assert.assertFalse(adaptersIter.hasNext());

    factory.clearAdapters();
  }
Example #7
0
  // Verifies a select query that projects only the market dimension and the index
  // metric: every other event column should come back null in the result rows.
  @Test
  public void testSelectWithDimsAndMets() {
    SelectQuery query =
        newTestQuery()
            .intervals(I_0112_0114)
            .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.marketDimension))
            .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric))
            .build();

    HashMap<String, Object> context = new HashMap<String, Object>();
    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(
            runner.run(query, context), Lists.<Result<SelectResultValue>>newArrayList());

    PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
    List<Result<SelectResultValue>> expectedResults =
        toExpected(
            toEvents(
                // null slots mark fixture columns excluded from the projection —
                // presumably positional against the raw event schema; confirm in toEvents
                new String[] {
                  EventHolder.timestampKey + ":TIME",
                  QueryRunnerTestHelper.marketDimension + ":STRING",
                  null,
                  null,
                  null,
                  QueryRunnerTestHelper.indexMetric + ":FLOAT"
                },
                V_0112_0114),
            offset.startOffset(),
            offset.threshold());
    verify(expectedResults, results);
  }
Example #8
0
  // Exercises paging over a filtered ("spot" only), day-granular select query with
  // several (startDelta, threshold) parameter pairs, including a zero threshold.
  // Expected events are the "spot" rows of the fixture data, one TSV line per event.
  @Test
  public void testFullOnSelectWithFilter() {
    // startDelta + threshold pairs
    for (int[] param : new int[][] {{3, 3}, {0, 1}, {5, 5}, {2, 7}, {3, 0}}) {
      SelectQuery query =
          newTestQuery()
              .intervals(I_0112_0114)
              .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
              .granularity(QueryRunnerTestHelper.dayGran)
              .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
              .metrics(Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric))
              .pagingSpec(new PagingSpec(toPagingIdentifier(param[0], descending), param[1]))
              .build();

      HashMap<String, Object> context = new HashMap<String, Object>();
      Iterable<Result<SelectResultValue>> results =
          Sequences.toList(
              runner.run(query, context), Lists.<Result<SelectResultValue>>newArrayList());

      final List<List<Map<String, Object>>> events =
          toEvents(
              // null slots mark fixture columns excluded from the projection
              new String[] {
                EventHolder.timestampKey + ":TIME",
                null,
                QueryRunnerTestHelper.qualityDimension + ":STRING",
                null,
                null,
                QueryRunnerTestHelper.indexMetric + ":FLOAT"
              },
              // filtered values with day granularity
              new String[] {
                "2011-01-12T00:00:00.000Z	spot	automotive	preferred	apreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	business	preferred	bpreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	entertainment	preferred	epreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	health	preferred	hpreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	mezzanine	preferred	mpreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	news	preferred	npreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	premium	preferred	ppreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	technology	preferred	tpreferred	100.000000",
                "2011-01-12T00:00:00.000Z	spot	travel	preferred	tpreferred	100.000000"
              },
              new String[] {
                "2011-01-13T00:00:00.000Z	spot	automotive	preferred	apreferred	94.874713",
                "2011-01-13T00:00:00.000Z	spot	business	preferred	bpreferred	103.629399",
                "2011-01-13T00:00:00.000Z	spot	entertainment	preferred	epreferred	110.087299",
                "2011-01-13T00:00:00.000Z	spot	health	preferred	hpreferred	114.947403",
                "2011-01-13T00:00:00.000Z	spot	mezzanine	preferred	mpreferred	104.465767",
                "2011-01-13T00:00:00.000Z	spot	news	preferred	npreferred	102.851683",
                "2011-01-13T00:00:00.000Z	spot	premium	preferred	ppreferred	108.863011",
                "2011-01-13T00:00:00.000Z	spot	technology	preferred	tpreferred	111.356672",
                "2011-01-13T00:00:00.000Z	spot	travel	preferred	tpreferred	106.236928"
              });

      // Expected window is derived from the query's own paging offset computation.
      PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
      List<Result<SelectResultValue>> expectedResults =
          toExpected(events, offset.startOffset(), offset.threshold());
      verify(expectedResults, results);
    }
  }
  // Verifies that resetting a cursor over an IncrementalIndex is stable even when a
  // row with an earlier (out-of-order) timestamp is added after the cursor was
  // created: the filtered "sally" row must still resolve to "bo" after reset().
  @Test
  public void testResetSanity() throws IOException {

    IncrementalIndex index = indexCreator.createIndex();
    DateTime t = DateTime.now();
    Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1));

    index.add(
        new MapBasedInputRow(
            t.minus(1).getMillis(),
            Lists.newArrayList("billy"),
            ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(
        new MapBasedInputRow(
            t.minus(1).getMillis(),
            Lists.newArrayList("sally"),
            ImmutableMap.<String, Object>of("sally", "bo")));

    IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(index);

    // Exercise both scan directions.
    for (boolean descending : Arrays.asList(false, true)) {
      Sequence<Cursor> cursorSequence =
          adapter.makeCursors(
              new SelectorFilter("sally", "bo"), interval, QueryGranularity.NONE, descending);

      Cursor cursor =
          Sequences.toList(Sequences.limit(cursorSequence, 1), Lists.<Cursor>newArrayList()).get(0);
      DimensionSelector dimSelector;

      dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
      Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));

      // Add a row timestamped before the cursor's position, after cursor creation.
      index.add(
          new MapBasedInputRow(
              t.minus(1).getMillis(),
              Lists.newArrayList("sally"),
              ImmutableMap.<String, Object>of("sally", "ah")));

      // Cursor reset should not be affected by out of order values
      cursor.reset();

      dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
      Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
    }
  }
  // Verifies select paging: starting from paging offset 3 with a threshold of 3
  // returns the events at offsets 3, 4, and 5 (quality = health, mezzanine, news)
  // and reports 5 as the segment's final paging identifier.
  @Test
  public void testSelectPagination() {
    SelectQuery query =
        new SelectQuery(
            QueryRunnerTestHelper.dataSource,
            QueryRunnerTestHelper.fullOnInterval,
            null,
            QueryRunnerTestHelper.allGran,
            Lists.<String>newArrayList(QueryRunnerTestHelper.qualityDimension),
            Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric),
            new PagingSpec(
                Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3),
            null);

    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(runner.run(query), Lists.<Result<SelectResultValue>>newArrayList());

    List<Result<SelectResultValue>> expectedResults =
        Arrays.asList(
            new Result<SelectResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.of(QueryRunnerTestHelper.segmentId, 5),
                    Arrays.asList(
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            3,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "health")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            4,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "mezzanine")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            5,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "news")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build())))));

    verify(expectedResults, results);
  }
  // Verifies a select query (constructor form, no paging identifiers) projecting
  // the lower-cased provider dimension and the index metric: the first three events
  // are all "spot" rows and the segment's last paging identifier is 2.
  @Test
  public void testSelectWithDimsAndMets() {
    SelectQuery query =
        new SelectQuery(
            QueryRunnerTestHelper.dataSource,
            QueryRunnerTestHelper.fullOnInterval,
            null,
            QueryRunnerTestHelper.allGran,
            Lists.<String>newArrayList(providerLowercase),
            Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric),
            // null paging identifiers => start from the beginning, threshold 3
            new PagingSpec(null, 3),
            null);

    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(runner.run(query), Lists.<Result<SelectResultValue>>newArrayList());

    List<Result<SelectResultValue>> expectedResults =
        Arrays.asList(
            new Result<SelectResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2),
                    Arrays.asList(
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            0,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            1,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            2,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build())))));

    verify(expectedResults, results);
  }
  /**
   * Runs {@code query} through the factory's full pre-merge/merge/finalize runner
   * chain and materializes the resulting sequence into a list.
   *
   * @param factory factory supplying the tool chest for this query type
   * @param runner the base runner to decorate and execute
   * @param query the query to run
   * @return all results, fully materialized
   */
  private static <T> List<T> runQuery(
      QueryRunnerFactory factory, QueryRunner runner, Query<T> query) {
    final QueryToolChest chest = factory.getToolchest();
    final QueryRunner<T> decorated =
        new FinalizeResultsQueryRunner<>(
            chest.mergeResults(chest.preMergeQueryDecoration(runner)), chest);
    final Sequence<T> resultSeq = decorated.run(query, Maps.<String, Object>newHashMap());
    return Sequences.toList(resultSeq, Lists.<T>newArrayList());
  }
Example #13
0
  // Walks sequentially through the result set in pages of 3, twice: first by
  // advancing the paging identifiers explicitly via PagingSpec.next(), then by
  // passing the returned identifiers as-is with fromNext=true. Both strategies must
  // visit the same sequence of per-segment offsets (negative when descending).
  @Test
  public void testSequentialPaging() {
    int[] asc = {2, 5, 8, 11, 14, 17, 20, 23, 25};
    int[] dsc = {-3, -6, -9, -12, -15, -18, -21, -24, -26};
    int[] expected = descending ? dsc : asc;

    SelectQuery query = newTestQuery().intervals(I_0112_0114).build();
    for (int offset : expected) {
      List<Result<SelectResultValue>> results =
          Sequences.toList(
              runner.run(query, ImmutableMap.of()),
              Lists.<Result<SelectResultValue>>newArrayList());

      Assert.assertEquals(1, results.size());

      SelectResultValue result = results.get(0).getValue();
      Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers();
      Assert.assertEquals(
          offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue());

      // Explicitly advance past the last-seen offset for the next page.
      Map<String, Integer> next = PagingSpec.next(pagingIdentifiers, descending);
      query = query.withPagingSpec(new PagingSpec(next, 3));
    }

    query = newTestQuery().intervals(I_0112_0114).build();
    for (int offset : expected) {
      List<Result<SelectResultValue>> results =
          Sequences.toList(
              runner.run(query, ImmutableMap.of()),
              Lists.<Result<SelectResultValue>>newArrayList());

      Assert.assertEquals(1, results.size());

      SelectResultValue result = results.get(0).getValue();
      Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers();
      Assert.assertEquals(
          offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue());

      // use identifier as-is but with fromNext=true
      query = query.withPagingSpec(new PagingSpec(pagingIdentifiers, 3, true));
    }
  }
Example #14
0
    /**
     * Runs a TopN query while enforcing a minimum threshold: queries at or below
     * {@code minTopNThreshold} are executed with the larger threshold (so cached or
     * merged results stay reusable — presumably; confirm against caller) and each
     * result is then trimmed back down to the caller's requested threshold.
     *
     * @param input must be a {@link TopNQuery}; anything else is an error
     * @param responseContext per-query response context passed through to the delegate
     * @return the delegate's results, trimmed to {@code query.getThreshold()} values
     * @throws ISE if {@code input} is not a TopNQuery
     */
    @Override
    public Sequence<Result<TopNResultValue>> run(
        Query<Result<TopNResultValue>> input, Map<String, Object> responseContext) {
      if (!(input instanceof TopNQuery)) {
        throw new ISE("Can only handle [%s], got [%s]", TopNQuery.class, input.getClass());
      }

      final TopNQuery query = (TopNQuery) input;
      // Already above the minimum: pass straight through, no trimming needed.
      if (query.getThreshold() > minTopNThreshold) {
        return runner.run(query, responseContext);
      }

      final boolean isBySegment = query.getContextBySegment(false);

      return Sequences.map(
          runner.run(query.withThreshold(minTopNThreshold), responseContext),
          new Function<Result<TopNResultValue>, Result<TopNResultValue>>() {
            @Override
            public Result<TopNResultValue> apply(Result<TopNResultValue> input) {
              if (isBySegment) {
                // bySegment results nest per-segment results; trim each one.
                BySegmentResultValue<Result<TopNResultValue>> value =
                    (BySegmentResultValue<Result<TopNResultValue>>) input.getValue();

                return new Result<TopNResultValue>(
                    input.getTimestamp(),
                    new BySegmentTopNResultValue(
                        Lists.transform(
                            value.getResults(),
                            new Function<Result<TopNResultValue>, Result<TopNResultValue>>() {
                              @Override
                              public Result<TopNResultValue> apply(Result<TopNResultValue> input) {
                                return new Result<>(
                                    input.getTimestamp(),
                                    new TopNResultValue(
                                        Lists.<Object>newArrayList(
                                            Iterables.limit(
                                                input.getValue(), query.getThreshold()))));
                              }
                            }),
                        value.getSegmentId(),
                        value.getInterval()));
              }

              // Plain result: keep only the first `threshold` values.
              return new Result<>(
                  input.getTimestamp(),
                  new TopNResultValue(
                      Lists.<Object>newArrayList(
                          Iterables.limit(input.getValue(), query.getThreshold()))));
            }
          });
    }
  // Verifies groupBy over an IncrementalIndex whose rows have differing column sets:
  // the first row has only "billy", the second has "billy" and "sally". The
  // JavaScript aggregator must see null for the missing column and count string
  // lengths accordingly (row 1: "hi" -> 2.0; row 2: "hip" + "hop" -> 6.0).
  @Test
  public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(
        new MapBasedInputRow(
            new DateTime("2014-09-01T00:00:00"),
            Lists.newArrayList("billy"),
            ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(
        new MapBasedInputRow(
            new DateTime("2014-09-01T01:00:00"),
            Lists.newArrayList("billy", "sally"),
            ImmutableMap.<String, Object>of(
                "billy", "hip",
                "sally", "hop")));

    GroupByQueryEngine engine = makeGroupByQueryEngine();

    final Sequence<Row> rows =
        engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(QueryGranularity.ALL)
                .setInterval(new Interval(0, new DateTime().getMillis()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .addAggregator(
                    // sums the lengths of both dimension values, treating null as 0
                    new JavaScriptAggregatorFactory(
                        "fieldLength",
                        Arrays.asList("sally", "billy"),
                        "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
                        "function() { return 0; }",
                        "function(a,b) { return a + b; }"))
                .build(),
            new IncrementalIndexStorageAdapter(index));

    final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());

    Assert.assertEquals(2, results.size());

    // Row without "sally": the key is absent from the event map entirely.
    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(
        ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());

    row = (MapBasedRow) results.get(1);
    Assert.assertEquals(
        ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0),
        row.getEvent());
  }
    /**
     * Runs a search query while capping its limit: queries whose limit is at or
     * above {@code config.getMaxSearchLimit()} are executed with the capped limit
     * and each result is then limited to {@code query.getLimit()} hits.
     *
     * @param input must be a {@link SearchQuery}; anything else is an error
     * @param responseContext per-query response context passed through to the delegate
     * @return the delegate's results, limited to the query's own limit
     * @throws ISE if {@code input} is not a SearchQuery
     */
    @Override
    public Sequence<Result<SearchResultValue>> run(
        Query<Result<SearchResultValue>> input, Map<String, Object> responseContext) {
      if (!(input instanceof SearchQuery)) {
        throw new ISE("Can only handle [%s], got [%s]", SearchQuery.class, input.getClass());
      }

      final SearchQuery query = (SearchQuery) input;
      // Limit is already under the cap: pass straight through.
      if (query.getLimit() < config.getMaxSearchLimit()) {
        return runner.run(query, responseContext);
      }

      final boolean isBySegment = BaseQuery.getContextBySegment(query, false);

      return Sequences.map(
          runner.run(query.withLimit(config.getMaxSearchLimit()), responseContext),
          new Function<Result<SearchResultValue>, Result<SearchResultValue>>() {
            @Override
            public Result<SearchResultValue> apply(Result<SearchResultValue> input) {
              if (isBySegment) {
                // bySegment results nest per-segment results; limit each one.
                BySegmentSearchResultValue value = (BySegmentSearchResultValue) input.getValue();

                return new Result<SearchResultValue>(
                    input.getTimestamp(),
                    new BySegmentSearchResultValue(
                        Lists.transform(
                            value.getResults(),
                            new Function<Result<SearchResultValue>, Result<SearchResultValue>>() {
                              @Override
                              public Result<SearchResultValue> apply(
                                  // NOTE(review): parameter is @Nullable but is
                                  // dereferenced unconditionally — confirm nulls
                                  // cannot actually occur here.
                                  @Nullable Result<SearchResultValue> input) {
                                return new Result<SearchResultValue>(
                                    input.getTimestamp(),
                                    new SearchResultValue(
                                        Lists.newArrayList(
                                            Iterables.limit(input.getValue(), query.getLimit()))));
                              }
                            }),
                        value.getSegmentId(),
                        value.getInterval()));
              }

              return new Result<SearchResultValue>(
                  input.getTimestamp(),
                  new SearchResultValue(
                      Lists.<SearchHit>newArrayList(
                          Iterables.limit(input.getValue(), query.getLimit()))));
            }
          });
    }
  /**
   * Runs a segment metadata query over the given segment for the fixed interval
   * 2011/2012 and materializes the resulting analyses.
   *
   * <p>NOTE(review): the method name ("Analysises") is a typo kept for caller
   * compatibility.
   *
   * @param index the segment to analyze
   * @return the segment analyses produced by the metadata query
   */
  private List<SegmentAnalysis> getSegmentAnalysises(Segment index) {
    final QueryRunner runner =
        QueryRunnerTestHelper.makeQueryRunner(
            (QueryRunnerFactory)
                new SegmentMetadataQueryRunnerFactory(
                    new SegmentMetadataQueryQueryToolChest(),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER),
            index);

    final SegmentMetadataQuery query =
        new SegmentMetadataQuery(
            new LegacyDataSource("test"), QuerySegmentSpecs.create("2011/2012"), null, null, null);
    HashMap<String, Object> context = new HashMap<String, Object>();
    return Sequences.toList(query.run(runner, context), Lists.<SegmentAnalysis>newArrayList());
  }
Example #18
0
  // Verifies an unfiltered, unprojected select over the two-day interval: all
  // events come back, with expectations windowed by the query's own paging offset.
  @Test
  public void testFullOnSelect() {
    SelectQuery query = newTestQuery().intervals(I_0112_0114).build();

    HashMap<String, Object> context = new HashMap<String, Object>();
    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(
            runner.run(query, context), Lists.<Result<SelectResultValue>>newArrayList());

    PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
    List<Result<SelectResultValue>> expectedResults =
        toExpected(
            // only the timestamp column is typed; all other columns pass through
            toEvents(new String[] {EventHolder.timestampKey + ":TIME"}, V_0112_0114),
            offset.startOffset(),
            offset.threshold());
    verify(expectedResults, results);
  }
Example #19
0
 // Converts the accumulated (hit -> count) map into a single-element result
 // sequence, copying each hit with its final count and truncating to `limit`.
 // The result is timestamped at the start of the segment's data interval.
 private Sequence<Result<SearchResultValue>> makeReturnResult(
     int limit, TreeMap<SearchHit, MutableInt> retVal) {
   Iterable<SearchHit> source =
       Iterables.transform(
           retVal.entrySet(),
           new Function<Map.Entry<SearchHit, MutableInt>, SearchHit>() {
             @Override
             public SearchHit apply(Map.Entry<SearchHit, MutableInt> input) {
               // Rebuild the hit so it carries the final accumulated count.
               SearchHit hit = input.getKey();
               return new SearchHit(
                   hit.getDimension(), hit.getValue(), input.getValue().intValue());
             }
           });
   return Sequences.simple(
       ImmutableList.of(
           new Result<SearchResultValue>(
               segment.getDataInterval().getStart(),
               new SearchResultValue(
                   Lists.newArrayList(new FunctionalIterable<SearchHit>(source).limit(limit))))));
 }
  // Verifies that a TopN query over an IncrementalIndex holding a single row with a
  // single-valued dimension produces exactly one result bucket with one value.
  @Test
  public void testSingleValueTopN() throws IOException {
    IncrementalIndex index = indexCreator.createIndex();
    DateTime t = DateTime.now();
    index.add(
        new MapBasedInputRow(
            t.minus(1).getMillis(),
            Lists.newArrayList("sally"),
            ImmutableMap.<String, Object>of("sally", "bo")));

    TopNQueryEngine engine =
        new TopNQueryEngine(
            // fixed-size scratch buffers for TopN aggregation
            new StupidPool<ByteBuffer>(
                new Supplier<ByteBuffer>() {
                  @Override
                  public ByteBuffer get() {
                    return ByteBuffer.allocate(50000);
                  }
                }));

    final Iterable<Result<TopNResultValue>> results =
        Sequences.toList(
            engine.query(
                new TopNQueryBuilder()
                    .dataSource("test")
                    .granularity(QueryGranularity.ALL)
                    .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis())))
                    .dimension("sally")
                    .metric("cnt")
                    .threshold(10)
                    .aggregators(
                        Lists.<AggregatorFactory>newArrayList(
                            new LongSumAggregatorFactory("cnt", "cnt")))
                    .build(),
                new IncrementalIndexStorageAdapter(index)),
            Lists.<Result<TopNResultValue>>newLinkedList());

    // One time bucket (ALL granularity), containing the single "sally"="bo" entry.
    Assert.assertEquals(1, Iterables.size(results));
    Assert.assertEquals(1, results.iterator().next().getValue().getValue().size());
  }
  /**
   * TopN over a list-filtered dimension spec (keep only tag "t3") combined with a matching
   * selector dim filter; expects a single row with count 2 for "t3".
   */
  @Test
  public void testTopNWithDimFilterAndWithFilteredDimSpec() throws Exception {
    final TopNQueryBuilder builder =
        new TopNQueryBuilder()
            .dataSource("xx")
            .granularity(QueryGranularity.ALL)
            .dimension(
                new ListFilteredDimensionSpec(
                    new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), null))
            .metric("count")
            .intervals(QueryRunnerTestHelper.fullOnInterval)
            .aggregators(
                Arrays.asList(new AggregatorFactory[] {new CountAggregatorFactory("count")}))
            .threshold(5)
            .filters(new SelectorDimFilter("tags", "t3"));
    TopNQuery query = builder.build();

    QueryRunnerFactory factory =
        new TopNQueryRunnerFactory(
            TestQueryRunners.getPool(),
            new TopNQueryQueryToolChest(
                new TopNQueryConfig(),
                QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    QueryRunner<Result<TopNResultValue>> runner =
        QueryRunnerTestHelper.makeQueryRunner(
            factory, new QueryableIndexSegment("sid1", queryableIndex));

    Map<String, Object> responseContext = Maps.newHashMap();
    Sequence<Result<TopNResultValue>> actual = runner.run(query, responseContext);

    // Single expected row: tag "t3" appears in two rows of the segment.
    Map<String, Object> expectedRow = ImmutableMap.<String, Object>of("tags", "t3", "count", 2L);
    List<Result<TopNResultValue>> expected =
        Arrays.asList(
            new Result<TopNResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new TopNResultValue(Arrays.<Map<String, Object>>asList(expectedRow))));

    TestHelper.assertExpectedObjects(
        expected, Sequences.toList(actual, new ArrayList<Result<TopNResultValue>>()), "");
  }
  /**
   * Sanity check: grouping by two dimensions, each present in exactly one row, yields one
   * output row per dimension value with cnt == 1.
   */
  @Test
  public void testSanity() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(
        new MapBasedInputRow(
            new DateTime().minus(1).getMillis(),
            Lists.newArrayList("billy"),
            ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(
        new MapBasedInputRow(
            new DateTime().minus(1).getMillis(),
            Lists.newArrayList("sally"),
            ImmutableMap.<String, Object>of("sally", "bo")));

    GroupByQueryEngine engine = makeGroupByQueryEngine();

    // Group over the full history of the index by both dimensions.
    GroupByQuery query =
        GroupByQuery.builder()
            .setDataSource("test")
            .setGranularity(QueryGranularity.ALL)
            .setInterval(new Interval(0, new DateTime().getMillis()))
            .addDimension("billy")
            .addDimension("sally")
            .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
            .build();

    final ArrayList<Row> results =
        Sequences.toList(
            engine.process(query, new IncrementalIndexStorageAdapter(index)),
            Lists.<Row>newArrayList());

    Assert.assertEquals(2, results.size());

    MapBasedRow first = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), first.getEvent());

    MapBasedRow second = (MapBasedRow) results.get(1);
    Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), second.getEvent());
  }
// Example #23
  /**
   * Selecting a dimension ("foo") and metric ("foo2") that do not exist in the data still
   * returns rows; the expected events are built with NULL column specs for both.
   */
  @Test
  public void testFullSelectNoDimensionAndMetric() {
    SelectQuery query =
        newTestQuery()
            .intervals(I_0112_0114)
            .dimensionSpecs(DefaultDimensionSpec.toSpec("foo"))
            .metrics(Lists.<String>newArrayList("foo2"))
            .build();

    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(
            runner.run(query, Maps.newHashMap()), Lists.<Result<SelectResultValue>>newArrayList());

    // Timestamp column plus two absent columns rendered as nulls.
    final String[] columnSpecs =
        new String[] {EventHolder.timestampKey + ":TIME", "foo:NULL", "foo2:NULL"};
    final List<List<Map<String, Object>>> events = toEvents(columnSpecs, V_0112_0114);

    PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
    verify(toExpected(events, offset.startOffset(), offset.threshold()), results);
  }
// Example #24
  /**
   * Select with a paging identifier starting at event offset 3 and a page size of 3; verifies
   * that the next page of events matches the expected slice computed from the paging offset.
   */
  @Test
  public void testSelectPagination() {
    SelectQuery query =
        newTestQuery()
            .intervals(I_0112_0114)
            .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
            .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric))
            .pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3))
            .build();

    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(
            runner.run(query, Maps.newHashMap()), Lists.<Result<SelectResultValue>>newArrayList());

    PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
    // NOTE(review): expected events are built from "foo:NULL"/"foo2:NULL" column specs even
    // though the query selects the quality dimension and index metric — confirm toEvents()
    // treats these specs as intended here, or whether this was copied from
    // testFullSelectNoDimensionAndMetric and should use quality/index specs instead.
    List<Result<SelectResultValue>> expectedResults =
        toExpected(
            toEvents(
                new String[] {EventHolder.timestampKey + ":TIME", "foo:NULL", "foo2:NULL"},
                V_0112_0114),
            offset.startOffset(),
            offset.threshold());
    verify(expectedResults, results);
  }
// Example #25
 /**
  * Combines per-segment search runners into a single runner that concatenates their results.
  *
  * <p>NOTE(review): {@code queryExecutor} is unused — ConcatQueryRunner simply chains the
  * supplied runners' sequences rather than dispatching them onto the executor; confirm
  * sequential execution is intended for this factory.
  */
 @Override
 public QueryRunner<Result<SearchResultValue>> mergeRunners(
     ExecutorService queryExecutor,
     Iterable<QueryRunner<Result<SearchResultValue>>> queryRunners) {
   return new ConcatQueryRunner<Result<SearchResultValue>>(Sequences.simple(queryRunners));
 }
  /**
   * Full-scan select with empty dimension/metric lists (empty list == all columns) and a page
   * size of 3; expects the first three events of 2011-01-12, each carrying every dimension
   * plus the index metric, with the paging identifier pointing at offset 2.
   */
  @Test
  public void testFullOnSelect() {
    SelectQuery query =
        new SelectQuery(
            QueryRunnerTestHelper.dataSource,
            QueryRunnerTestHelper.fullOnInterval,
            null,
            QueryRunnerTestHelper.allGran,
            Lists.<String>newArrayList(),
            Lists.<String>newArrayList(),
            new PagingSpec(null, 3),
            null);

    // NOTE(review): uses the single-argument runner.run(query) overload while other tests in
    // this file pass a response-context map — confirm the overloads are equivalent here.
    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(runner.run(query), Lists.<Result<SelectResultValue>>newArrayList());

    // Expected: events 0-2 of the segment, one per quality value, all on the same day.
    List<Result<SelectResultValue>> expectedResults =
        Arrays.asList(
            new Result<SelectResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2),
                    Arrays.asList(
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            0,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.qualityDimension, "automotive")
                                .put(QueryRunnerTestHelper.placementDimension, "preferred")
                                .put(
                                    QueryRunnerTestHelper.placementishDimension,
                                    Lists.newArrayList("a", "preferred"))
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            1,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.qualityDimension, "business")
                                .put(QueryRunnerTestHelper.placementDimension, "preferred")
                                .put(
                                    QueryRunnerTestHelper.placementishDimension,
                                    Lists.newArrayList("b", "preferred"))
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            2,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(providerLowercase, "spot")
                                .put(QueryRunnerTestHelper.qualityDimension, "entertainment")
                                .put(QueryRunnerTestHelper.placementDimension, "preferred")
                                .put(
                                    QueryRunnerTestHelper.placementishDimension,
                                    Lists.newArrayList("e", "preferred"))
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build())))));

    verify(expectedResults, results);
  }
  /**
   * Day-granularity select filtered to provider "spot", starting from paging offset 3 with a
   * page size of 3; expects events 3-5 (health/mezzanine/news) for each of the two days in
   * the interval, exposing only the quality dimension and index metric.
   */
  @Test
  public void testFullOnSelectWithFilter() {
    SelectQuery query =
        new SelectQuery(
            QueryRunnerTestHelper.dataSource,
            new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")),
            new SelectorDimFilter(QueryRunnerTestHelper.providerDimension, "spot"),
            QueryRunnerTestHelper.dayGran,
            Lists.<String>newArrayList(QueryRunnerTestHelper.qualityDimension),
            Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric),
            new PagingSpec(
                Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3),
            null);

    // NOTE(review): single-argument runner.run(query) overload, unlike tests that pass a
    // response-context map — confirm equivalence.
    Iterable<Result<SelectResultValue>> results =
        Sequences.toList(runner.run(query), Lists.<Result<SelectResultValue>>newArrayList());

    // One Result per day; each page's identifier advances to offset 5 (last event returned).
    List<Result<SelectResultValue>> expectedResults =
        Arrays.asList(
            new Result<SelectResultValue>(
                new DateTime("2011-01-12T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.of(QueryRunnerTestHelper.segmentId, 5),
                    Arrays.asList(
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            3,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "health")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            4,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "mezzanine")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            5,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-12T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "news")
                                .put(QueryRunnerTestHelper.indexMetric, 100.000000F)
                                .build())))),
            new Result<SelectResultValue>(
                new DateTime("2011-01-13T00:00:00.000Z"),
                new SelectResultValue(
                    ImmutableMap.of(QueryRunnerTestHelper.segmentId, 5),
                    Arrays.asList(
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            3,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-13T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "health")
                                .put(QueryRunnerTestHelper.indexMetric, 114.947403F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            4,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-13T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "mezzanine")
                                .put(QueryRunnerTestHelper.indexMetric, 104.465767F)
                                .build()),
                        new EventHolder(
                            QueryRunnerTestHelper.segmentId,
                            5,
                            new ImmutableMap.Builder<String, Object>()
                                .put(
                                    EventHolder.timestampKey,
                                    new DateTime("2011-01-13T00:00:00.000Z"))
                                .put(QueryRunnerTestHelper.qualityDimension, "news")
                                .put(QueryRunnerTestHelper.indexMetric, 102.851683F)
                                .build())))));

    verify(expectedResults, results);
  }
// Example #28
  /**
   * Exercises query cancellation in DirectDruidClient: the query's HTTP future is already
   * cancelled, so the client fires a cancellation request (asserted below to be a DELETE),
   * and consuming the result sequence must raise QueryInterruptedException.
   */
  @Test
  public void testCancel() throws Exception {
    // Strict mock: the two go() expectations below must be satisfied in order.
    HttpClient httpClient = EasyMock.createStrictMock(HttpClient.class);

    Capture<Request> capturedRequest = EasyMock.newCapture();
    // First call (the query POST) returns an already-cancelled future.
    ListenableFuture<Object> cancelledFuture = Futures.immediateCancelledFuture();
    // Second call (the cancellation request) completes when cancellationFuture is set below.
    SettableFuture<Object> cancellationFuture = SettableFuture.create();

    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(cancelledFuture)
        .once();

    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(cancellationFuture)
        .once();

    EasyMock.replay(httpClient);

    final ServerSelector serverSelector =
        new ServerSelector(
            new DataSegment(
                "test",
                new Interval("2013-01-01/2013-01-02"),
                new DateTime("2013-01-01").toString(),
                Maps.<String, Object>newHashMap(),
                Lists.<String>newArrayList(),
                Lists.<String>newArrayList(),
                NoneShardSpec.instance(),
                0,
                0L),
            new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));

    DirectDruidClient client1 =
        new DirectDruidClient(
            new ReflectionQueryToolChestWarehouse(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            new DefaultObjectMapper(),
            httpClient,
            "foo",
            new NoopServiceEmitter());

    QueryableDruidServer queryableDruidServer1 =
        new QueryableDruidServer(
            new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0),
            client1);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment());

    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    // Complete the cancellation round-trip with an OK response before running the query.
    cancellationFuture.set(
        new StatusResponseHolder(HttpResponseStatus.OK, new StringBuilder("cancelled")));
    Sequence results = client1.run(query, context);
    // The last captured request must be the cancellation DELETE, and the connection released.
    Assert.assertEquals(HttpMethod.DELETE, capturedRequest.getValue().getMethod());
    Assert.assertEquals(0, client1.getNumOpenConnections());

    // Consuming a cancelled query's sequence must throw QueryInterruptedException.
    QueryInterruptedException exception = null;
    try {
      Sequences.toList(results, Lists.newArrayList());
    } catch (QueryInterruptedException e) {
      exception = e;
    }
    Assert.assertNotNull(exception);

    EasyMock.verify(httpClient);
  }
// Example #29
  /**
   * Verifies that a server-side error payload ({"error":"testing"}) surfaces as a
   * QueryInterruptedException carrying the UNKNOWN_EXCEPTION message, the server's error text
   * as the cause message, and the queried host name.
   *
   * <p>Fix: the Assert.assertEquals calls previously passed (actual, expected) — JUnit's
   * contract is (expected, actual), so failure messages were inverted. Argument order is now
   * corrected; pass/fail behavior is unchanged.
   */
  @Test
  public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException {
    HttpClient httpClient = EasyMock.createMock(HttpClient.class);
    // The query's HTTP future; completed below with the raw error payload.
    SettableFuture<Object> interruptionFuture = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    String hostName = "localhost:8080";
    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(interruptionFuture)
        .anyTimes();

    EasyMock.replay(httpClient);

    DataSegment dataSegment =
        new DataSegment(
            "test",
            new Interval("2013-01-01/2013-01-02"),
            new DateTime("2013-01-01").toString(),
            Maps.<String, Object>newHashMap(),
            Lists.<String>newArrayList(),
            Lists.<String>newArrayList(),
            NoneShardSpec.instance(),
            0,
            0L);
    final ServerSelector serverSelector =
        new ServerSelector(
            dataSegment,
            new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));

    DirectDruidClient client1 =
        new DirectDruidClient(
            new ReflectionQueryToolChestWarehouse(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            new DefaultObjectMapper(),
            httpClient,
            hostName,
            new NoopServiceEmitter());

    QueryableDruidServer queryableDruidServer =
        new QueryableDruidServer(
            new DruidServer("test1", hostName, 0, "historical", DruidServer.DEFAULT_TIER, 0),
            client1);

    serverSelector.addServerAndUpdateSegment(queryableDruidServer, dataSegment);

    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    // Deliver the server's error body before the sequence is consumed.
    interruptionFuture.set(new ByteArrayInputStream("{\"error\":\"testing\"}".getBytes()));
    Sequence results = client1.run(query, context);

    QueryInterruptedException actualException = null;
    try {
      Sequences.toList(results, Lists.newArrayList());
    } catch (QueryInterruptedException e) {
      actualException = e;
    }
    Assert.assertNotNull(actualException);
    Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, actualException.getMessage());
    Assert.assertEquals("testing", actualException.getCauseMessage());
    Assert.assertEquals(hostName, actualException.getHost());
    EasyMock.verify(httpClient);
  }
// Example #30
  /**
   * End-to-end exercise of DirectDruidClient connection accounting: each run() opens a
   * connection, a read timeout releases one, a delivered result releases another, and server
   * selection prefers the client with fewer open connections.
   *
   * <p>Fix: replaced {@code Assert.assertTrue(a == b)} with {@code assertEquals}/{@code
   * assertSame} so failures report both values instead of a bare "expected true"; for the
   * server-selection check, assertSame makes the intended reference-identity explicit.
   */
  @Test
  public void testRun() throws Exception {
    HttpClient httpClient = EasyMock.createMock(HttpClient.class);
    final URL url = new URL("http://foo/druid/v2/");

    // First call: a future we complete later with a real JSON result.
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(futureResult)
        .times(1);

    // Second call: a future we fail with a ReadTimeoutException.
    SettableFuture futureException = SettableFuture.create();
    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(futureException)
        .times(1);

    // All remaining calls: futures that never complete, so their connections stay open.
    EasyMock.expect(
            httpClient.go(
                EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject()))
        .andReturn(SettableFuture.create())
        .atLeastOnce();

    EasyMock.replay(httpClient);

    final ServerSelector serverSelector =
        new ServerSelector(
            new DataSegment(
                "test",
                new Interval("2013-01-01/2013-01-02"),
                new DateTime("2013-01-01").toString(),
                Maps.<String, Object>newHashMap(),
                Lists.<String>newArrayList(),
                Lists.<String>newArrayList(),
                NoneShardSpec.instance(),
                0,
                0L),
            new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));

    DirectDruidClient client1 =
        new DirectDruidClient(
            new ReflectionQueryToolChestWarehouse(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            new DefaultObjectMapper(),
            httpClient,
            "foo",
            new NoopServiceEmitter());
    DirectDruidClient client2 =
        new DirectDruidClient(
            new ReflectionQueryToolChestWarehouse(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER,
            new DefaultObjectMapper(),
            httpClient,
            "foo2",
            new NoopServiceEmitter());

    QueryableDruidServer queryableDruidServer1 =
        new QueryableDruidServer(
            new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0),
            client1);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer1, serverSelector.getSegment());
    QueryableDruidServer queryableDruidServer2 =
        new QueryableDruidServer(
            new DruidServer("test1", "localhost", 0, "historical", DruidServer.DEFAULT_TIER, 0),
            client2);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());

    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    Sequence s1 = client1.run(query, context);
    Assert.assertTrue(capturedRequest.hasCaptured());
    Assert.assertEquals(url, capturedRequest.getValue().getUrl());
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    Assert.assertEquals(1, client1.getNumOpenConnections());

    // simulate read timeout: the failed future should release its connection
    Sequence s2 = client1.run(query, context);
    Assert.assertEquals(2, client1.getNumOpenConnections());
    futureException.setException(new ReadTimeoutException());
    Assert.assertEquals(1, client1.getNumOpenConnections());

    // subsequent connections should work
    Sequence s3 = client1.run(query, context);
    Sequence s4 = client1.run(query, context);
    Sequence s5 = client1.run(query, context);

    Assert.assertEquals(4, client1.getNumOpenConnections());

    // produce result for first connection; consuming it releases that connection
    futureResult.set(
        new ByteArrayInputStream(
            "[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]".getBytes()));
    List<Result> results = Sequences.toList(s1, Lists.<Result>newArrayList());
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
    Assert.assertEquals(3, client1.getNumOpenConnections());

    client2.run(query, context);
    client2.run(query, context);

    Assert.assertEquals(2, client2.getNumOpenConnections());

    // client2 has fewer open connections (2 vs 3), so the strategy must pick it.
    Assert.assertSame(queryableDruidServer2, serverSelector.pick());

    EasyMock.verify(httpClient);
  }