/**
   * Ingests a small TSV sample with a {@code hyperUnique} metric built over the "market"
   * column, then runs a groupBy query against the resulting segment and checks that both the
   * re-aggregated HLL metric and its cardinality post-aggregator report ~3 unique markets.
   */
  @Test
  public void testIngestAndQuery() throws Exception {
    final AggregationTestHelper queryHelper =
        AggregationTestHelper.createGroupByQueryAggregationTestHelper(
            Lists.newArrayList(new AggregatorsModule()), tempFolder);

    // Ingest-time metric: fold the "market" dimension into an HLL sketch named index_hll.
    final String metricSpec =
        "[{"
            + "\"type\": \"hyperUnique\","
            + "\"name\": \"index_hll\","
            + "\"fieldName\": \"market\""
            + "}]";

    // TSV parse spec with explicit column ordering and an auto-detected timestamp format.
    final String parseSpec =
        "{"
            + "\"type\" : \"string\","
            + "\"parseSpec\" : {"
            + "    \"format\" : \"tsv\","
            + "    \"timestampSpec\" : {"
            + "        \"column\" : \"timestamp\","
            + "        \"format\" : \"auto\""
            + "},"
            + "    \"dimensionsSpec\" : {"
            + "        \"dimensions\": [],"
            + "        \"dimensionExclusions\" : [],"
            + "        \"spatialDimensions\" : []"
            + "    },"
            + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
            + "  }"
            + "}";

    // groupBy over all time: re-aggregate the sketch, then expose its cardinality as a
    // post-aggregation so both read paths are exercised.
    final String query =
        "{"
            + "\"queryType\": \"groupBy\","
            + "\"dataSource\": \"test_datasource\","
            + "\"granularity\": \"ALL\","
            + "\"dimensions\": [],"
            + "\"aggregations\": ["
            + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
            + "],"
            + "\"postAggregations\": ["
            + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
            + "],"
            + "\"intervals\": [ \"1970/2050\" ]"
            + "}";

    final File inputFile =
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile());

    Sequence resultSequence =
        queryHelper.createIndexAndRunQueryOnSegment(
            inputFile,
            parseSpec,
            metricSpec,
            0,
            QueryGranularities.NONE,
            50000,
            query);

    // HLL is approximate, hence the 0.1 tolerance around the exact count of 3 markets.
    final MapBasedRow resultRow =
        (MapBasedRow) Sequences.toList(resultSequence, Lists.newArrayList()).get(0);
    Assert.assertEquals(3.0, resultRow.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(3.0, resultRow.getFloatMetric("index_unique_count"), 0.1);
  }
  @Test
  public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(
        new MapBasedInputRow(
            new DateTime("2014-09-01T00:00:00"),
            Lists.newArrayList("billy"),
            ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(
        new MapBasedInputRow(
            new DateTime("2014-09-01T01:00:00"),
            Lists.newArrayList("billy", "sally"),
            ImmutableMap.<String, Object>of(
                "billy", "hip",
                "sally", "hop")));

    GroupByQueryEngine engine = makeGroupByQueryEngine();

    final Sequence<Row> rows =
        engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(QueryGranularity.ALL)
                .setInterval(new Interval(0, new DateTime().getMillis()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .addAggregator(
                    new JavaScriptAggregatorFactory(
                        "fieldLength",
                        Arrays.asList("sally", "billy"),
                        "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
                        "function() { return 0; }",
                        "function(a,b) { return a + b; }"))
                .build(),
            new IncrementalIndexStorageAdapter(index));

    final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());

    Assert.assertEquals(2, results.size());

    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(
        ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());

    row = (MapBasedRow) results.get(1);
    Assert.assertEquals(
        ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0),
        row.getEvent());
  }
  @Test
  public void testSanity() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(
        new MapBasedInputRow(
            new DateTime().minus(1).getMillis(),
            Lists.newArrayList("billy"),
            ImmutableMap.<String, Object>of("billy", "hi")));
    index.add(
        new MapBasedInputRow(
            new DateTime().minus(1).getMillis(),
            Lists.newArrayList("sally"),
            ImmutableMap.<String, Object>of("sally", "bo")));

    GroupByQueryEngine engine = makeGroupByQueryEngine();

    final Sequence<Row> rows =
        engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(QueryGranularity.ALL)
                .setInterval(new Interval(0, new DateTime().getMillis()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .build(),
            new IncrementalIndexStorageAdapter(index));

    final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());

    Assert.assertEquals(2, results.size());

    MapBasedRow row = (MapBasedRow) results.get(0);
    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());

    row = (MapBasedRow) results.get(1);
    Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());
  }
 /**
  * Returns the event map of the row this reader is currently positioned on.
  *
  * <p>NOTE(review): {@code currRow} is declared outside this view — presumably a
  * {@code MapBasedRow} advanced by the reader's iteration method; confirm it is non-null
  * whenever this accessor is called per the enclosing reader's contract.
  */
 @Override
 public Map<String, Object> getCurrentValue() throws IOException, InterruptedException {
   return currRow.getEvent();
 }
 /**
  * Returns the timestamp of the current row, used as the record key.
  *
  * <p>NOTE(review): relies on the same {@code currRow} field as {@code getCurrentValue};
  * its type and nullability are defined outside this view — confirm against the reader's
  * iteration logic.
  */
 @Override
 public DateTime getCurrentKey() throws IOException, InterruptedException {
   return currRow.getTimestamp();
 }