  private void createTestIndex(File segmentDir) throws Exception {
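    // Nine CSV rows: three consecutive hours, three hosts per hour.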
    List<String> rows =
        Lists.newArrayList(
            "2014102200,host1,10",
            "2014102200,host2,20",
            "2014102200,host3,30",
            "2014102201,host1,10",
            "2014102201,host2,20",
            "2014102201,host3,30",
            "2014102202,host1,10",
            "2014102202,host2,20",
            "2014102202,host3,30");

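    // CSV parser: "timestamp" in yyyyMMddHH format, "host" as the only
    // dimension, and "visited" left as a metric column for the aggregator.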
    StringInputRowParser parser =
        new StringInputRowParser(
            new CSVParseSpec(
                new TimestampSpec("timestamp", "yyyyMMddHH", null),
                new DimensionsSpec(ImmutableList.of("host"), null, null),
                null,
                ImmutableList.of("timestamp", "host", "visited")),
            Charsets.UTF_8.toString());

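    // Sum the "visited" column into "visited_sum" at ingestion time.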
    AggregatorFactory[] aggregators =
        new AggregatorFactory[] {new LongSumAggregatorFactory("visited_sum", "visited")};

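    // Ingest the rows into an on-heap incremental index, then persist it to
    // segmentDir; try-with-resources closes the index even if persist fails.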
    try (IncrementalIndex index =
        new OnheapIncrementalIndex(0, QueryGranularities.NONE, aggregators, true, 5000)) {
      for (String line : rows) {
        index.add(parser.parse(line));
      }
      IndexMerger.persist(index, segmentDir, null, new IndexSpec());
    }
  }

  @Parameterized.Parameters(name = "{1}")
  public static Collection<Object[]> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();

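    // In-memory task storage, plus an index schema that takes its dimensions
    // from ROW_PARSER and sums one long and one double metric.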
    final HeapMemoryTaskStorage ts = new HeapMemoryTaskStorage(new TaskStorageConfig(null) {});
    final IncrementalIndexSchema schema =
        new IncrementalIndexSchema.Builder()
            .withQueryGranularity(QueryGranularities.NONE)
            .withMinTimestamp(JodaUtils.MIN_INSTANT)
            .withDimensionsSpec(ROW_PARSER)
            .withMetrics(
                new AggregatorFactory[] {
                  new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
                  new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
                })
            .build();
    final OnheapIncrementalIndex index =
        new OnheapIncrementalIndex(schema, true, MAX_ROWS * MAX_SHARD_NUMBER);

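    // Fill the index with MAX_ROWS generated rows.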
    for (long i = 0; i < MAX_ROWS; ++i) {
      index.add(ROW_PARSER.parse(buildRow(i)));
    }

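    // Persist the index so the firehose factory can load it back as a segment.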
    if (!persistDir.mkdirs() && !persistDir.exists()) {
      throw new IOException(
          String.format("Could not create directory at [%s]", persistDir.getAbsolutePath()));
    }
    INDEX_MERGER.persist(index, persistDir, indexSpec);

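    // Lockbox over the in-memory storage, and a stub metadata coordinator that
    // serves segmentSet and tracks published/deleted segments in memory.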
    final TaskLockbox tl = new TaskLockbox(ts);
    final IndexerSQLMetadataStorageCoordinator mdc =
        new IndexerSQLMetadataStorageCoordinator(null, null, null) {
          private final Set<DataSegment> published = Sets.newHashSet();
          private final Set<DataSegment> nuked = Sets.newHashSet();

          @Override
          public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval)
              throws IOException {
            return ImmutableList.copyOf(segmentSet);
          }

          @Override
          public List<DataSegment> getUsedSegmentsForIntervals(
              String dataSource, List<Interval> intervals) throws IOException {
            return ImmutableList.copyOf(segmentSet);
          }

          @Override
          public List<DataSegment> getUnusedSegmentsForInterval(
              String dataSource, Interval interval) {
            return ImmutableList.of();
          }

          @Override
          public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) {
            Set<DataSegment> added = Sets.newHashSet();
            for (final DataSegment segment : segments) {
              if (published.add(segment)) {
                added.add(segment);
              }
            }

            return ImmutableSet.copyOf(added);
          }

          @Override
          public void deleteSegments(Set<DataSegment> segments) {
            nuked.addAll(segments);
          }
        };
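    // Task actions run locally against the in-memory storage above; segment
    // handoff notifications come from a no-op EasyMock nice mock.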
    final LocalTaskActionClientFactory tac =
        new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter()));
    SegmentHandoffNotifierFactory notifierFactory =
        EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(notifierFactory);

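    // Toolbox wired with no-op pusher/killer/mover/archiver stubs; only the
    // pieces the firehose factory actually exercises do real work.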
    final TaskToolboxFactory taskToolboxFactory =
        new TaskToolboxFactory(
            new TaskConfig(tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null),
            tac,
            newMockEmitter(),
            new DataSegmentPusher() {
              @Deprecated
              @Override
              public String getPathForHadoop(String dataSource) {
                return getPathForHadoop();
              }

              @Override
              public String getPathForHadoop() {
                throw new UnsupportedOperationException();
              }

              @Override
              public DataSegment push(File file, DataSegment segment) throws IOException {
                return segment;
              }
            },
            new DataSegmentKiller() {
              @Override
              public void kill(DataSegment segment) throws SegmentLoadingException {}
            },
            new DataSegmentMover() {
              @Override
              public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec)
                  throws SegmentLoadingException {
                return dataSegment;
              }
            },
            new DataSegmentArchiver() {
              @Override
              public DataSegment archive(DataSegment segment) throws SegmentLoadingException {
                return segment;
              }

              @Override
              public DataSegment restore(DataSegment segment) throws SegmentLoadingException {
                return segment;
              }
            },
            null, // segment announcer
            notifierFactory,
            null, // query runner factory conglomerate
            null, // query executor service
            null, // monitor scheduler
            new SegmentLoaderFactory(
                new SegmentLoaderLocalCacheManager(
                    null,
                    new SegmentLoaderConfig() {
                      @Override
                      public List<StorageLocationConfig> getLocations() {
                        return Lists.newArrayList();
                      }
                    },
                    MAPPER)),
            MAPPER,
            INDEX_MERGER,
            INDEX_IO,
            null,
            null,
            INDEX_MERGER_V9);
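
    // One test case per (parser, dimension list, metric list) combination; the
    // second Object[] element supplies the {1} test name used above.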
    Collection<Object[]> values = new LinkedList<>();
    for (InputRowParser parser :
        Arrays.<InputRowParser>asList(
            ROW_PARSER,
            new MapInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec(TIME_COLUMN, "auto", null),
                    new DimensionsSpec(
                        DimensionsSpec.getDefaultSchemas(ImmutableList.<String>of()),
                        ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME),
                        ImmutableList.<SpatialDimensionSchema>of()),
                    null,
                    null)))) {
      for (List<String> dimNames : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
        for (List<String> metricNames :
            Arrays.<List<String>>asList(
                null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
          values.add(
              new Object[] {
                new IngestSegmentFirehoseFactory(
                    DATA_SOURCE_NAME,
                    FOREVER,
                    new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
                    dimNames,
                    metricNames,
                    Guice.createInjector(
                        new Module() {
                          @Override
                          public void configure(Binder binder) {
                            binder.bind(TaskToolboxFactory.class).toInstance(taskToolboxFactory);
                          }
                        }),
                    INDEX_IO),
                String.format(
                    "DimNames[%s]MetricNames[%s]ParserDimNames[%s]",
                    dimNames == null ? "null" : "dims",
                    metricNames == null ? "null" : "metrics",
                    parser == ROW_PARSER ? "dims" : "null"),
                parser
              });
        }
      }
    }
    return values;
  }