@Test(timeout = 4000L)
  public void testRealtimeIndexTaskFailure() throws Exception {
    // Install a pusher whose push() always throws, so the realtime task can
    // never publish its segment and must end up in a failed state.
    setUpAndStartTaskQueue(
        new DataSegmentPusher() {
          @Override
          public String getPathForHadoop(String storageDirectory) {
            throw new UnsupportedOperationException();
          }

          @Override
          public DataSegment push(File segmentFile, DataSegment segment) throws IOException {
            throw new RuntimeException("FAILURE");
          }
        });

    // The task must register a monitor at least once; whether removal happens
    // depends on shutdown ordering, so it is left unconstrained.
    monitorScheduler.addMonitor(EasyMock.anyObject(Monitor.class));
    EasyMock.expectLastCall().atLeastOnce();
    monitorScheduler.removeMonitor(EasyMock.anyObject(Monitor.class));
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(monitorScheduler, queryRunnerFactoryConglomerate);

    final RealtimeIndexTask task = giveMeARealtimeIndexTask();
    final String id = task.getId();
    tq.add(task);

    // Poll until the task leaves the runnable state.
    while (tsqa.getStatus(id).get().isRunnable()) {
      Thread.sleep(10);
    }

    Assert.assertTrue("Task should be in Failure state", tsqa.getStatus(id).get().isFailure());

    EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
  }
  @Test
  public void testIndexTask() throws Exception {
    // A batch index task over a two-day interval with daily granularity; the
    // mock firehose supplies rows for both days, so two segments result.
    final Task task =
        new IndexTask(
            null,
            null,
            new IndexTask.IndexIngestionSpec(
                new DataSchema(
                    "foo",
                    null,
                    new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
                    new UniformGranularitySpec(
                        Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                    mapper),
                new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
                new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)),
            mapper,
            null);

    // Before the task is queued, storage should not know about it at all.
    Assert.assertFalse("pre run task status not present", tsqa.getStatus(task.getId()).isPresent());

    final TaskStatus mergedStatus = runTask(task);
    final TaskStatus status = ts.getStatus(task.getId()).get();
    final List<DataSegment> published = byIntervalOrdering.sortedCopy(mdc.getPublished());
    final List<DataSegment> logged =
        byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(task.getId()));

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals(
        "merged statusCode", TaskStatus.Status.SUCCESS, mergedStatus.getStatusCode());
    Assert.assertEquals("segments logged vs published", logged, published);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());

    // First daily segment: 2010-01-01.
    final DataSegment first = published.get(0);
    Assert.assertEquals("segment1 datasource", "foo", first.getDataSource());
    Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), first.getInterval());
    Assert.assertEquals(
        "segment1 dimensions", ImmutableList.of("dim1", "dim2"), first.getDimensions());
    Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), first.getMetrics());

    // Second daily segment: 2010-01-02.
    final DataSegment second = published.get(1);
    Assert.assertEquals("segment2 datasource", "foo", second.getDataSource());
    Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), second.getInterval());
    Assert.assertEquals(
        "segment2 dimensions", ImmutableList.of("dim1", "dim2"), second.getDimensions());
    Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), second.getMetrics());
  }
  /**
   * Queues {@code task} behind a deliberately-failing noop task, waits (up to
   * 10 seconds from method entry) for both to leave the runnable state, and
   * returns the final status of {@code task}.
   *
   * @param task the task to run; must not share an id with the dummy task
   * @return the terminal {@link TaskStatus} of {@code task}
   * @throws Exception if a task is still runnable after the deadline, or on
   *     interruption / status-lookup failure
   */
  private TaskStatus runTask(final Task task) throws Exception {
    // A throwaway noop task queued ahead of the real one; its isReady() throws,
    // so it fails immediately and exercises the queue's handling of a bad task.
    // The original literal carried a stray trailing '\"' after the closing
    // brace, which only parsed because Jackson ignores trailing tokens by
    // default; the literal is now well-formed JSON.
    final Task dummyTask =
        new DefaultObjectMapper()
            .readValue("{\"type\":\"noop\", \"isReadyResult\":\"exception\"}", Task.class);
    final long startTime = System.currentTimeMillis();

    Preconditions.checkArgument(!task.getId().equals(dummyTask.getId()));

    tq.add(dummyTask);
    tq.add(task);

    TaskStatus retVal = null;

    for (final String taskId : ImmutableList.of(dummyTask.getId(), task.getId())) {
      // Poll each task until it reaches a terminal state, sharing one deadline.
      // The old catch (Exception e) { throw Throwables.propagate(e); } wrapper
      // was removed: this method already declares throws Exception, so the
      // catch only wrapped checked exceptions and obscured stack traces.
      TaskStatus status;
      while ((status = tsqa.getStatus(taskId).get()).isRunnable()) {
        if (System.currentTimeMillis() > startTime + 10 * 1000) {
          throw new ISE("Where did the task go?!: %s", task.getId());
        }

        Thread.sleep(100);
      }
      // Only the caller's task status is returned; the dummy's is discarded.
      if (taskId.equals(task.getId())) {
        retVal = status;
      }
    }

    return retVal;
  }
  @Test(timeout = 4000L)
  public void testRealtimeIndexTask() throws Exception {
    // The realtime task registers a monitor on startup; removal count depends
    // on shutdown ordering, so it is left unconstrained.
    monitorScheduler.addMonitor(EasyMock.anyObject(Monitor.class));
    EasyMock.expectLastCall().atLeastOnce();
    monitorScheduler.removeMonitor(EasyMock.anyObject(Monitor.class));
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(monitorScheduler, queryRunnerFactoryConglomerate);

    final RealtimeIndexTask task = giveMeARealtimeIndexTask();
    final String id = task.getId();

    tq.add(task);
    // wait for task to process events and publish segment
    Assert.assertTrue(publishCountDown.await(1000, TimeUnit.MILLISECONDS));

    // Realtime Task has published the segment, simulate loading of segment to a historical node so
    // that task finishes with SUCCESS status
    segmentCallbacks
        .get(0)
        .segmentAdded(
            new DruidServerMetadata("dummy", "dummy_host", 0, "historical", "dummy_tier", 0),
            mdc.getPublished().iterator().next());

    // Wait for realtime index task to handle callback in plumber and succeed
    while (tsqa.getStatus(id).get().isRunnable()) {
      Thread.sleep(10);
    }

    Assert.assertTrue("Task should be in Success state", tsqa.getStatus(id).get().isSuccess());

    Assert.assertEquals(1, announcedSinks);
    Assert.assertEquals(1, pushedSegments);
    Assert.assertEquals(1, mdc.getPublished().size());

    final DataSegment published = mdc.getPublished().iterator().next();
    Assert.assertEquals("test_ds", published.getDataSource());
    Assert.assertEquals(ImmutableList.of("dim1", "dim2"), published.getDimensions());
    // NOTE: in Joda-Time 'Y' is year-of-era (unlike java.time, where it is
    // week-year), so this pattern yields the calendar date as intended.
    Assert.assertEquals(
        new Interval(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")),
        published.getInterval());
    Assert.assertEquals(ImmutableList.of("count"), published.getMetrics());

    EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
  }
  @Test
  public void testResumeTasks() throws Exception {
    // Same batch index task as testIndexTask: two-day interval, daily
    // granularity, two expected segments.
    final Task task =
        new IndexTask(
            null,
            null,
            new IndexTask.IndexIngestionSpec(
                new DataSchema(
                    "foo",
                    null,
                    new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
                    new UniformGranularitySpec(
                        Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                    mapper),
                new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
                new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)),
            mapper,
            null);

    final long startTime = System.currentTimeMillis();

    // Bypass TaskQueue.add(): write the task straight into storage as RUNNING
    // and rely on the queue's periodic sync from storage to pick it up.
    ts.insert(task, TaskStatus.running(task.getId()));

    // Poll until the resumed task reaches a terminal state, with a 10s deadline.
    while (tsqa.getStatus(task.getId()).get().isRunnable()) {
      if (System.currentTimeMillis() > startTime + 10 * 1000) {
        throw new ISE("Where did the task go?!: %s", task.getId());
      }

      Thread.sleep(100);
    }

    final TaskStatus status = ts.getStatus(task.getId()).get();
    final List<DataSegment> published = byIntervalOrdering.sortedCopy(mdc.getPublished());
    final List<DataSegment> logged =
        byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(task.getId()));

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("segments logged vs published", logged, published);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());

    // First daily segment: 2010-01-01.
    final DataSegment first = published.get(0);
    Assert.assertEquals("segment1 datasource", "foo", first.getDataSource());
    Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), first.getInterval());
    Assert.assertEquals(
        "segment1 dimensions", ImmutableList.of("dim1", "dim2"), first.getDimensions());
    Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), first.getMetrics());

    // Second daily segment: 2010-01-02.
    final DataSegment second = published.get(1);
    Assert.assertEquals("segment2 datasource", "foo", second.getDataSource());
    Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), second.getInterval());
    Assert.assertEquals(
        "segment2 dimensions", ImmutableList.of("dim1", "dim2"), second.getDimensions());
    Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), second.getMetrics());
  }