Example #1
  @Test
  public void testIndexTaskFailure() throws Exception {
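    // MockExceptionalFirehoseFactory is expected to throw while the task reads rows,
    // so the task should end up FAILED without publishing or nuking any segments.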
    final Task indexTask =
        new IndexTask(
            null,
            null,
            new IndexTask.IndexIngestionSpec(
                new DataSchema(
                    "foo",
                    null,
                    new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
                    new UniformGranularitySpec(
                        Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P1D"))),
                    mapper),
                new IndexTask.IndexIOConfig(new MockExceptionalFirehoseFactory()),
                new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)),
            mapper,
            null);

    final TaskStatus status = runTask(indexTask);

    Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
  }
Example #2
  @Test
  public void testBadVersion() throws Exception {
    final Task task =
        new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null) {
          @Override
          public String getType() {
            return "test";
          }

          @Override
          public TaskStatus run(TaskToolbox toolbox) throws Exception {
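            // Look up the single lock this task holds, then try to insert a segment
            // whose version the lock does not cover; the insert should be rejected,
            // making the task fail.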
            final TaskLock myLock =
                Iterables.getOnlyElement(
                    toolbox.getTaskActionClient().submit(new LockListAction()));

            final DataSegment segment =
                DataSegment.builder()
                    .dataSource("ds")
                    .interval(new Interval("2012-01-01/P1D"))
                    .version(myLock.getVersion() + "1!!!1!!")
                    .build();

            toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(segment)));
            return TaskStatus.success(getId());
          }
        };

    final TaskStatus status = runTask(task);

    Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
    Assert.assertEquals("segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("segments nuked", 0, mdc.getNuked().size());
  }
Example #3
  @Test
  public void testIndexTask() throws Exception {
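    // Ingest two days of data at DAY segment granularity; the task should succeed
    // and publish one segment per day.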
    final Task indexTask =
        new IndexTask(
            null,
            null,
            new IndexTask.IndexIngestionSpec(
                new DataSchema(
                    "foo",
                    null,
                    new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
                    new UniformGranularitySpec(
                        Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                    mapper),
                new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
                new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)),
            mapper,
            null);

    final Optional<TaskStatus> preRunTaskStatus = tsqa.getStatus(indexTask.getId());
    Assert.assertFalse("pre run task status not present", preRunTaskStatus.isPresent());

    final TaskStatus mergedStatus = runTask(indexTask);
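    // Collect the published and logged segments, sorted by interval so the
    // per-segment assertions below are deterministic.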
    final TaskStatus status = ts.getStatus(indexTask.getId()).get();
    final List<DataSegment> publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished());
    final List<DataSegment> loggedSegments =
        byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals(
        "merged statusCode", TaskStatus.Status.SUCCESS, mergedStatus.getStatusCode());
    Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());

    Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
    Assert.assertEquals(
        "segment1 interval",
        new Interval("2010-01-01/P1D"),
        publishedSegments.get(0).getInterval());
    Assert.assertEquals(
        "segment1 dimensions",
        ImmutableList.of("dim1", "dim2"),
        publishedSegments.get(0).getDimensions());
    Assert.assertEquals(
        "segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());

    Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
    Assert.assertEquals(
        "segment2 interval",
        new Interval("2010-01-02/P1D"),
        publishedSegments.get(1).getInterval());
    Assert.assertEquals(
        "segment2 dimensions",
        ImmutableList.of("dim1", "dim2"),
        publishedSegments.get(1).getDimensions());
    Assert.assertEquals(
        "segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
  }
Example #4
  @Test
  public void testRealtimeishTask() throws Exception {
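    // RealtimeishTask exercises a realtime-style sequence of task actions; it is
    // expected to succeed and leave two published segments behind.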
    final Task rtishTask = new RealtimeishTask();
    final TaskStatus status = runTask(rtishTask);

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
  }
Example #5
  @Test
  public void testNeverReadyTask() throws Exception {
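    // A noop task whose readiness check is configured (isReadyResult=exception) to
    // throw; the queue should mark it FAILED without publishing anything.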
    final Task neverReadyTask =
        new DefaultObjectMapper()
            .readValue("{\"type\":\"noop\", \"isReadyResult\":\"exception\"}", Task.class);
    final TaskStatus status = runTask(neverReadyTask);

    Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
  }
Example #6
  @Test
  public void testNoopTask() throws Exception {
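    // A plain noop task (runTime=100) that should succeed without touching any
    // segments.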
    final Task noopTask =
        new DefaultObjectMapper()
            .readValue("{\"type\":\"noop\", \"runTime\":\"100\"}", Task.class);
    final TaskStatus status = runTask(noopTask);

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
  }
Example #7
  @Test
  public void testResumeTasks() throws Exception {
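    // Simulates resuming after a restart: the task is written straight into task
    // storage as RUNNING, and the test waits for the TaskQueue to pick it up and
    // run it to completion.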
    final Task indexTask =
        new IndexTask(
            null,
            null,
            new IndexTask.IndexIngestionSpec(
                new DataSchema(
                    "foo",
                    null,
                    new AggregatorFactory[] {new DoubleSumAggregatorFactory("met", "met")},
                    new UniformGranularitySpec(
                        Granularity.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                    mapper),
                new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
                new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)),
            mapper,
            null);

    final long startTime = System.currentTimeMillis();

    // manually insert the task into TaskStorage, waiting for TaskQueue to sync from storage
    ts.insert(indexTask, TaskStatus.running(indexTask.getId()));

    while (tsqa.getStatus(indexTask.getId()).get().isRunnable()) {
      if (System.currentTimeMillis() > startTime + 10 * 1000) {
        throw new ISE("Where did the task go?!: %s", indexTask.getId());
      }

      Thread.sleep(100);
    }

    final TaskStatus status = ts.getStatus(indexTask.getId()).get();
    final List<DataSegment> publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished());
    final List<DataSegment> loggedSegments =
        byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));

    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
    Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());

    Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
    Assert.assertEquals(
        "segment1 interval",
        new Interval("2010-01-01/P1D"),
        publishedSegments.get(0).getInterval());
    Assert.assertEquals(
        "segment1 dimensions",
        ImmutableList.of("dim1", "dim2"),
        publishedSegments.get(0).getDimensions());
    Assert.assertEquals(
        "segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());

    Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
    Assert.assertEquals(
        "segment2 interval",
        new Interval("2010-01-02/P1D"),
        publishedSegments.get(1).getInterval());
    Assert.assertEquals(
        "segment2 dimensions",
        ImmutableList.of("dim1", "dim2"),
        publishedSegments.get(1).getDimensions());
    Assert.assertEquals(
        "segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
  }
Example #8
  @Test
  public void testKillTask() throws Exception {
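    // Register three unused segments (backed by local files created below) that the
    // kill task is expected to clean up.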
    final File tmpSegmentDir = temporaryFolder.newFolder();

    List<DataSegment> expectedUnusedSegments =
        Lists.transform(
            ImmutableList.<String>of(
                "2011-04-01/2011-04-02", "2011-04-02/2011-04-03", "2011-04-04/2011-04-05"),
            new Function<String, DataSegment>() {
              @Override
              public DataSegment apply(String input) {
                final Interval interval = new Interval(input);
                try {
                  return DataSegment.builder()
                      .dataSource("test_kill_task")
                      .interval(interval)
                      .loadSpec(
                          ImmutableMap.<String, Object>of(
                              "type",
                              "local",
                              "path",
                              tmpSegmentDir.getCanonicalPath()
                                  + "/druid/localStorage/wikipedia/"
                                  + interval.getStart()
                                  + "-"
                                  + interval.getEnd()
                                  + "/"
                                  + "2011-04-06T16:52:46.119-05:00"
                                  + "/0/index.zip"))
                      .version("2011-04-06T16:52:46.119-05:00")
                      .dimensions(ImmutableList.<String>of())
                      .metrics(ImmutableList.<String>of())
                      .shardSpec(new NoneShardSpec())
                      .binaryVersion(9)
                      .size(0)
                      .build();
                } catch (IOException e) {
                  throw new ISE(e, "Error creating segments");
                }
              }
            });

    mdc.setUnusedSegments(expectedUnusedSegments);

    // manually create local segments files
    List<File> segmentFiles = Lists.newArrayList();
    for (DataSegment segment :
        mdc.getUnusedSegmentsForInterval("test_kill_task", new Interval("2011-04-01/P4D"))) {
      File file = new File((String) segment.getLoadSpec().get("path"));
      file.mkdirs();
      segmentFiles.add(file);
    }

    final Task killTask =
        new KillTask(null, "test_kill_task", new Interval("2011-04-01/P4D"), null);

    final TaskStatus status = runTask(killTask);
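    // All three unused segments should be nuked and their on-disk files deleted.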
    Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
    Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
    Assert.assertEquals("num segments nuked", 3, mdc.getNuked().size());
    Assert.assertTrue(
        "expected unused segments get killed",
        expectedUnusedSegments.containsAll(mdc.getNuked())
            && mdc.getNuked().containsAll(expectedUnusedSegments));

    for (File file : segmentFiles) {
      Assert.assertFalse("unused segments files get deleted", file.exists());
    }
  }