  @Before
  public void setUp() throws IOException {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());

    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);
    serverManagerExec = Executors.newFixedThreadPool(2);
    serverManager =
        new ServerManager(
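            // Stub loader: getSegment() fabricates a SegmentForTesting from the
            // loadSpec's "version" and "interval" entries; file access is unsupported.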
            new SegmentLoader() {
              @Override
              public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException {
                return false;
              }

              @Override
              public Segment getSegment(final DataSegment segment) {
                return new SegmentForTesting(
                    MapUtils.getString(segment.getLoadSpec(), "version"),
                    (Interval) segment.getLoadSpec().get("interval"));
              }

              @Override
              public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException {
                throw new UnsupportedOperationException();
              }

              @Override
              public void cleanup(DataSegment segment) throws SegmentLoadingException {}
            },
            new QueryRunnerFactoryConglomerate() {
              @Override
              public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(
                  QueryType query) {
                return (QueryRunnerFactory) factory;
              }
            },
            new NoopServiceEmitter(),
            serverManagerExec,
            new DefaultObjectMapper(),
            new LocalCacheProvider().get(),
            new CacheConfig());

    loadQueryable("test", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "2", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-03"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-04"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-02"));
  }
Example #2
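A Guice provider method: the ServiceEmitter is built from the node's self configuration, registered with EmittingLogger, and then returned for injection under lifecycle management.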
  @Provides
  @ManageLifecycle
  public ServiceEmitter getServiceEmitter(
      @Self Supplier<DruidNode> configSupplier, Emitter emitter) {
    final DruidNode config = configSupplier.get();
    final ServiceEmitter retVal =
        new ServiceEmitter(config.getServiceName(), config.getHost(), emitter);
    EmittingLogger.registerEmitter(retVal);
    return retVal;
  }
Example #3
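Lazy initialization: if no emitter exists yet, one is created from properties over a single-connection HTTP client; EmittingLogger.registerEmitter is then called with the current emitter on every invocation.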
  private void initializeEmitter() {
    if (emitter == null) {
      final HttpClient httpClient =
          HttpClientInit.createClient(
              HttpClientConfig.builder().withNumConnections(1).build(), lifecycle);

      emitter =
          new ServiceEmitter(
              PropUtils.getProperty(props, "druid.service"),
              PropUtils.getProperty(props, "druid.host"),
              Emitters.create(props, httpClient, jsonMapper, lifecycle));
    }
    EmittingLogger.registerEmitter(emitter);
  }
Example #4
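A slimmer variant of the first test setup: a NoopServiceEmitter is registered, a ServerManager is built on a same-thread executor with stubbed dependencies, and the same test segments are loaded.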
  @Before
  public void setUp() throws IOException {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());

    factory = new MyQueryRunnerFactory();
    serverManager =
        new ServerManager(
            new SegmentLoader() {
              @Override
              public Segment getSegment(final DataSegment segment) {
                return new SegmentForTesting(
                    MapUtils.getString(segment.getLoadSpec(), "version"),
                    (Interval) segment.getLoadSpec().get("interval"));
              }

              @Override
              public void cleanup(DataSegment segment) throws SegmentLoadingException {}
            },
            new QueryRunnerFactoryConglomerate() {
              @Override
              public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(
                  QueryType query) {
                return (QueryRunnerFactory) factory;
              }
            },
            new NoopServiceEmitter(),
            MoreExecutors.sameThreadExecutor());

    loadQueryable("test", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "2", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-03"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-04"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-02"));
  }
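Example #5
A TaskMaster test setup: EasyMock stubs for the task lockbox and action client factory are recorded, Curator is started and the leader-latch path created, and the NoopServiceEmitter handed to TaskMaster is also registered with EmittingLogger at the end.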
  @Before
  public void setUp() throws Exception {
    req = EasyMock.createStrictMock(HttpServletRequest.class);
    supervisorManager = EasyMock.createMock(SupervisorManager.class);
    taskLockbox = EasyMock.createStrictMock(TaskLockbox.class);
    taskLockbox.syncFromStorage();
    EasyMock.expectLastCall().atLeastOnce();
    taskLockbox.add(EasyMock.<Task>anyObject());
    EasyMock.expectLastCall().atLeastOnce();
    taskLockbox.remove(EasyMock.<Task>anyObject());
    EasyMock.expectLastCall().atLeastOnce();

    // for second Noop Task directly added to deep storage.
    taskLockbox.add(EasyMock.<Task>anyObject());
    EasyMock.expectLastCall().atLeastOnce();
    taskLockbox.remove(EasyMock.<Task>anyObject());
    EasyMock.expectLastCall().atLeastOnce();

    taskActionClientFactory = EasyMock.createStrictMock(TaskActionClientFactory.class);
    EasyMock.expect(taskActionClientFactory.create(EasyMock.<Task>anyObject()))
        .andReturn(null)
        .anyTimes();
    EasyMock.replay(taskLockbox, taskActionClientFactory);

    taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
    runTaskCountDownLatches = new CountDownLatch[2];
    runTaskCountDownLatches[0] = new CountDownLatch(1);
    runTaskCountDownLatches[1] = new CountDownLatch(1);
    taskCompletionCountDownLatches = new CountDownLatch[2];
    taskCompletionCountDownLatches[0] = new CountDownLatch(1);
    taskCompletionCountDownLatches[1] = new CountDownLatch(1);
    announcementLatch = new CountDownLatch(1);
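    // Start Curator against the test ZK server and pre-create the
    // leader-latch path that TaskMaster's leader election uses.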
    IndexerZkConfig indexerZkConfig =
        new IndexerZkConfig(new ZkPathsConfig(), null, null, null, null, null);
    setupServerAndCurator();
    curator.start();
    curator.blockUntilConnected();
    curator.create().creatingParentsIfNeeded().forPath(indexerZkConfig.getLeaderLatchPath());
    druidNode = new DruidNode("hey", "what", 1234);
    ServiceEmitter serviceEmitter = new NoopServiceEmitter();
    taskMaster =
        new TaskMaster(
            new TaskQueueConfig(null, new Period(1), null, new Period(10)),
            taskLockbox,
            taskStorage,
            taskActionClientFactory,
            druidNode,
            indexerZkConfig,
            new TaskRunnerFactory<MockTaskRunner>() {
              @Override
              public MockTaskRunner build() {
                return new MockTaskRunner(runTaskCountDownLatches, taskCompletionCountDownLatches);
              }
            },
            curator,
            new NoopServiceAnnouncer() {
              @Override
              public void announce(DruidNode node) {
                announcementLatch.countDown();
              }
            },
            serviceEmitter,
            supervisorManager);
    EmittingLogger.registerEmitter(serviceEmitter);
  }
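Example #6
Here the registered emitter is an EasyMock mock of ServiceEmitter, so the test can set expectations on events emitted through EmittingLogger; task storage is selected by the taskStorageType parameter, either heap-memory or Derby-backed metadata storage.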
  @Before
  public void setUp() throws Exception {
    emitter = EasyMock.createMock(ServiceEmitter.class);
    EmittingLogger.registerEmitter(emitter);
    queryRunnerFactoryConglomerate =
        EasyMock.createStrictMock(QueryRunnerFactoryConglomerate.class);
    monitorScheduler = EasyMock.createStrictMock(MonitorScheduler.class);
    publishCountDown = new CountDownLatch(1);
    announcedSinks = 0;
    pushedSegments = 0;
    tmpDir = temporaryFolder.newFolder();
    TestUtils testUtils = new TestUtils();
    mapper = testUtils.getTestObjectMapper();

    tqc =
        mapper.readValue(
            "{\"startDelay\":\"PT0S\", \"restartDelay\":\"PT1S\", \"storageSyncRate\":\"PT0.5S\"}",
            TaskQueueConfig.class);
    indexSpec = new IndexSpec();

    if (taskStorageType.equals("HeapMemoryTaskStorage")) {
      ts = new HeapMemoryTaskStorage(new TaskStorageConfig(null) {});
    } else if (taskStorageType.equals("MetadataTaskStorage")) {
      testDerbyConnector = derbyConnectorRule.getConnector();
      mapper.registerSubtypes(
          new NamedType(MockExceptionalFirehoseFactory.class, "mockExcepFirehoseFactory"),
          new NamedType(MockFirehoseFactory.class, "mockFirehoseFactory"));
      testDerbyConnector.createTaskTables();
      testDerbyConnector.createSegmentTable();
      ts =
          new MetadataTaskStorage(
              testDerbyConnector,
              new TaskStorageConfig(null),
              new SQLMetadataStorageActionHandlerFactory(
                  testDerbyConnector,
                  derbyConnectorRule.metadataTablesConfigSupplier().get(),
                  mapper));
    } else {
      throw new RuntimeException(String.format("Unknown task storage type [%s]", taskStorageType));
    }

    serverView =
        new FilteredServerView() {
          @Override
          public void registerSegmentCallback(
              Executor exec, ServerView.SegmentCallback callback, Predicate<DataSegment> filter) {
            segmentCallbacks.add(callback);
          }
        };
    setUpAndStartTaskQueue(
        new DataSegmentPusher() {
          @Override
          public String getPathForHadoop(String dataSource) {
            throw new UnsupportedOperationException();
          }

          @Override
          public DataSegment push(File file, DataSegment segment) throws IOException {
            pushedSegments++;
            return segment;
          }
        });
  }
Example #7
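AppenderatorTester's constructor: a real ServiceEmitter wrapping a LoggingEmitter is started and registered before the appenderator and its dependencies are assembled.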
  public AppenderatorTester(final int maxRowsInMemory, final File basePersistDirectory) {
    objectMapper = new DefaultObjectMapper();
    objectMapper.registerSubtypes(LinearShardSpec.class);

    final Map<String, Object> parserMap =
        objectMapper.convertValue(
            new MapInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec("ts", "auto", null), new DimensionsSpec(null, null, null))),
            Map.class);
    schema =
        new DataSchema(
            DATASOURCE,
            parserMap,
            new AggregatorFactory[] {
              new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met")
            },
            new UniformGranularitySpec(Granularity.MINUTE, QueryGranularities.NONE, null),
            objectMapper);

    tuningConfig =
        new RealtimeTuningConfig(
            maxRowsInMemory,
            null,
            null,
            basePersistDirectory,
            null,
            null,
            null,
            null,
            null,
            null,
            0,
            0,
            null,
            null);

    metrics = new FireDepartmentMetrics();
    queryExecutor = Execs.singleThreaded("queryExecutor(%d)");

    indexIO =
        new IndexIO(
            objectMapper,
            new ColumnConfig() {
              @Override
              public int columnCacheSizeBytes() {
                return 0;
              }
            });
    indexMerger = new IndexMerger(objectMapper, indexIO);

    emitter =
        new ServiceEmitter(
            "test",
            "test",
            new LoggingEmitter(
                new Logger(AppenderatorTester.class), LoggingEmitter.Level.INFO, objectMapper));
    emitter.start();
    EmittingLogger.registerEmitter(emitter);
    dataSegmentPusher =
        new DataSegmentPusher() {
          @Deprecated
          @Override
          public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
          }

          @Override
          public String getPathForHadoop() {
            throw new UnsupportedOperationException();
          }

          @Override
          public DataSegment push(File file, DataSegment segment) throws IOException {
            pushedSegments.add(segment);
            return segment;
          }
        };
    appenderator =
        Appenderators.createRealtime(
            schema,
            tuningConfig,
            metrics,
            dataSegmentPusher,
            objectMapper,
            indexIO,
            indexMerger,
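            // Only timeseries queries are wired into this conglomerate.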
            new DefaultQueryRunnerFactoryConglomerate(
                ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(
                    TimeseriesQuery.class,
                    new TimeseriesQueryRunnerFactory(
                        new TimeseriesQueryQueryToolChest(
                            new IntervalChunkingQueryRunnerDecorator(
                                queryExecutor, QueryRunnerTestHelper.NOOP_QUERYWATCHER, emitter)),
                        new TimeseriesQueryEngine(),
                        QueryRunnerTestHelper.NOOP_QUERYWATCHER))),
            new DataSegmentAnnouncer() {
              @Override
              public void announceSegment(DataSegment segment) throws IOException {}

              @Override
              public void unannounceSegment(DataSegment segment) throws IOException {}

              @Override
              public void announceSegments(Iterable<DataSegment> segments) throws IOException {}

              @Override
              public void unannounceSegments(Iterable<DataSegment> segments) throws IOException {}

              @Override
              public boolean isAnnounced(DataSegment segment) {
                return false;
              }
            },
            emitter,
            queryExecutor,
            MapCache.create(2048),
            new CacheConfig());
  }