@Override
public void close() throws Exception
{
  appenderator.close();
  queryExecutor.shutdownNow();
  emitter.close();
  FileUtils.deleteDirectory(tuningConfig.getBasePersistDirectory());
}
@Test
public void testDefaultBasePersistDirectory()
{
  // Two default configs should each get their own, distinct base persist directory.
  final RealtimeTuningConfig tuningConfig1 = RealtimeTuningConfig.makeDefaultTuningConfig(null);
  final RealtimeTuningConfig tuningConfig2 = RealtimeTuningConfig.makeDefaultTuningConfig(null);

  Assert.assertNotEquals(
      tuningConfig1.getBasePersistDirectory(),
      tuningConfig2.getBasePersistDirectory()
  );
}
@Override
public Appenderator build(
    final DataSchema schema,
    final RealtimeTuningConfig config,
    final FireDepartmentMetrics metrics
)
{
  return Appenderators.createRealtime(
      schema,
      config.withBasePersistDirectory(
          makeBasePersistSubdirectory(
              config.getBasePersistDirectory(),
              schema.getDataSource(),
              config.getShardSpec()
          )
      ),
      metrics,
      dataSegmentPusher,
      objectMapper,
      indexIO,
      indexMerger,
      conglomerate,
      segmentAnnouncer,
      emitter,
      queryExecutorService,
      cache,
      cacheConfig
  );
}
@After
public void tearDown() throws Exception
{
  EasyMock.verify(announcer, segmentPublisher, dataSegmentPusher, serverView, emitter);
  FileUtils.deleteDirectory(
      new File(tuningConfig.getBasePersistDirectory(), schema.getDataSource())
  );
}
@Override
public void run()
{
  plumber = initPlumber();
  final Period intermediatePersistPeriod = config.getIntermediatePersistPeriod();

  try {
    plumber.startJob();

    // Delay firehose connection to avoid claiming input resources while the plumber is starting up.
    firehose = initFirehose();

    long nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
    while (firehose.hasMore()) {
      InputRow inputRow = null;
      try {
        try {
          inputRow = firehose.nextRow();
        }
        catch (Exception e) {
          log.debug(e, "thrown away line due to exception, considering unparseable");
          metrics.incrementUnparseable();
          continue;
        }

        boolean lateEvent = false;
        boolean indexLimitExceeded = false;
        try {
          lateEvent = plumber.add(inputRow) == -1;
        }
        catch (IndexSizeExceededException e) {
          log.info("Index limit exceeded: %s", e.getMessage());
          indexLimitExceeded = true;
        }

        // Drop late events and events rejected because the index hit its size limit,
        // persisting first if the limit was exceeded or the flush deadline has passed.
        if (indexLimitExceeded || lateEvent) {
          metrics.incrementThrownAway();
          log.debug("Throwing away event[%s]", inputRow);

          if (indexLimitExceeded || System.currentTimeMillis() > nextFlush) {
            plumber.persist(firehose.commit());
            nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
          }

          continue;
        }

        // Persist when the target sink can no longer append rows or the intermediate persist period has elapsed.
        final Sink sink = plumber.getSink(inputRow.getTimestampFromEpoch());
        if ((sink != null && !sink.canAppendRow()) || System.currentTimeMillis() > nextFlush) {
          plumber.persist(firehose.commit());
          nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
        }

        metrics.incrementProcessed();
      }
      catch (ParseException e) {
        if (inputRow != null) {
          log.error(e, "unparseable line: %s", inputRow);
        }
        metrics.incrementUnparseable();
      }
    }
  }
  catch (RuntimeException e) {
    log.makeAlert(
        e,
        "RuntimeException aborted realtime processing[%s]",
        fireDepartment.getDataSchema().getDataSource()
    ).emit();
    normalExit = false;
    throw e;
  }
  catch (Error e) {
    log.makeAlert(
        e,
        "Exception aborted realtime processing[%s]",
        fireDepartment.getDataSchema().getDataSource()
    ).emit();
    normalExit = false;
    throw e;
  }
  finally {
    CloseQuietly.close(firehose);
    if (normalExit) {
      plumber.finishJob();
      plumber = null;
      firehose = null;
    }
  }
}
@Test
public void testSerdeWithNonDefaults() throws Exception
{
  String jsonStr = "{\n"
                   + " \"type\": \"realtime\",\n"
                   + " \"maxRowsInMemory\": 100,\n"
                   + " \"intermediatePersistPeriod\": \"PT1H\",\n"
                   + " \"windowPeriod\": \"PT1H\",\n"
                   + " \"basePersistDirectory\": \"/tmp/xxx\",\n"
                   + " \"maxPendingPersists\": 100,\n"
                   + " \"buildV9Directly\": false,\n"
                   + " \"persistThreadPriority\": 100,\n"
                   + " \"mergeThreadPriority\": 100,\n"
                   + " \"reportParseExceptions\": true,\n"
                   + " \"handoffConditionTimeout\": 100\n"
                   + "}";

  ObjectMapper mapper = TestHelper.getObjectMapper();
  RealtimeTuningConfig config = (RealtimeTuningConfig) mapper.readValue(
      mapper.writeValueAsString(mapper.readValue(jsonStr, TuningConfig.class)),
      TuningConfig.class
  );

  Assert.assertEquals("/tmp/xxx", config.getBasePersistDirectory().toString());
  Assert.assertEquals(false, config.getBuildV9Directly());
  Assert.assertEquals(100, config.getHandoffConditionTimeout());
  Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
  Assert.assertEquals(new Period("PT1H"), config.getIntermediatePersistPeriod());
  Assert.assertEquals(NoneShardSpec.instance(), config.getShardSpec());
  Assert.assertEquals(100, config.getMaxPendingPersists());
  Assert.assertEquals(100, config.getMaxRowsInMemory());
  Assert.assertEquals(100, config.getMergeThreadPriority());
  Assert.assertEquals(100, config.getPersistThreadPriority());
  Assert.assertEquals(new Period("PT1H"), config.getWindowPeriod());
  Assert.assertEquals(true, config.isReportParseExceptions());
}
@Test
public void testSerdeWithDefaults() throws Exception
{
  String jsonStr = "{\"type\":\"realtime\"}";

  ObjectMapper mapper = TestHelper.getObjectMapper();
  RealtimeTuningConfig config = (RealtimeTuningConfig) mapper.readValue(
      mapper.writeValueAsString(mapper.readValue(jsonStr, TuningConfig.class)),
      TuningConfig.class
  );

  Assert.assertNotNull(config.getBasePersistDirectory());
  Assert.assertEquals(true, config.getBuildV9Directly());
  Assert.assertEquals(0, config.getHandoffConditionTimeout());
  Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
  Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
  Assert.assertEquals(NoneShardSpec.instance(), config.getShardSpec());
  Assert.assertEquals(0, config.getMaxPendingPersists());
  Assert.assertEquals(75000, config.getMaxRowsInMemory());
  Assert.assertEquals(0, config.getMergeThreadPriority());
  Assert.assertEquals(0, config.getPersistThreadPriority());
  Assert.assertEquals(new Period("PT10M"), config.getWindowPeriod());
  Assert.assertEquals(false, config.isReportParseExceptions());
}
@Test
public void testSpecificBasePersistDirectory()
{
  final RealtimeTuningConfig tuningConfig = RealtimeTuningConfig.makeDefaultTuningConfig(
      new File("/tmp/nonexistent")
  );
  Assert.assertEquals(new File("/tmp/nonexistent"), tuningConfig.getBasePersistDirectory());
}
@Test
public void testSwap() throws Exception
{
  final DataSchema schema = new DataSchema(
      "test",
      null,
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularity.HOUR, QueryGranularities.MINUTE, null),
      new DefaultObjectMapper()
  );

  final Interval interval = new Interval("2013-01-01/2013-01-02");
  final String version = new DateTime().toString();
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      100,
      new Period("P1Y"),
      null,
      null,
      null,
      null,
      null,
      null,
      null,
      null,
      0,
      0,
      null,
      null
  );
  final Sink sink = new Sink(
      interval,
      schema,
      tuningConfig.getShardSpec(),
      version,
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.isReportParseExceptions()
  );

  sink.add(
      new InputRow()
      {
        @Override
        public List<String> getDimensions()
        {
          return Lists.newArrayList();
        }

        @Override
        public long getTimestampFromEpoch()
        {
          return new DateTime("2013-01-01").getMillis();
        }

        @Override
        public DateTime getTimestamp()
        {
          return new DateTime("2013-01-01");
        }

        @Override
        public List<String> getDimension(String dimension)
        {
          return Lists.newArrayList();
        }

        @Override
        public float getFloatMetric(String metric)
        {
          return 0;
        }

        @Override
        public long getLongMetric(String metric)
        {
          return 0L;
        }

        @Override
        public Object getRaw(String dimension)
        {
          return null;
        }

        @Override
        public int compareTo(Row o)
        {
          return 0;
        }
      }
  );

  FireHydrant currHydrant = sink.getCurrHydrant();
  Assert.assertEquals(new Interval("2013-01-01/PT1M"), currHydrant.getIndex().getInterval());

  // swap() should hand back the hydrant that held the first row and install a fresh one
  // for subsequent rows.
  FireHydrant swapHydrant = sink.swap();

  sink.add(
      new InputRow()
      {
        @Override
        public List<String> getDimensions()
        {
          return Lists.newArrayList();
        }

        @Override
        public long getTimestampFromEpoch()
        {
          return new DateTime("2013-01-01").getMillis();
        }

        @Override
        public DateTime getTimestamp()
        {
          return new DateTime("2013-01-01");
        }

        @Override
        public List<String> getDimension(String dimension)
        {
          return Lists.newArrayList();
        }

        @Override
        public float getFloatMetric(String metric)
        {
          return 0;
        }

        @Override
        public long getLongMetric(String metric)
        {
          return 0L;
        }

        @Override
        public Object getRaw(String dimension)
        {
          return null;
        }

        @Override
        public int compareTo(Row o)
        {
          return 0;
        }
      }
  );

  Assert.assertEquals(currHydrant, swapHydrant);
  Assert.assertNotSame(currHydrant, sink.getCurrHydrant());
  Assert.assertEquals(
      new Interval("2013-01-01/PT1M"),
      sink.getCurrHydrant().getIndex().getInterval()
  );

  // One hydrant from before the swap plus one created after it.
  Assert.assertEquals(2, Iterators.size(sink.iterator()));
}