@Test
public void testBatchEvents() throws InterruptedException, EventDeliveryException {
  StressSource source = new StressSource();
  source.setChannelProcessor(mockProcessor);
  Context context = new Context();
  context.put("maxTotalEvents", "35");
  context.put("batchSize", "10");
  source.configure(context);
  source.start();
  for (int i = 0; i < 50; i++) {
    if (source.process() == Status.BACKOFF) {
      TestCase.assertTrue("Source should have sent all events in 4 batches", i == 4);
      break;
    }
    if (i < 3) {
      verify(mockProcessor, times(i + 1)).processEventBatch(getLastProcessedEventList(source));
    } else {
      verify(mockProcessor, times(1)).processEventBatch(getLastProcessedEventList(source));
    }
  }
  long successfulEvents = getCounterGroup(source).get("events.successful");
  TestCase.assertTrue(
      "Number of successful events should be 35 but was " + successfulEvents,
      successfulEvents == 35);
  long failedEvents = getCounterGroup(source).get("events.failed");
  TestCase.assertTrue(
      "Number of failed events should be 0 but was " + failedEvents, failedEvents == 0);
}
/**
 * Verify that setting a timeout in the configuration does not impact the database setting.
 */
@Test
public void timeoutConfiguredTest() {
  String host = "localhost";
  int timeout = 10;
  JedisPoolFactory mockJedisPoolFactory = mock(JedisPoolFactory.class);
  RedisSink redisSink = new RedisSink(mockJedisPoolFactory);
  Channel channel = mock(Channel.class);
  redisSink.setChannel(channel);
  Context context = new Context();
  context.put(RedisSinkConfigurationConstant.HOST, host);
  context.put(RedisSinkConfigurationConstant.BATCH_SIZE, "10");
  context.put(RedisSinkConfigurationConstant.TIMEOUT, Integer.toString(timeout));
  redisSink.configure(context);
  redisSink.start();
  verify(mockJedisPoolFactory)
      .create(
          any(JedisPoolConfig.class),
          eq(host),
          eq(Protocol.DEFAULT_PORT),
          eq(timeout),
          isNull(String.class),
          eq(Protocol.DEFAULT_DATABASE));
}
@Test(groups = "dev") public void autoWrapTest() throws EventDeliveryException { ctx.put(MongoSink.AUTO_WRAP, Boolean.toString(true)); ctx.put(MongoSink.DB_NAME, "test_wrap"); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); Transaction tx = channel.getTransaction(); tx.begin(); String msg = "2012/10/26 11:23:08 [error] 7289#0: *6430831 open() \"/usr/local/nginx/html/50x.html\" failed (2: No such file or directory), client: 10.160.105.161, server: sg15.redatoms.com, request: \"POST /mojo/ajax/embed HTTP/1.0\", upstream: \"fastcgi://unix:/tmp/php-fpm.sock:\", host: \"sg15.redatoms.com\", referrer: \"http://sg15.redatoms.com/mojo/mobile/package\""; Event e = EventBuilder.withBody(msg.getBytes()); channel.put(e); tx.commit(); tx.close(); sink.process(); sink.stop(); DB db = mongo.getDB("test_wrap"); DBCollection collection = db.getCollection("test_log"); DBCursor cursor = collection.find(new BasicDBObject(MongoSink.DEFAULT_WRAP_FIELD, msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get(MongoSink.DEFAULT_WRAP_FIELD), msg); mongo.dropDatabase("test_wrap"); }
@Test
public void testMaxSuccessfulEvents() throws InterruptedException, EventDeliveryException {
  StressSource source = new StressSource();
  source.setChannelProcessor(mockProcessor);
  Context context = new Context();
  context.put("maxSuccessfulEvents", "35");
  source.configure(context);
  source.start();
  for (int i = 0; i < 10; i++) {
    source.process();
  }
  // 1 failed call, then 10 more successful calls
  doThrow(new ChannelException("stub")).when(mockProcessor).processEvent(getEvent(source));
  source.process();
  doNothing().when(mockProcessor).processEvent(getEvent(source));
  for (int i = 0; i < 10; i++) {
    source.process();
  }
  // 1 failed call, then up to 50 more calls; only 15 reach the processor
  // before maxSuccessfulEvents (35) is hit
  doThrow(new ChannelException("stub")).when(mockProcessor).processEvent(getEvent(source));
  source.process();
  doNothing().when(mockProcessor).processEvent(getEvent(source));
  for (int i = 0; i < 50; i++) {
    source.process();
  }
  // We should have called processEvent(evt) 37 times: twice for failures
  // and 35 times for successful events.
  verify(mockProcessor, times(37)).processEvent(getEvent(source));
}
@Test
public void testBatchEventsWithoutMaxTotalEvents()
    throws InterruptedException, EventDeliveryException {
  StressSource source = new StressSource();
  source.setChannelProcessor(mockProcessor);
  Context context = new Context();
  context.put("batchSize", "10");
  source.configure(context);
  source.start();
  for (int i = 0; i < 10; i++) {
    Assert.assertFalse(
        "StressSource with no maxTotalEvents should not return " + Status.BACKOFF,
        source.process() == Status.BACKOFF);
  }
  verify(mockProcessor, times(10)).processEventBatch(getLastProcessedEventList(source));
  long successfulEvents = getCounterGroup(source).get("events.successful");
  TestCase.assertTrue(
      "Number of successful events should be 100 but was " + successfulEvents,
      successfulEvents == 100);
  long failedEvents = getCounterGroup(source).get("events.failed");
  TestCase.assertTrue(
      "Number of failed events should be 0 but was " + failedEvents, failedEvents == 0);
}
@Override
public void doConfigure(Context context) {
  final String regex = context.getString(CONFIG_REGULAR_EXPRESSION, REGEX_DEFAULT);
  final boolean regexIgnoreCase = context.getBoolean(IGNORE_CASE_CONFIG, IGNORE_CASE_DEFAULT);
  inputPattern =
      Pattern.compile(regex, Pattern.DOTALL | (regexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
}
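// A hedged, self-contained sketch (the test name and sample strings are ours, not from
// the source) showing the effect of the combined flags built in doConfigure() above:
// DOTALL lets '.' match across newlines, CASE_INSENSITIVE ignores case, and '|' is the
// idiomatic way to combine the bit-mask flags.
@Test
public void dotallCaseInsensitiveFlagsExample() {
  java.util.regex.Pattern p =
      java.util.regex.Pattern.compile(
          "^ERROR:.*done$",
          java.util.regex.Pattern.DOTALL | java.util.regex.Pattern.CASE_INSENSITIVE);
  Assert.assertTrue(p.matcher("error: step one\nstep two DONE").matches());
}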
@Override
public void configure(Context context) {
  hours = context.getString("hours");
  Preconditions.checkArgument(
      StringUtils.isNotEmpty(hours), "Must be configured with a valid hours value");
  outputPattern = context.getString("outputPattern");
  Preconditions.checkArgument(
      StringUtils.isNotEmpty(outputPattern), "Must be configured with a valid outputPattern");
}
@Test
public void shouldUseSpecifiedSerializer() throws Exception {
  Context context = new Context();
  context.put(SERIALIZER, "org.apache.flume.sink.elasticsearch.FakeEventSerializer");
  assertNull(fixture.getEventSerializer());
  fixture.configure(context);
  assertTrue(fixture.getEventSerializer() instanceof FakeEventSerializer);
}
/**
 * Verify that the required configuration is set.
 *
 * @param context the Flume context to validate
 */
private void ensureConfigCompleteness(Context context) {
  if (StringUtils.isEmpty(context.getString(RabbitMQConstants.CONFIG_EXCHANGENAME))
      && StringUtils.isEmpty(context.getString(RabbitMQConstants.CONFIG_QUEUENAME))) {
    throw new IllegalArgumentException(
        "You must configure at least one of the queue name or exchange name parameters");
  }
}
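// A hedged usage sketch (assumes it runs inside the same class, since the method is
// private; the exchange name value is a placeholder): a Context naming at least one of
// the two destinations passes, while an empty Context is rejected.
@Test
public void ensureConfigCompletenessExample() {
  Context valid = new Context();
  valid.put(RabbitMQConstants.CONFIG_EXCHANGENAME, "logs-exchange");
  ensureConfigCompleteness(valid); // no exception: exchange name is present
  try {
    ensureConfigCompleteness(new Context()); // neither queue nor exchange configured
    Assert.fail("Expected IllegalArgumentException for an empty configuration");
  } catch (IllegalArgumentException expected) {
    // expected
  }
}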
@Test
public void testRollbackWithSink() throws Exception {
  final NullSink nullSink = new NullSink();
  Context ctx = new Context();
  ctx.put("batchSize", "1");
  nullSink.configure(ctx);
  nullSink.setChannel(channel);
  final int numItems = 99;
  Thread t =
      new Thread() {
        @Override
        public void run() {
          int count = 0;
          while (count++ < numItems) {
            try {
              nullSink.process();
              Thread.sleep(1);
            } catch (EventDeliveryException e) {
              break;
            } catch (Exception e) {
              Throwables.propagate(e);
            }
          }
        }
      };
  t.setDaemon(true);
  t.setName("NullSink");
  t.start();
  putEvents(channel, "rollback", 10, 100);
  Transaction transaction;
  // put an item that we will roll back
  transaction = channel.getTransaction();
  transaction.begin();
  channel.put(EventBuilder.withBody("this is going to be rolledback".getBytes(Charsets.UTF_8)));
  transaction.rollback();
  transaction.close();
  while (t.isAlive()) {
    Thread.sleep(1);
  }
  // simulate a crash
  channel.stop();
  channel = createFileChannel();
  // take the item that was not rolled back
  transaction = channel.getTransaction();
  transaction.begin();
  Event event = channel.take();
  transaction.commit();
  transaction.close();
  Assert.assertNotNull(event);
  Assert.assertEquals("rollback-90-9", new String(event.getBody(), Charsets.UTF_8));
}
@Override
public void configure(Context context) {
  /*
   * Default is to listen on UDP port 162 on all IPv4 interfaces. Since 162 is a
   * privileged port, snmptrapd typically must be run as root. Alternatively, change
   * to a non-privileged port (> 1024).
   */
  bindAddress = context.getString("bind", DEFAULT_BIND);
  bindPort = context.getInteger("port", DEFAULT_PORT);
}
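// A hedged flume.conf fragment (agent and source names are hypothetical) matching the
// "bind" and "port" properties read above; it overrides the privileged default port 162
// with a non-privileged one so the agent does not have to run as root:
//
//   agent1.sources.snmp1.bind = 0.0.0.0
//   agent1.sources.snmp1.port = 5140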
@Test(groups = "dev") public void timestampExistingFieldTest() throws EventDeliveryException, ParseException { ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.dynamic.name()); String tsField = "createdOn"; ctx.put(MongoSink.TIMESTAMP_FIELD, tsField); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); JSONObject msg = new JSONObject(); msg.put("age", 11); msg.put("birthday", new Date().getTime()); String dateText = "2013-02-19T14:20:53+08:00"; msg.put(tsField, dateText); Transaction tx; for (int i = 0; i < 10; i++) { tx = channel.getTransaction(); tx.begin(); msg.put("name", "test" + i); JSONObject header = new JSONObject(); header.put(MongoSink.COLLECTION, "my_events"); header.put(MongoSink.DB_NAME, "dynamic_db"); Event e = EventBuilder.withBody(msg.toJSONString().getBytes(), header); channel.put(e); tx.commit(); tx.close(); } sink.process(); sink.stop(); msg.put(tsField, MongoSink.dateTimeFormatter.parseDateTime(dateText).toDate()); for (int i = 0; i < 10; i++) { msg.put("name", "test" + i); System.out.println("i = " + i); DB db = mongo.getDB("dynamic_db"); DBCollection collection = db.getCollection("my_events"); DBCursor cursor = collection.find(new BasicDBObject(msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get("name"), msg.get("name")); assertEquals(dbObject.get("age"), msg.get("age")); assertEquals(dbObject.get("birthday"), msg.get("birthday")); assertTrue(dbObject.get(tsField) instanceof Date); System.out.println("ts = " + dbObject.get(tsField)); } }
@Test
public void shouldUseSpecifiedIndexNameBuilder() throws Exception {
  Context context = new Context();
  context.put(
      ElasticSearchSinkConstants.INDEX_NAME_BUILDER,
      "org.apache.flume.sink.elasticsearch.FakeIndexNameBuilder");
  assertNull(fixture.getIndexNameBuilder());
  fixture.configure(context);
  assertTrue(fixture.getIndexNameBuilder() instanceof FakeIndexNameBuilder);
}
// @Test
public void testLifecycle() throws InterruptedException, LifecycleException {
  LOG.debug("Starting...");
  Context context = new Context();
  context.put("hdfs.path", testPath);
  context.put("hdfs.filePrefix", "pageview");
  Configurables.configure(sink, context);
  sink.setChannel(new MemoryChannel());
  sink.start();
  sink.stop();
}
/**
 * The initialization method for the Source. The context contains all the Flume
 * configuration info, and can be used to retrieve any configuration values necessary
 * to set up the Source.
 */
@Override
public void configure(Context context) {
  consumerKey = context.getString(TwitterSourceConstants.CONSUMER_KEY_KEY);
  consumerSecret = context.getString(TwitterSourceConstants.CONSUMER_SECRET_KEY);
  accessToken = context.getString(TwitterSourceConstants.ACCESS_TOKEN_KEY);
  accessTokenSecret = context.getString(TwitterSourceConstants.ACCESS_TOKEN_SECRET_KEY);
  String keywordString = context.getString(TwitterSourceConstants.KEYWORDS_KEY, "");
  keywords = keywordString.split(",");
  for (int i = 0; i < keywords.length; i++) {
    keywords[i] = keywords[i].trim();
  }
}
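// A hedged sketch of driving configure() directly with the same keys the source reads
// (the literal values are placeholders and 'source' stands for an instance of this
// Source class; both are ours, not from the original):
Context context = new Context();
context.put(TwitterSourceConstants.CONSUMER_KEY_KEY, "<consumer-key>");
context.put(TwitterSourceConstants.CONSUMER_SECRET_KEY, "<consumer-secret>");
context.put(TwitterSourceConstants.ACCESS_TOKEN_KEY, "<access-token>");
context.put(TwitterSourceConstants.ACCESS_TOKEN_SECRET_KEY, "<access-token-secret>");
context.put(TwitterSourceConstants.KEYWORDS_KEY, "flume, hadoop, bigdata");
source.configure(context); // keywords becomes {"flume", "hadoop", "bigdata"} after trimming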
@Override
public void configure(Context context) throws FlumeException {
  preserveExisting = context.getBoolean(PRESERVE, PRESERVE_DEFAULT);
  key = context.getString(KEY, KEY_DEFAULT);
  file = context.getString(FILE);
  period = context.getInteger(PERIOD, PERIOD_DEFAULT);
  if (file != null) {
    value = readHeader(file);
  } else {
    logger.error("CSVHeaderInterceptor - file not specified");
    throw new FlumeException("CSVHeaderInterceptor - file not specified");
  }
}
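// A hedged sketch of a minimal Context for this interceptor (the CSV path and header
// key are placeholders; KEY and FILE are the constants referenced above):
Context ctx = new Context();
ctx.put(FILE, "/etc/flume/columns.csv"); // required: configure() throws FlumeException without it
ctx.put(KEY, "csvHeader"); // optional: falls back to KEY_DEFAULT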
/** Configures the keys required for accessing Twitter. Also gets the product keys. */
@Override
public void configure(Context context) {
  consumerKey = context.getString("consumerKey");
  consumerSecret = context.getString("consumerSecret");
  accessToken = context.getString("accessToken");
  accessTokenSecret = context.getString("accessTokenSecret");
  String productKeys = context.getString("Product", "");
  searchFor = productKeys.split(",");
  for (int i = 0; i < searchFor.length; i++) {
    searchFor[i] = searchFor[i].trim();
  }
}
@Test
public void testMaxTotalEvents() throws InterruptedException, EventDeliveryException {
  StressSource source = new StressSource();
  source.setChannelProcessor(mockProcessor);
  Context context = new Context();
  context.put("maxTotalEvents", "35");
  source.configure(context);
  source.start();
  for (int i = 0; i < 50; i++) {
    source.process();
  }
  verify(mockProcessor, times(35)).processEvent(getEvent(source));
}
/** {@inheritDoc} */
@Override
public void configure(final Context context) {
  hostName = context.getString(HOSTNAME_CONFIG_PROP_NAME);
  port = context.getInteger(PORT_CONFIG_PROP_NAME);
  batchSize = context.getInteger(BATCH_SIZE_PROP_NAME, DEFAULT_BATCH_SIZE);
  if (sinkCounter == null) {
    sinkCounter = new SinkCounter(getName());
  }
  LOGGER.info(
      "Configuring ZipkinSpanCollectorSink. hostname: {}, port: {}, batchsize: {}",
      hostName,
      port,
      batchSize);
}
@Test
public void checkStatusFileCorrectlyUpdated() throws Exception {
  File file = File.createTempFile("statusFileName", ".txt");
  when(context.getString("status.file.path", "/var/lib/flume")).thenReturn(file.getParent());
  when(context.getString("status.file.name")).thenReturn(file.getName());
  SQLSourceHelper sqlSourceHelper = new SQLSourceHelper(context, "Source Name");
  sqlSourceHelper.setCurrentIndex(10);
  sqlSourceHelper.updateStatusFile();
  SQLSourceHelper sqlSourceHelper2 = new SQLSourceHelper(context, "Source Name");
  assertEquals(10L, sqlSourceHelper2.getCurrentIndex());
}
@Override
public void configure(Context context) {
  port = Integer.parseInt(context.getString("port"));
  bindAddress = context.getString("bind");
  try {
    maxThreads = context.getInteger(THREADS, 0);
  } catch (NumberFormatException e) {
    logger.warn(
        "AVRO source's \"threads\" property must specify an integer value: {}",
        context.getString(THREADS));
  }
  if (sourceCounter == null) {
    sourceCounter = new SourceCounter(getName());
  }
}
/**
 * Constructor that initializes the Hibernate configuration parameters.
 *
 * @param sqlSourceHelper contains the configuration parameters from the Flume config file
 */
public HibernateHelper(SQLSourceHelper sqlSourceHelper) {
  this.sqlSourceHelper = sqlSourceHelper;
  Context context = sqlSourceHelper.getContext();
  Map<String, String> hibernateProperties = context.getSubProperties("hibernate.");
  config = new Configuration();
  for (Map.Entry<String, String> e : hibernateProperties.entrySet()) {
    config.setProperty("hibernate." + e.getKey(), e.getValue());
  }
}
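// A hedged sketch of the sub-property round trip (the connection values are
// placeholders): getSubProperties("hibernate.") strips the prefix, and the constructor
// above adds it back when populating the Hibernate Configuration.
Context context = new Context();
context.put("hibernate.connection.url", "jdbc:mysql://localhost:3306/flume");
context.put("hibernate.connection.driver_class", "com.mysql.jdbc.Driver");
Map<String, String> sub = context.getSubProperties("hibernate.");
// sub = {connection.url=jdbc:mysql://localhost:3306/flume,
//        connection.driver_class=com.mysql.jdbc.Driver}
// each entry is then re-registered as "hibernate." + key on the Configuration object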
@Override
public void configure(Context context) {
  String resolutionsStr = context.getString("resolutions", "month,day,hour,minute,second");
  String[] resolutionsArray = resolutionsStr.split(",");
  for (String resolution : resolutionsArray) {
    if (resolution.trim().equals("month")) {
      resolutions[4] = true;
    } else if (resolution.trim().equals("day")) {
      resolutions[3] = true;
    } else if (resolution.trim().equals("hour")) {
      resolutions[2] = true;
    } else if (resolution.trim().equals("minute")) {
      resolutions[1] = true;
    } else if (resolution.trim().equals("second")) {
      resolutions[0] = true;
    } else {
      LOGGER.warn("[" + this.getName() + "] Unknown resolution " + resolution);
    } // if else
  } // for
  LOGGER.debug(
      "[" + this.getName() + "] Reading configuration (resolutions=" + resolutionsStr + ")");
  super.configure(context);
} // configure
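// A hedged sketch of the resulting flags for a narrowed configuration, invoked on an
// instance of this component (index mapping per the code above: [0]=second, [1]=minute,
// [2]=hour, [3]=day, [4]=month):
Context context = new Context();
context.put("resolutions", "hour, minute");
configure(context);
// resolutions == {false, true, true, false, false}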
@Test(groups = "dev") public void sinkSingleModelTest() throws EventDeliveryException { ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.single.name()); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); Transaction tx = channel.getTransaction(); tx.begin(); JSONObject msg = new JSONObject(); msg.put("name", "test"); msg.put("age", 11); msg.put("birthday", new Date().getTime()); Event e = EventBuilder.withBody(msg.toJSONString().getBytes()); channel.put(e); tx.commit(); tx.close(); sink.process(); sink.stop(); DB db = mongo.getDB("test_events"); DBCollection collection = db.getCollection("test_log"); DBCursor cursor = collection.find(new BasicDBObject(msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get("name"), msg.get("name")); assertEquals(dbObject.get("age"), msg.get("age")); assertEquals(dbObject.get("birthday"), msg.get("birthday")); }
private RecoverableMemoryChannel createFileChannel() {
  RecoverableMemoryChannel channel = new RecoverableMemoryChannel();
  context = new Context();
  context.put(RecoverableMemoryChannel.WAL_DATA_DIR, dataDir.getAbsolutePath());
  Configurables.configure(channel, context);
  channel.start();
  return channel;
}
public void createAvroFile(File file, String codec, boolean useSchemaUrl) throws IOException {
  // serialize a few events using the reflection-based avro serializer
  OutputStream out = new FileOutputStream(file);
  Context ctx = new Context();
  if (codec != null) {
    ctx.put("compressionCodec", codec);
  }
  Schema schema = Schema.createRecord("myrecord", null, null, false);
  schema.setFields(
      Arrays.asList(
          new Schema.Field[] {
            new Schema.Field("message", Schema.create(Schema.Type.STRING), null, null)
          }));
  GenericRecordBuilder recordBuilder = new GenericRecordBuilder(schema);
  File schemaFile = null;
  if (useSchemaUrl) {
    schemaFile = File.createTempFile(getClass().getSimpleName(), ".avsc");
    Files.write(schema.toString(), schemaFile, Charsets.UTF_8);
  }
  EventSerializer.Builder builder = new AvroEventSerializer.Builder();
  EventSerializer serializer = builder.build(ctx, out);
  serializer.afterCreate();
  for (int i = 0; i < 3; i++) {
    GenericRecord record = recordBuilder.set("message", "Hello " + i).build();
    Event event = EventBuilder.withBody(serializeAvro(record, schema));
    if (schemaFile == null) {
      event.getHeaders().put(AvroEventSerializer.AVRO_SCHEMA_LITERAL_HEADER, schema.toString());
    } else {
      event
          .getHeaders()
          .put(
              AvroEventSerializer.AVRO_SCHEMA_URL_HEADER,
              schemaFile.toURI().toURL().toExternalForm());
    }
    serializer.write(event);
  }
  serializer.flush();
  serializer.beforeClose();
  out.flush();
  out.close();
}
@SuppressWarnings("unchecked") @Test public void ThrowableThrownExceptionTest() throws EventDeliveryException { Jedis jedis = mock(Jedis.class); // Not really true, but fits the requirement when(jedis.lpush(any(byte[].class), any(byte[].class))).thenThrow(clazz); JedisPool jedisPool = mock(JedisPool.class); MockJedisPoolFactory mockJedisPoolFactory = new MockJedisPoolFactory(jedisPool, jedis); Channel channel = mock(Channel.class); Transaction transactionMock = mock(Transaction.class); when(channel.getTransaction()).thenReturn(transactionMock); Event testEvent = new SimpleEvent(); byte[] testBody = new byte[] {'b', 'o', 'd', 'y'}; testEvent.setBody(testBody); when(channel.take()).thenReturn(testEvent); RedisSink redisSink = new RedisSink(mockJedisPoolFactory); redisSink.setChannel(channel); Context context = new Context(); context.put(RedisSinkConfigurationConstant.HOST, "localhost"); redisSink.configure(context); redisSink.start(); redisSink.process(); verify(channel, times(1)).getTransaction(); verify(channel, times(1)).take(); verify(transactionMock, times(1)).begin(); verify(transactionMock, times(1)).close(); verify(transactionMock, times(0)).commit(); verify(transactionMock, times(1)).rollback(); verify(jedisPool, times(1)).getResource(); verify(jedisPool, times(1)).returnResource(jedis); verify(jedis, times(1)) .lpush(eq(RedisSinkConfigurationConstant.DEFAULT_KEY.getBytes()), any(byte[][].class)); }
@BeforeMethod(groups = {"dev"}) public static void setup() throws UnknownHostException { mongo = new Mongo("localhost", 27017); Map<String, String> ctxMap = new HashMap<String, String>(); ctxMap.put(MongoSink.HOST, "localhost"); ctxMap.put(MongoSink.PORT, "27017"); ctxMap.put(MongoSink.DB_NAME, "test_events"); ctxMap.put(MongoSink.COLLECTION, "test_log"); ctxMap.put(MongoSink.BATCH_SIZE, "100"); ctx.putAll(ctxMap); Context channelCtx = new Context(); channelCtx.put("capacity", "1000000"); channelCtx.put("transactionCapacity", "1000000"); channel = new MemoryChannel(); Configurables.configure(channel, channelCtx); }
@Test(expected = NullPointerException.class)
public void testNoJson() throws Exception {
  // Do not catch the exception here: @Test(expected = ...) requires it to propagate,
  // so the original try/catch would have made this test always fail.
  context.put("urlJson", "");
  dynamicUrlHandler = new DynamicUrlHandler();
  dynamicUrlHandler.configure(context);
}
@Test(expected = RestSourceException.class)
public void testEmptyJson() throws Exception {
  // Let the expected exception propagate instead of swallowing it in a catch block.
  context.put("urlJson", EMPTY_URL_JSON);
  dynamicUrlHandler = new DynamicUrlHandler();
  dynamicUrlHandler.configure(context);
}