@Test
public void shouldIndexFiveEvents() throws Exception {
  // Make it so we only need to call process once
  parameters.put(BATCH_SIZE, "5");
  Configurables.configure(fixture, new Context(parameters));
  Channel channel = bindAndStartChannel(fixture);

  int numberOfEvents = 5;
  Event[] events = new Event[numberOfEvents];

  Transaction tx = channel.getTransaction();
  tx.begin();
  for (int i = 0; i < numberOfEvents; i++) {
    String body = "event #" + i + " of " + numberOfEvents;
    Event event = EventBuilder.withBody(body.getBytes());
    events[i] = event;
    channel.put(event);
  }
  tx.commit();
  tx.close();

  fixture.process();
  fixture.stop();

  client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

  assertMatchAllQuery(numberOfEvents, events);
  assertBodyQuery(5, events);
}
@Test
public void shouldIndexFiveEventsOverThreeBatches() throws Exception {
  parameters.put(BATCH_SIZE, "2");
  Configurables.configure(fixture, new Context(parameters));
  Channel channel = bindAndStartChannel(fixture);

  int numberOfEvents = 5;
  Event[] events = new Event[numberOfEvents];

  Transaction tx = channel.getTransaction();
  tx.begin();
  for (int i = 0; i < numberOfEvents; i++) {
    String body = "event #" + i + " of " + numberOfEvents;
    Event event = EventBuilder.withBody(body.getBytes());
    events[i] = event;
    channel.put(event);
  }
  tx.commit();
  tx.close();

  // Five events with a batch size of 2 should drain in three process()
  // calls, the last of which returns BACKOFF
  int count = 0;
  Status status = Status.READY;
  while (status != Status.BACKOFF) {
    count++;
    status = fixture.process();
  }
  fixture.stop();
  assertEquals(3, count);

  client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

  assertMatchAllQuery(numberOfEvents, events);
  assertBodyQuery(5, events);
}
@Test(groups = "dev") public void autoWrapTest() throws EventDeliveryException { ctx.put(MongoSink.AUTO_WRAP, Boolean.toString(true)); ctx.put(MongoSink.DB_NAME, "test_wrap"); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); Transaction tx = channel.getTransaction(); tx.begin(); String msg = "2012/10/26 11:23:08 [error] 7289#0: *6430831 open() \"/usr/local/nginx/html/50x.html\" failed (2: No such file or directory), client: 10.160.105.161, server: sg15.redatoms.com, request: \"POST /mojo/ajax/embed HTTP/1.0\", upstream: \"fastcgi://unix:/tmp/php-fpm.sock:\", host: \"sg15.redatoms.com\", referrer: \"http://sg15.redatoms.com/mojo/mobile/package\""; Event e = EventBuilder.withBody(msg.getBytes()); channel.put(e); tx.commit(); tx.close(); sink.process(); sink.stop(); DB db = mongo.getDB("test_wrap"); DBCollection collection = db.getCollection("test_log"); DBCursor cursor = collection.find(new BasicDBObject(MongoSink.DEFAULT_WRAP_FIELD, msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get(MongoSink.DEFAULT_WRAP_FIELD), msg); mongo.dropDatabase("test_wrap"); }
@Test(groups = "dev") public void sinkSingleModelTest() throws EventDeliveryException { ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.single.name()); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); Transaction tx = channel.getTransaction(); tx.begin(); JSONObject msg = new JSONObject(); msg.put("name", "test"); msg.put("age", 11); msg.put("birthday", new Date().getTime()); Event e = EventBuilder.withBody(msg.toJSONString().getBytes()); channel.put(e); tx.commit(); tx.close(); sink.process(); sink.stop(); DB db = mongo.getDB("test_events"); DBCollection collection = db.getCollection("test_log"); DBCursor cursor = collection.find(new BasicDBObject(msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get("name"), msg.get("name")); assertEquals(dbObject.get("age"), msg.get("age")); assertEquals(dbObject.get("birthday"), msg.get("birthday")); }
@Test
public void shouldFailToConfigureWithInvalidSerializerClass() throws Exception {
  // A class that is not a serializer at all should be rejected
  parameters.put(SERIALIZER, "java.lang.String");
  try {
    Configurables.configure(fixture, new Context(parameters));
    fail("expected a ClassCastException");
  } catch (ClassCastException e) {
    // expected
  }

  // A Configurable that does not implement the serializer interface
  // should also be rejected
  parameters.put(SERIALIZER, FakeConfigurable.class.getName());
  try {
    Configurables.configure(fixture, new Context(parameters));
    fail("expected an IllegalArgumentException");
  } catch (IllegalArgumentException e) {
    // expected
  }
}
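// The test above references a FakeConfigurable helper that is not shown in
// this section. A minimal sketch of what it plausibly looks like (an
// assumption, not the original source): a Configurable that is not an event
// serializer, so the sink's configure() takes the IllegalArgumentException path.
public static class FakeConfigurable implements Configurable {
  @Override
  public void configure(Context context) {
    // no-op: exists only so the class can be named as a SERIALIZER parameter
  }
}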
private RecoverableMemoryChannel createFileChannel() {
  RecoverableMemoryChannel channel = new RecoverableMemoryChannel();
  context = new Context();
  context.put(RecoverableMemoryChannel.WAL_DATA_DIR, dataDir.getAbsolutePath());
  Configurables.configure(channel, context);
  channel.start();
  return channel;
}
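// A minimal usage sketch for the helper above (an assumption, not part of the
// original suite): round-trip one event through the recoverable channel using
// the standard Flume transaction pattern. The test name is illustrative only.
@Test
public void shouldRoundTripOneEvent() throws Exception {
  RecoverableMemoryChannel channel = createFileChannel();

  Transaction tx = channel.getTransaction();
  tx.begin();
  channel.put(EventBuilder.withBody("round trip".getBytes()));
  tx.commit();
  tx.close();

  tx = channel.getTransaction();
  tx.begin();
  Event event = channel.take();
  assertNotNull(event);
  tx.commit();
  tx.close();

  channel.stop();
}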
@Test(groups = "dev") public void timestampExistingFieldTest() throws EventDeliveryException, ParseException { ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.dynamic.name()); String tsField = "createdOn"; ctx.put(MongoSink.TIMESTAMP_FIELD, tsField); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); JSONObject msg = new JSONObject(); msg.put("age", 11); msg.put("birthday", new Date().getTime()); String dateText = "2013-02-19T14:20:53+08:00"; msg.put(tsField, dateText); Transaction tx; for (int i = 0; i < 10; i++) { tx = channel.getTransaction(); tx.begin(); msg.put("name", "test" + i); JSONObject header = new JSONObject(); header.put(MongoSink.COLLECTION, "my_events"); header.put(MongoSink.DB_NAME, "dynamic_db"); Event e = EventBuilder.withBody(msg.toJSONString().getBytes(), header); channel.put(e); tx.commit(); tx.close(); } sink.process(); sink.stop(); msg.put(tsField, MongoSink.dateTimeFormatter.parseDateTime(dateText).toDate()); for (int i = 0; i < 10; i++) { msg.put("name", "test" + i); System.out.println("i = " + i); DB db = mongo.getDB("dynamic_db"); DBCollection collection = db.getCollection("my_events"); DBCursor cursor = collection.find(new BasicDBObject(msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get("name"), msg.get("name")); assertEquals(dbObject.get("age"), msg.get("age")); assertEquals(dbObject.get("birthday"), msg.get("birthday")); assertTrue(dbObject.get(tsField) instanceof Date); System.out.println("ts = " + dbObject.get(tsField)); } }
// Disabled: exercises only the sink lifecycle (configure, start, stop)
// @Test
public void testLifecycle() throws InterruptedException, LifecycleException {
  LOG.debug("Starting...");
  Context context = new Context();
  context.put("hdfs.path", testPath);
  context.put("hdfs.filePrefix", "pageview");
  Configurables.configure(sink, context);
  sink.setChannel(new MemoryChannel());
  sink.start();
  sink.stop();
}
@Test
public void testReconfigure() throws Exception {
  // Fill the channel until it rejects a put for lack of capacity...
  List<String> in = Lists.newArrayList();
  try {
    while (true) {
      in.addAll(putEvents(channel, "restart", 1, 1));
    }
  } catch (ChannelException e) {
    Assert.assertEquals("Cannot acquire capacity", e.getMessage());
  }

  // ...reconfigure it in place, then verify no events were lost
  Configurables.configure(channel, context);

  List<String> out = takeEvents(channel, 1, Integer.MAX_VALUE);
  Collections.sort(in);
  Collections.sort(out);
  Assert.assertEquals(in, out);
}
@Test(groups = "dev") public void sinkDynamicDbTest() throws EventDeliveryException { ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.dynamic.name()); MongoSink sink = new MongoSink(); Configurables.configure(sink, ctx); sink.setChannel(channel); sink.start(); JSONObject msg = new JSONObject(); msg.put("age", 11); msg.put("birthday", new Date().getTime()); Transaction tx; for (int i = 0; i < 10; i++) { tx = channel.getTransaction(); tx.begin(); msg.put("name", "test" + i); JSONObject header = new JSONObject(); header.put(MongoSink.COLLECTION, "my_events"); header.put(MongoSink.DB_NAME, "dynamic_db"); Event e = EventBuilder.withBody(msg.toJSONString().getBytes(), header); channel.put(e); tx.commit(); tx.close(); } sink.process(); sink.stop(); for (int i = 0; i < 10; i++) { msg.put("name", "test" + i); System.out.println("i = " + i); DB db = mongo.getDB("dynamic_db"); DBCollection collection = db.getCollection("my_events"); DBCursor cursor = collection.find(new BasicDBObject(msg)); assertTrue(cursor.hasNext()); DBObject dbObject = cursor.next(); assertNotNull(dbObject); assertEquals(dbObject.get("name"), msg.get("name")); assertEquals(dbObject.get("age"), msg.get("age")); assertEquals(dbObject.get("birthday"), msg.get("birthday")); } }
@Test
public void shouldIndexOneEvent() throws Exception {
  Configurables.configure(fixture, new Context(parameters));
  Channel channel = bindAndStartChannel(fixture);

  Transaction tx = channel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody("event #1 of 1".getBytes());
  channel.put(event);
  tx.commit();
  tx.close();

  fixture.process();
  fixture.stop();

  client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

  assertMatchAllQuery(1, event);
  assertBodyQuery(1, event);
}
@BeforeMethod(groups = {"dev"}) public static void setup() throws UnknownHostException { mongo = new Mongo("localhost", 27017); Map<String, String> ctxMap = new HashMap<String, String>(); ctxMap.put(MongoSink.HOST, "localhost"); ctxMap.put(MongoSink.PORT, "27017"); ctxMap.put(MongoSink.DB_NAME, "test_events"); ctxMap.put(MongoSink.COLLECTION, "test_log"); ctxMap.put(MongoSink.BATCH_SIZE, "100"); ctx.putAll(ctxMap); Context channelCtx = new Context(); channelCtx.put("capacity", "1000000"); channelCtx.put("transactionCapacity", "1000000"); channel = new MemoryChannel(); Configurables.configure(channel, channelCtx); }
@OnScheduled
public void onScheduled(final SchedulingContext context) {
  try {
    channel = new NifiSinkSessionChannel(SUCCESS, FAILURE);
    channel.start();

    sink = SINK_FACTORY.create(
        context.getProperty(SOURCE_NAME).getValue(),
        context.getProperty(SINK_TYPE).getValue());
    sink.setChannel(channel);

    String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
    String agentName = context.getProperty(AGENT_NAME).getValue();
    String sinkName = context.getProperty(SOURCE_NAME).getValue();
    Configurables.configure(sink, getFlumeSinkContext(flumeConfig, agentName, sinkName));

    sink.start();
  } catch (Throwable th) {
    getLogger().error("Error creating sink", th);
    throw Throwables.propagate(th);
  }
}
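// A hypothetical counterpart (an assumption, not shown in the original):
// NiFi processors conventionally release resources in an @OnStopped method,
// mirroring what @OnScheduled acquired above.
@OnStopped
public void onStopped() {
  if (sink != null) {
    sink.stop();
  }
  if (channel != null) {
    channel.stop();
  }
}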
@Test
public void shouldIndexComplexJsonEvent() throws Exception {
  Configurables.configure(fixture, new Context(parameters));
  Channel channel = bindAndStartChannel(fixture);

  Transaction tx = channel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody("{\"event\":\"json content\",\"num\":1}".getBytes());
  channel.put(event);
  tx.commit();
  tx.close();

  fixture.process();
  fixture.stop();

  client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

  Map<String, Object> expectedBody = new HashMap<String, Object>();
  expectedBody.put("event", "json content");
  expectedBody.put("num", 1);

  assertSearch(1, performSearch(QueryBuilders.matchAllQuery()), expectedBody, event);
  assertSearch(1, performSearch(QueryBuilders.fieldQuery("@message.event", "json")),
      expectedBody, event);
}
public void doTestTextBatchAppend() throws Exception {
  LOG.debug("Starting...");

  final long rollCount = 10;
  final long batchSize = 2;
  final String fileName = "PageView";
  String newPath = testPath + "/singleTextBucket";
  int totalEvents = 0;
  int i = 1, j = 1;

  // clear the test directory
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  Path dirPath = new Path(newPath);
  fs.delete(dirPath, true);
  fs.mkdirs(dirPath);

  Context context = new Context();
  context.put("hdfs.path", newPath);
  context.put("hdfs.rollCount", String.valueOf(rollCount));
  context.put("hdfs.batchSize", String.valueOf(batchSize));
  context.put("hdfs.filePrefix", "pageview");

  Channel channel = new MemoryChannel();
  Configurables.configure(channel, context);
  sink.setChannel(channel);
  sink.start();

  Calendar eventDate = Calendar.getInstance();
  Date currentDate = new Date();
  Map<String, String> header = new HashMap<String, String>();
  header.put("topic", "PageView");
  List<String> bodies = Lists.newArrayList();

  // push the test events into the channel
  for (i = 1; i <= (rollCount * 10) / batchSize; i++) {
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (j = 1; j <= batchSize; j++) {
      header.put("timestamp", String.valueOf(currentDate.getTime()));
      Event event = new SimpleEvent();
      eventDate.clear();
      eventDate.set(2014, i, i, i, 0);
      String body = "Test." + i + "." + j;
      event.setHeaders(header);
      event.setBody(body.getBytes());
      bodies.add(body);
      channel.put(event);
      totalEvents++;
    }
    txn.commit();
    txn.close();

    // execute sink to process the events
    sink.process();
  }
  sink.stop();

  FileStatus[] dirStat = fs.listStatus(dirPath);
  Path[] fList = FileUtil.stat2Paths(dirStat);

  long expectedFiles = totalEvents / rollCount;
  if (totalEvents % rollCount > 0) {
    expectedFiles++;
  }
  Assert.assertEquals("num files wrong, found: " + Lists.newArrayList(fList),
      expectedFiles, fList.length);

  // verify the contents of every file written
  verifyOutputTextFiles(fs, conf, dirPath.toUri().getPath(), fileName, bodies);
}
// @Ignore
@Before
public void setUp() {
  conf = ConfigFactory.load();

  List<String> zkHosts = conf.getStringList("zookeeper.hosts");
  for (String host : zkHosts) {
    ZOOKEEPER_HOSTS += host + ",";
  }
  ZOOKEEPER_HOSTS = ZOOKEEPER_HOSTS.substring(0, ZOOKEEPER_HOSTS.length() - 1);

  List<String> kafkaHosts = conf.getStringList("kafka.hosts");
  for (String host : kafkaHosts) {
    KAFKA_HOSTS += host + ",";
  }
  KAFKA_HOSTS = KAFKA_HOSTS.substring(0, KAFKA_HOSTS.length() - 1);

  LOGGER.debug("Using Zookeeper hosts: " + ZOOKEEPER_HOSTS);
  LOGGER.debug("Using Kafka hosts: " + KAFKA_HOSTS);

  // try {
  //   zookeeperServer = new ZookeeperServer();
  //   zookeeperServer.start();
  // } catch (Exception e) {
  //   e.printStackTrace();
  // }
  // try {
  //   kafkaServer = new KafkaServer();
  //   kafkaServer.start();
  // } catch (Exception e) {
  //   e.printStackTrace();
  // }

  String[] connection = KAFKA_HOSTS.split(":");
  // simpleConsumer = new SimpleConsumer("localhost", 9092, 60000, 1024, CLIENT_ID);
  simpleConsumer = new SimpleConsumer(connection[0], Integer.parseInt(connection[1]),
      60000, 1024, CLIENT_ID);

  kafkaSink = new KafkaSink();
  Context kafkaContext = new Context();
  kafkaContext.put("topic", "test");
  kafkaContext.put("writeBody", "false");
  kafkaContext.put("kafka.metadata.broker.list", KAFKA_HOSTS);
  kafkaContext.put("kafka.serializer.class", "kafka.serializer.StringEncoder");
  Configurables.configure(kafkaSink, kafkaContext);

  Context channelContext = new Context();
  channelContext.put("capacity", "10000");
  channelContext.put("transactionCapacity", "200");
  channel = new MemoryChannel();
  channel.setName("junitChannel");
  Configurables.configure(channel, channelContext);

  kafkaSink.setChannel(channel);
  channel.start();
  kafkaSink.start();
}
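// A hypothetical cleanup sketch (an assumption, not shown in the original):
// stop the sink and channel started in setUp() and close the consumer so
// each test run releases its broker connection.
@After
public void tearDown() {
  kafkaSink.stop();
  channel.stop();
  simpleConsumer.close();
}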