@Test
public void testApplication() throws Exception
{
  // LocalMode.runApp(new ApacheAccessLogAnalaysis(), 10000);
  @SuppressWarnings("deprecation")
  ApacheAccessLogAnalaysis app = new ApacheAccessLogAnalaysis();
  LocalMode lma = LocalMode.newInstance();
  app.populateDAG(lma.getDAG(), new Configuration(false));
  LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);
  lc.run();
}
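// For context, the application driven by the test above is expected to implement Apex's
// StreamingApplication interface and build its DAG in populateDAG(). The sketch below is a
// minimal, hypothetical example of that pattern; the operator names and the LogInputOperator
// type are placeholders for illustration, not the actual contents of ApacheAccessLogAnalaysis.
public class ApacheAccessLogAnalaysisSketch implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // Hypothetical input operator producing log lines
    LogInputOperator input = dag.addOperator("LogInput", LogInputOperator.class);
    // Malhar's ConsoleOutputOperator prints tuples for inspection
    ConsoleOutputOperator console = dag.addOperator("Console", ConsoleOutputOperator.class);
    dag.addStream("logLines", input.outputPort, console.input);
  }
}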
/**
 * Test AbstractKafkaSinglePortInputOperator (i.e. an input adapter for Kafka, aka consumer).
 * This module receives data from an outside test generator through the Kafka message bus and
 * feeds that data into the Malhar streaming platform.
 *
 * <p>[Generate message and send that to Kafka message bus] ==> [Receive that message through
 * Kafka input adapter (i.e. consumer) and send using emitTuples() interface on output port
 * during onMessage call]
 *
 * @throws Exception
 */
public void testKafkaInputOperator(int sleepTime, final int totalCount, KafkaConsumer consumer,
    boolean isValid) throws Exception
{
  // Initialize the latch for this test
  latch = new CountDownLatch(1);

  // Start producer
  KafkaTestProducer p = new KafkaTestProducer(TEST_TOPIC);
  p.setSendCount(totalCount);
  new Thread(p).start();

  // Create DAG for testing.
  LocalMode lma = LocalMode.newInstance();
  DAG dag = lma.getDAG();

  // Create KafkaSinglePortStringInputOperator
  KafkaSinglePortStringInputOperator node =
      dag.addOperator("Kafka message consumer", KafkaSinglePortStringInputOperator.class);
  consumer.setTopic(TEST_TOPIC);
  if (isValid) {
    Set<String> brokerSet = new HashSet<String>();
    brokerSet.add("localhost:9092");
    consumer.setBrokerSet(brokerSet);
  }
  node.setConsumer(consumer);

  // Create test tuple collector
  CollectorModule<String> collector =
      dag.addOperator("TestMessageCollector", new CollectorModule<String>());

  // Connect ports
  dag.addStream("Kafka message", node.outputPort, collector.inputPort)
      .setLocality(Locality.CONTAINER_LOCAL);

  // Create local cluster
  final LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  // Wait up to 30s for the consumer to finish consuming all the messages
  Assert.assertTrue("TIMEOUT: 30s ", latch.await(30000, TimeUnit.MILLISECONDS));

  // Check results
  Assert.assertEquals("Collections size", 1, collections.size());
  Assert.assertEquals("Tuple count", totalCount, collections.get(collector.inputPort.id).size());
  logger.debug(String.format("Number of emitted tuples: %d",
      collections.get(collector.inputPort.id).size()));

  p.close();
  lc.shutdown();
}
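// The test above relies on fixtures declared elsewhere in the test class: the latch, the
// static collections map, the logger, and the CollectorModule operator whose input port
// records tuples under its port id. A minimal sketch of those fixtures is shown below;
// the field names, the END_TUPLE marker, and the countdown logic are assumptions for
// illustration, not necessarily the original implementation.
private static final Logger logger = LoggerFactory.getLogger(CollectorModule.class);
private static CountDownLatch latch;
private static final Map<String, List<String>> collections =
    new ConcurrentHashMap<String, List<String>>();

public static class CollectorModule<T> extends BaseOperator
{
  public final transient CollectorInputPort<T> inputPort = new CollectorInputPort<T>("collector");
}

public static class CollectorInputPort<T> extends DefaultInputPort<T>
{
  public final String id;
  private final List<String> list = new ArrayList<String>();

  public CollectorInputPort(String id)
  {
    this.id = id;
  }

  @Override
  public void process(T tuple)
  {
    // Assumption: the producer appends an END_TUPLE marker so the test knows when to stop waiting
    if ("END_TUPLE".equals(tuple)) {
      if (latch != null) {
        latch.countDown();
      }
      return;
    }
    list.add(tuple.toString());
  }

  @Override
  public void setConnected(boolean flag)
  {
    if (flag) {
      // Register this port's tuple list so the test can look it up by port id
      collections.put(id, list);
    }
  }
}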
@Test
public void testApplication() throws Exception
{
  try {
    LocalMode lma = LocalMode.newInstance();
    Configuration conf = new Configuration(false);
    conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
    lma.prepareDAG(new Application(), conf);
    LocalMode.Controller lc = lma.getController();
    lc.run(10000); // runs for 10 seconds and quits
  } catch (ConstraintViolationException e) {
    Assert.fail("constraint violations: " + e.getConstraintViolations());
  }
}
@Test
public void testApplication() throws Exception
{
  LocalMode lma = LocalMode.newInstance();
  Configuration conf = new Configuration(false);
  conf.set("dt.operator.Unique.prop.tableName", "Test_Lookup_Cache");
  conf.set("dt.operator.Unique.prop.store.dbUrl", "jdbc:hsqldb:mem:test;sql.syntax_mys=true");
  conf.set("dt.operator.Unique.prop.store.dbDriver", "org.hsqldb.jdbcDriver");
  lma.prepareDAG(new Application(), conf);
  lma.cloneDAG(); // verify the DAG survives serialization/cloning
  LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  // Let the application run for roughly 15 seconds before shutting down
  long now = System.currentTimeMillis();
  while (System.currentTimeMillis() - now < 15000) {
    Thread.sleep(1000);
  }
  lc.shutdown();
}
@Test
public void testApplication() throws Exception
{
  Configuration conf = new Configuration(false);
  conf.addResource("dt-site-monitoring.xml");

  // Start an embedded servlet container hosting the PubSub websocket servlet
  Server server = new Server(0);
  Servlet servlet = new SamplePubSubWebSocketServlet();
  ServletHolder sh = new ServletHolder(servlet);
  ServletContextHandler contextHandler =
      new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
  contextHandler.addServlet(sh, "/pubsub");
  contextHandler.addServlet(sh, "/*");
  server.start();

  // Point the application at the embedded gateway address
  Connector[] connector = server.getConnectors();
  conf.set("dt.attr.GATEWAY_CONNECT_ADDRESS", "localhost:" + connector[0].getLocalPort());

  MRMonitoringApplication application = new MRMonitoringApplication();
  LocalMode lma = LocalMode.newInstance();
  lma.prepareDAG(application, conf);
  LocalMode.Controller lc = lma.getController();
  lc.run(10000); // runs for 10 seconds and quits
  server.stop();
}
/**
 * Test KafkaOutputOperator (i.e. an output adapter for Kafka, aka producer). This module sends
 * data into the Kafka message bus.
 *
 * <p>[Generate tuple] ==> [send tuple through Kafka output adapter (i.e. producer) into Kafka
 * message bus] ==> [receive data in outside Kafka listener (i.e. consumer)]
 *
 * @throws Exception
 */
@Test
@SuppressWarnings({"SleepWhileInLoop", "empty-statement"})
public void testKafkaOutputOperator() throws Exception
{
  // Setup a message listener to receive the message
  KafkaConsumer listener = new KafkaConsumer("topic1");
  new Thread(listener).start();

  // Malhar module to send the message
  // Create DAG for testing.
  LocalMode lma = LocalMode.newInstance();
  DAG dag = lma.getDAG();

  // Create the string generator and the KafkaStringSinglePortOutputOperator
  StringGeneratorInputOperator generator =
      dag.addOperator("TestStringGenerator", StringGeneratorInputOperator.class);
  KafkaStringSinglePortOutputOperator node =
      dag.addOperator("Kafka message producer", KafkaStringSinglePortOutputOperator.class);

  // Set configuration parameters for Kafka
  node.setTopic("topic1");

  // Connect ports
  dag.addStream("Kafka message", generator.outputPort, node.inputPort)
      .setLocality(Locality.CONTAINER_LOCAL);

  // Create local cluster
  final LocalMode.Controller lc = lma.getController();
  lc.runAsync();
  Thread.sleep(2000);
  lc.shutdown();

  // Check values sent vs received
  Assert.assertEquals("Number of emitted tuples", tupleCount, listener.holdingBuffer.size());
  logger.debug(String.format("Number of emitted tuples: %d", listener.holdingBuffer.size()));
  Assert.assertEquals("First tuple", "testString 1",
      listener.getMessage(listener.holdingBuffer.peek()));

  listener.close();
}
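// KafkaConsumer above is a test-side listener defined in the test sources, not the Kafka
// client class of the same name; tupleCount and StringGeneratorInputOperator are likewise
// test fixtures and are not sketched here. The sketch below shows only the shape this test
// depends on: a Runnable that collects received messages into holdingBuffer, decodes
// payloads via getMessage(), and stops on close(). The polling loop and the payload
// decoding are assumptions for illustration; the real code depends on the Kafka client version.
public class KafkaConsumer implements Runnable
{
  public final ConcurrentLinkedQueue<Message> holdingBuffer = new ConcurrentLinkedQueue<Message>();
  private final String topic;
  private volatile boolean running = true;

  public KafkaConsumer(String topic)
  {
    this.topic = topic;
  }

  public String getMessage(Message message)
  {
    // Assumption: the payload is a UTF-8 string produced by the output operator
    ByteBuffer payload = message.payload();
    byte[] bytes = new byte[payload.remaining()];
    payload.get(bytes);
    return new String(bytes);
  }

  @Override
  public void run()
  {
    while (running) {
      // Poll the topic and add each received Message to holdingBuffer (client-specific, omitted)
    }
  }

  public void close()
  {
    running = false;
  }
}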