public static void main(String[] args) throws IOException, ConnectionException {
  Logger.getRootLogger().setLevel(Level.WARN);

  // *** start the storm cluster
  LocalCluster cluster = new LocalCluster();

  // *** start kafka
  LocalKafkaBroker broker = new LocalKafkaBroker(0, 9090, 4, "localhost:2000");
  ReplayConfig replay =
      new ReplayConfig().staticHosts(broker.getHostPortStrings(), broker.getNumPartitions());

  // *** build a topology
  // KarmaConfig karmaConfig = new KarmaConfigImpl("a", replay, new InMemoryReducerState());
  KarmaConfig karmaConfig = new KarmaConfigImpl("a", replay, new CassandraReducerState("demo"));
  StormTopology topology = buildTopology(karmaConfig);

  // *** submit the topology to storm
  Config config = new Config();
  config.setMaxSpoutPending(50);
  cluster.submitTopology("bankdemo", config, topology);

  // *** send some events
  Producer<Long, Message> kafkaProducer = broker.buildSyncProducer();
  JsonProducer mb = new JsonProducer(kafkaProducer).printSendsToConsole(true);
  sendBankingDemoMessages(mb);

  Utils.sleep(100000);
  kafkaProducer.close();
}
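// buildTopology(karmaConfig) is defined elsewhere in the bank demo.  A minimal sketch of what
// such a method could look like, reusing only the KarmaTopologyBuilder, KafkaSpout, and
// ReplayConfig calls that appear in the second listing below; the "account" topic,
// ACCOUNT_SCHEME, the accountSpout name, and the balances reduction are hypothetical
// illustrations rather than the demo's actual banking rules, and the getReplay() accessor on
// KarmaConfig is an assumption.
private static StormTopology buildTopology(KarmaConfig karmaConfig) {
  KarmaTopologyBuilder karma = new KarmaTopologyBuilder(karmaConfig, "bankdemo");

  // read account events from the local kafka broker through the replay-enabled spout config
  ReplayConfig replay = karmaConfig.getReplay(); // assumed accessor
  karma.setSpout(
      "accountSpout",
      new KafkaSpout(replay.buildReplaySpoutConfig("account", ACCOUNT_SCHEME, "accountSpoutId")),
      4);

  // keep a running balance per account by summing transaction amounts
  karma
      .map("{ accountId }", "accountSpout(id)", "{ t -> emit(t.accountId, t.amount) }")
      .red("{ balance }", "balances(accountId)", "{ a, b -> [a.balance + b.balance] }");

  return karma.createTopology();
}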
public static void main(String[] args) throws IOException {
  Logger.getRootLogger().setLevel(Level.WARN);

  // *** start the storm cluster
  LocalCluster cluster = new LocalCluster();

  // *** start the embedded kafka service
  LocalKafkaBroker broker = new LocalKafkaBroker(0, 9090, 1, "localhost:2000");

  // *** configure replay and karma to use the local kafka instance
  ReplayConfig replay =
      new ReplayConfig().staticHosts(broker.getHostPortStrings(), broker.getNumPartitions());
  KarmaConfig karmaConfig =
      new KarmaConfig("a").replay(replay).reducerState(new InMemoryReducerState());

  KarmaTopologyBuilder karma = new KarmaTopologyBuilder(karmaConfig, "testA");

  karma.setSpout(
      "orgSpout",
      new KafkaSpout(replay.buildReplaySpoutConfig("org", ORG_SCHEME, "orgSpoutId")),
      4);
  karma.setSpout(
      "userSpout",
      new KafkaSpout(replay.buildReplaySpoutConfig("user", USER_SCHEME, "userSpoutId")),
      4);

  // *** count users per org, then aggregate across all orgs, using the built-in Count and Sum
  karma
      .map("{ orgId }", "userSpout(id)", new Count("orgId"))
      .red("{ userCount }", "orgUserCounts(orgId)", new Sum());
  karma
      .map("{ userCount }", "orgUserCounts(orgId)", new Count("userCount"))
      .red("{ total samples }", "allOrgs()", new Sum())
      .fmt("{ totalUsers averagePerOrg }", "{ d -> [d.total, d.total / d.samples] }");
  buildSniffer("allOrgs", karma);

  // *** the same aggregation expressed with the scripting support
  karma
      .map("{ orgId }", "userSpout(id)", "{ u -> emit(u.orgId, 1) }")
      .red("{ userCount }", "orgUserCounts2(orgId)", "{ a, b -> [a.userCount + b.userCount] }");
  karma
      .map("{ userCount }", "orgUserCounts2(orgId)", "{ d -> emit(d.userCount, 1) }")
      .red(
          "{ total samples }",
          "allOrgs2()",
          "{ a, b -> [a.total + b.total, a.samples + b.samples]}")
      .fmt("{ totalUsers averagePerOrg }", "{ d -> [d.total, d.total / d.samples] }");
  buildSniffer("allOrgs2", karma);

  // *** build a name count using the scripting support
  karma
      .map("{ name }", "userSpout(id)", "{ u -> emit(u.name, 1L) }")
      .red("{ count }", "nameCounts(name)", "{ a, b -> [a.count + b.count] }");
  buildSniffer("nameCounts", karma);

  karma
      .map("{ name }", "userSpout(id)", "{ u -> emit(u.name, 1L) }")
      .red("{ count }", "nameCounts2(name)", "{ a, b -> [a.count + b.count] }");
  buildSniffer("nameCounts2", karma);

  karma
      .map("{ orgId }", "userSpout(id)", "{ u -> emit(u.orgId, 1L) }")
      .red("{ count }", "empCounts(orgId)", "{ a, b -> [a.count + b.count] }");
  buildSniffer("empCounts", karma);

  karma
      .map("{ name }", "userSpout(id)", "{ u -> emit(1L) }")
      .red("{ count }", "userCount()", "{ a, b -> [a.count + b.count] }");
  buildSniffer("userCount", karma);

  karma
      .map("{ name }", "userSpout(id)", "{ u -> emit(1L) }")
      .red("{ count }", "userCount2()", "{ a, b -> [a.count + b.count] }");
  buildSniffer("userCount2", karma);

  // *** join org names with user names using explicit Mapper and Reducer implementations
  karma
      .map(
          "{ id name }",
          "orgSpout(id)",
          new Mapper() {
            @Override
            public void map(Tuple t, Emitter e) {
              e.emit(t.getValueByField("id"), t.getStringByField("name"), L());
            }
          })
      .map(
          "{ orgId name }",
          "userSpout(id)",
          new Mapper() {
            @Override
            public void map(Tuple t, Emitter e) {
              e.emit(t.getValueByField("orgId"), null, L(t.getStringByField("name")));
            }
          })
      .red(
          "{ orgName userNames }",
          "orgToUsernames(orgId)",
          new Reducer() {
            @Override
            public List reduce(Tuple key, Tuple a, Tuple b) {
              Set<String> names = new TreeSet<String>();
              names.addAll((List) (a.getValueByField("userNames")));
              names.addAll((List) (b.getValueByField("userNames")));
              return L(
                  a.getString(0) != null ? a.getString(0) : b.getString(0),
                  new ArrayList(names));
            }
          });

  // *** the same join, declaring key and output fields with L(...) lists instead of the
  // string spec
  karma
      .map(
          "orgSpout",
          L("id"),
          L("id", "name"),
          new Mapper() {
            @Override
            public void map(Tuple t, Emitter e) {
              e.emit(t.getValueByField("id"), t.getStringByField("name"), L());
            }
          })
      .map(
          "userSpout",
          L("id"),
          L("orgId", "name"),
          new Mapper() {
            @Override
            public void map(Tuple t, Emitter e) {
              e.emit(t.getValueByField("orgId"), null, L(t.getStringByField("name")));
            }
          })
      .red(
          "{ orgName userNames }",
          "org2Usernames(orgId)",
          new Reducer() {
            @Override
            public List reduce(Tuple key, Tuple a, Tuple b) {
              Set<String> names = new TreeSet<String>();
              names.addAll((List) (a.getValueByField("userNames")));
              names.addAll((List) (b.getValueByField("userNames")));
              return L(
                  a.getString(0) != null ? a.getString(0) : b.getString(0),
                  new ArrayList(names));
            }
          });
  buildSniffer("org2Usernames", karma);

  // *** invert the join: map each user name to the set of org names it appears under
  karma
      .map(
          "{ orgName userNames }",
          "org2Usernames(orgId)",
          new Mapper() {
            @Override
            public void map(Tuple t, Emitter e) {
              String orgName = t.getStringByField("orgName");
              if (orgName != null) {
                for (String userName : (List<String>) t.getValueByField("userNames")) {
                  e.emit(userName, L(orgName));
                }
              }
            }
          })
      .red(
          "{ orgNames }",
          "userNames2OrgNames(userName)",
          new Reducer() {
            @Override
            public List reduce(Tuple key, Tuple a, Tuple b) {
              System.out.println("userNames2OrgNames reducing: a: " + a + ", b: " + b);
              Set<String> orgNames = new TreeSet<String>();
              orgNames.addAll((List) a.getValue(0));
              orgNames.addAll((List) b.getValue(0));
              return L(new ArrayList(orgNames));
            }
          });
  buildSniffer("userNames2OrgNames", karma);

  cluster.submitTopology("karma", new Config(), karma.createTopology());

  // *** send demo events: create orgs and users, rename and reassign them, then delete them
  Producer<Long, Message> producer = broker.buildSyncProducer();

  // Demo.countdown("Adding orgs and users in ", 5);
  // sendOrgs(producer);
  // send100Users(producer);

  // Demo.readEnter("*** Adding acme", 3);
  sendOrg(producer, 1000, "Acme");

  // Demo.readEnter("*** Adding 10 greggs", 3);
  for (int i = 0; i < 10; i++) {
    // Demo.readEnter("** Adding gregg " + (i + 1), 1);
    sendUser(producer, 2000 + i, "Gregg", 1000);
  }

  // Demo.readEnter("*** Changing greggs to seth and assigning to org 1 in", 3);
  Utils.sleep(2000);
  sendOrg(producer, 1, "Kfc");
  for (int i = 0; i < 10; i++) {
    // Demo.readEnter("** Changing gregg " + (i + 1) + " to seth and Kfc in", 1);
    sendUser(producer, 2000 + i, "Seth", 1);
  }

  // Demo.readEnter("*** Deleting acme", 3);
  for (int i = 0; i < 10; i++) {
    deleteUser(producer, 2000 + i);
  }
  deleteOrg(producer, 1000);
  deleteOrg(producer, 1);

  Utils.sleep(100000);
  producer.close();
}
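// ORG_SCHEME and USER_SCHEME, passed to buildReplaySpoutConfig above, are defined elsewhere in
// the demo.  A minimal sketch of what USER_SCHEME could look like, assuming the user events are
// JSON documents carrying the id, orgId, and name fields referenced by the map expressions
// above, and assuming Jackson 2.x (com.fasterxml.jackson.databind) plus
// backtype.storm.spout.Scheme, backtype.storm.tuple.Fields, and backtype.storm.tuple.Values are
// on the classpath; the scheme actually used by the demo may deserialize differently.
public static final Scheme USER_SCHEME =
    new Scheme() {
      @Override
      public List<Object> deserialize(byte[] bytes) {
        try {
          // create the mapper per call so the anonymous scheme stays trivially serializable
          JsonNode user = new ObjectMapper().readTree(bytes);
          return new Values(
              user.get("id").asLong(), user.get("orgId").asLong(), user.get("name").asText());
        } catch (IOException e) {
          throw new RuntimeException("could not parse user event", e);
        }
      }

      @Override
      public Fields getOutputFields() {
        return new Fields("id", "orgId", "name");
      }
    };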