/**
 * @param args the command line arguments: 1) kafka broker
 */
public static void main(String[] args) {
  Properties props = new Properties();
  if (args.length >= 1) {
    props.put("metadata.broker.list", args[0]);
  } else {
    props.put("metadata.broker.list", "192.168.47.129:9093");
  }
  props.put("serializer.class", "eu.europeana.cloud.service.dps.storm.kafka.JsonEncoder");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, DpsTask> producer = new Producer<>(config);

  DpsTask msg = new DpsTask();
  msg.setTaskName(PluginParameterKeys.NEW_ANNOTATION_MESSAGE);
  msg.addParameter(PluginParameterKeys.INDEX_DATA, "True");
  IndexerInformations ii =
      new IndexerInformations(indexers[0], "index_mlt_4", "mlt4", "192.168.47.129:9300");
  msg.addParameter(PluginParameterKeys.INDEXER, ii.toTaskString());
  msg.addParameter(PluginParameterKeys.FILE_URL, "url to annotation");

  KeyedMessage<String, DpsTask> data =
      new KeyedMessage<>(IndexerConstants.KAFKA_INPUT_TOPIC, msg);
  producer.send(data);
  producer.close();
}
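For comparison, a minimal sketch of the same one-shot send using the modern producer client (org.apache.kafka.clients.producer.KafkaProducer, available since Kafka 0.8.2) rather than the legacy kafka.javaapi.producer API used above. The broker address is carried over from the snippet; the topic name and the pre-serialized JSON payload are placeholders, since IndexerConstants.KAFKA_INPUT_TOPIC and the DpsTask JSON encoding live in the original project.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ModernProducerSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "192.168.47.129:9093"); // replaces "metadata.broker.list"
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("acks", "1"); // same semantics as "request.required.acks"

    // Placeholder payload: the real code JSON-encodes a DpsTask via its JsonEncoder.
    String taskAsJson = "{\"taskName\":\"newAnnotationMessage\"}";
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
      producer.send(new ProducerRecord<>("kafka_input_topic", taskAsJson)); // hypothetical topic name
    }
  }
}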
public void sendUpUserMessage() throws IOException {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", "10.1.3.55:9092,10.1.3.56:9092,10.1.3.59:9092");
  properties.put("serializer.class", "kafka.serializer.StringEncoder");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  kafka.javaapi.producer.Producer<String, String> producer =
      new kafka.javaapi.producer.Producer<String, String>(producerConfig);

  InputStream in = this.getClass().getResourceAsStream("/upusers.csv");
  BufferedReader reader = new BufferedReader(new InputStreamReader(in));
  String line = null;
  while ((line = reader.readLine()) != null) {
    ArrayList<String> list =
        new ArrayList<String>(Arrays.asList(line.replace(";NULL", "").split(",")));
    if (list.size() >= 2) { // the two leading fields are stripped below, so require at least two
      list.remove(0);
      String uid = list.remove(0);
      // Replace the uid with a salted MD5 hash to anonymize the record.
      String nline =
          Hashing.md5()
                  .hashString(
                      uid + System.currentTimeMillis() + new Random().nextLong(), Charsets.UTF_8)
              + ","
              + Joiner.on(",").join(list);
      KeyedMessage<String, String> message = new KeyedMessage<String, String>(TOPIC, nline);
      producer.send(message);
    }
  }
  reader.close();
  producer.close();
}
public static void main(String[] args) {
  // Left-to-right evaluation: 1 + 1 + '1' (char value 49) + 1 + 1 + 1 + 1 = 55,
  // then 55 + "1" concatenates to the string "551".
  String s = 1 + 1 + '1' + 1 + 1 + 1 + 1 + "1";
  System.out.println(s);

  Random rnd = new Random();
  Properties props = new Properties();
  props.put("metadata.broker.list", "120.25.204.152:9092");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("partitioner.class", "com.mt.kafka.SimplePartitioner");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (long nEvents = 0; nEvents < 4; nEvents++) {
    long runtime = new Date().getTime();
    String ip = "192.168.2." + rnd.nextInt(255);
    String msg = runtime + ",www.example.com," + ip;
    KeyedMessage<String, String> data = new KeyedMessage<String, String>("page_visits", ip, msg);
    producer.send(data);
  }
  producer.close();
}
@Override
public void onReceive(Object event) throws Exception {
  MobileEyeEvent mee = (MobileEyeEvent) event;
  String eventToPass = mee.toString();
  String driverId = String.valueOf(mee.getTruck().getDriver().getDriverId());
  logger.info(
      "Creating event[" + eventToPass + "] for driver[" + driverId + "] in truck ["
          + mee.getTruck() + "]");
  try {
    KeyedMessage<String, String> data =
        new KeyedMessage<String, String>(TOPIC, driverId, eventToPass);
    kafkaProducer.send(data);
  } catch (Exception e) {
    logger.error(
        "Error sending event[" + eventToPass + "] to Kafka queue ("
            + props.get("metadata.broker.list") + ")",
        e);
  }
}
public void run() {
  try {
    Properties props = new Properties();
    props.put("metadata.broker.list", "hadoop-m.c.mpcs53013-2015.internal:6667");
    props.put(
        "zk.connect",
        "hadoop-w-1.c.mpcs53013-2015.internal:2181,hadoop-w-0.c.mpcs53013-2015.internal:2181,"
            + "hadoop-m.c.mpcs53013-2015.internal:2181");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");

    // Set up the producer from this config.
    String TOPIC = "yuan_yelp_reviews";
    ProducerConfig config = new ProducerConfig(props);
    Producer<String, String> producer = new Producer<String, String>(config);

    InputStream in = new FileInputStream(new File(fileDir));
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println(line);
      KeyedMessage<String, String> data = new KeyedMessage<String, String>(TOPIC, line);
      producer.send(data);
    }
    reader.close();
    producer.close();
  } catch (IOException e) {
    e.printStackTrace();
  }
}
public void send(SimplifiedLog message, String topicName) {
  if (producer == null) {
    producer =
        KafkaUtils.createProducer("localhost:" + kafkaPort, KafkaProducerType.ASYNC, false);
  }
  producer.send(new KeyedMessage<>(topicName, message.getHostName(), message));
  LOGGER.debug("Sent message: {}", message);
}
public void sendKafka(String message) {
  setPropsKafkaConfigHolder();
  setProducer();
  setKafkaMessage(message);
  producer.send(kafkaMessage);
  producer.close();
}
public void produce(SimplifiedLog message, String topicName) {
  if (producer == null) {
    producer =
        KafkaUtils.createProducer(KafkaUtils.createZkClient(zkServer.getConnectString()), ASYNC, false);
  }
  producer.send(new KeyedMessage<>(topicName, message.getHostName(), message));
  LOGGER.debug("Sent message: {}", message);
}
private static void send(Producer<Long, Message> producer, String topic, List<?> id, List<?> value) {
  List<Object> tuple = new ArrayList<Object>();
  tuple.addAll(id);
  tuple.addAll(value);
  byte[] bytes = ser.serialize(tuple);
  producer.send(
      new ProducerData<Long, Message>(
          topic, Crc64.getCrc(JSONValue.toJSONString(id).getBytes()), L(new Message(bytes))));
}
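ProducerData (together with the L(...) helper that wraps the message into a list) belongs to the Kafka 0.7 producer API; Kafka 0.8 replaced it with KeyedMessage. A sketch of the equivalent 0.8-style send, assuming the producer is rebuilt against the 0.8 client with a matching Long key serializer:

// Hypothetical 0.8-style equivalent of the send above.
long key = Crc64.getCrc(JSONValue.toJSONString(id).getBytes());
producer.send(new KeyedMessage<Long, Message>(topic, key, new Message(bytes)));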
public static void main(String[] args) {
  String topic;
  String url;
  if (args.length == 0) {
    url = "localhost:9092";
    topic = "sensorStream";
  } else {
    url = args[0];
    topic = args[1];
  }

  String[] sensorEvents =
      new String[] {
        "<events>\n"
            + "    <event>\n"
            + "        <payloadData>\n"
            + "            <sensorId>ID1</sensorId>\n"
            + "            <sensorVersion>version1</sensorVersion>\n"
            + "            <sensorValue>45</sensorValue>\n"
            + "        </payloadData>\n"
            + "    </event>\n"
            + "</events>",
        "<events>\n"
            + "    <event>\n"
            + "        <payloadData>\n"
            + "            <sensorId>ID2</sensorId>\n"
            + "            <sensorVersion>version2</sensorVersion>\n"
            + "            <sensorValue>43</sensorValue>\n"
            + "        </payloadData>\n"
            + "    </event>\n"
            + "</events>",
        "<events>\n"
            + "    <event>\n"
            + "        <payloadData>\n"
            + "            <sensorId>ID1</sensorId>\n"
            + "            <sensorVersion>version3</sensorVersion>\n"
            + "            <sensorValue>23</sensorValue>\n"
            + "        </payloadData>\n"
            + "    </event>\n"
            + "</events>"
      };

  Properties props = new Properties();
  props.put("metadata.broker.list", url);
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, Object> producer = new Producer<String, Object>(config);
  for (String sensorEvent : sensorEvents) {
    KeyedMessage<String, Object> data = new KeyedMessage<String, Object>(topic, sensorEvent);
    producer.send(data);
  }
  producer.close();
}
public void send(String topic, IKafkaEvent<K, V> event) {
  KeyedMessage<K, V> msg = new KeyedMessage<K, V>(topic, event.getKey(), event.getMessage());
  long ts_start = System.nanoTime();
  producer.send(msg);
  long dur = System.nanoTime() - ts_start;
  stats.update(dur);
  numDispatched.incrementAndGet();
}
public void sendMultiMessageKafka(String message) {
  setPropsKafkaConfigHolder();
  setProducer();
  // Accumulate 20 copies of the message (setKafkaMultiMessage presumably appends to the
  // kafkaMultiMessage batch), then send them as a single batch.
  for (int i = 0; i < 20; i++) {
    setKafkaMultiMessage(message);
  }
  producer.send(kafkaMultiMessage);
  producer.close();
}
public void send(String topic, String message) throws Exception {
  Producer<Integer, String> producer =
      new kafka.javaapi.producer.Producer<Integer, String>(
          new ProducerConfig(Config.getInstance().getProperties(Config.CONTEXT.PRODUCER)));
  Config.getInstance()
      .getProperties(Config.CONTEXT.PRODUCER)
      .forEach((x, y) -> LOG.debug(x + "=" + y));
  producer.send(new KeyedMessage<Integer, String>(topic, message));
  producer.close();
}
public void start() throws IOException {
  KafkaProducer kafkaProducer = new KafkaProducer();
  Producer<Integer, byte[]> producer = kafkaProducer.GetProducer();

  Map<String, byte[]> hashMap = new HashMap<String, byte[]>();
  byte[] staticPayload = new byte[100];
  Arrays.fill(staticPayload, (byte) 0);
  hashMap.put("word", staticPayload);

  for (long i = 0; i < amountOfNumbers; i++) {
    KeyedMessage<Integer, byte[]> keyedMessage =
        new KeyedMessage<Integer, byte[]>(topicName, convert.toByteFrom(hashMap));
    producer.send(keyedMessage);
  }
  producer.close();
}
private void testProduceAMessage() throws TimeoutException {
  // Produce a message so we can check new offsets.
  ProducerConfig conf = kafkaRule.producerConfigWithStringEncoder();
  Producer<String, String> producer = new Producer<>(conf);
  producer.send(new KeyedMessage<>(testTopicName, "key", "value"));
  producer.close();

  // Verify publish.
  List<String> messages = kafkaRule.readStringMessages(testTopicName, 1);
  assertThat(messages, is(notNullValue()));
  assertThat(messages.size(), is(1));
  assertThat(messages.get(0), is("value"));
}
/*
 * (non-Javadoc)
 * @see org.apache.camel.Processor#process(org.apache.camel.Exchange)
 */
@Override
@SuppressWarnings("unchecked")
public void process(final Exchange exchange) throws Exception {
  String topicName;
  if (exchange.getIn().getHeaders().containsKey(KafkaComponent.TOPIC_NAME)) {
    topicName = exchange.getIn().getHeader(KafkaComponent.TOPIC_NAME, String.class);
  } else {
    topicName = endpoint.getTopicName();
  }

  final List<Object> evts = exchange.getIn().getBody(List.class);
  if (evts != null) {
    final List<ProducerData<String, Message>> datas =
        new ArrayList<ProducerData<String, Message>>();
    for (final Object obj : evts) {
      final ProducerData<String, Message> data =
          new ProducerData<String, Message>(
              topicName, new Message(BinaryHelper.getInstance().getBytes(obj)));
      datas.add(data);
    }
    producer.send(datas);
    if (LOG.isInfoEnabled()) {
      LOG.info("Kafka Producer multiple send : " + evts);
    }
  } else {
    final Object evt = exchange.getIn().getBody();
    if (evt != null) {
      final ProducerData<String, Message> data =
          new ProducerData<String, Message>(
              topicName, new Message(BinaryHelper.getInstance().getBytes(evt)));
      producer.send(data);
      if (LOG.isInfoEnabled()) {
        LOG.info("Kafka Producer send : " + evt);
      }
    }
  }
}
public void run() {
  int messageNo = 1;
  Random r = new Random(); // reuse one Random instead of allocating per iteration
  while (messageNo < 10) {
    String messageStr = rndStr.get(r.nextInt(10));
    System.out.println("[Producing Message" + messageNo + "]: " + messageStr);
    producer.send(new KeyedMessage<Integer, String>(topic, messageStr));
    messageNo++;
  }
}
public void run() {
  try {
    while (true) {
      KeyedMessage<byte[], byte[]> data = producerDataChannel.receiveRequest();
      if (!data.equals(shutdownMessage)) {
        producer.send(data);
        if (logger.isDebugEnabled()) {
          logger.debug(String.format("Sending message %s", new String(data.message())));
        }
      } else {
        break;
      }
    }
    logger.info("Producer thread " + threadName + " finished running");
  } catch (Throwable t) {
    logger.fatal("Producer thread failure due to ", t);
  } finally {
    shutdownComplete.countDown();
  }
}
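The loop above is a poison-pill shutdown: a sentinel shutdownMessage queued behind the real traffic makes the thread drain the channel, exit, and signal completion through the shutdownComplete latch. A sketch of the corresponding shutdown side, assuming the channel exposes a sendRequest counterpart to receiveRequest and that one sentinel is needed per producer thread (numProducerThreads is a hypothetical name):

// Hypothetical shutdown hook for the producer threads above.
for (int i = 0; i < numProducerThreads; i++) {
  producerDataChannel.sendRequest(shutdownMessage); // one poison pill per thread
}
shutdownComplete.await(); // latch is counted down in each thread's finally block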
/** {@inheritDoc} */
@Override
public void execute(String streamName, TupleEvent event) throws StreamingException {
  String result = null;
  try {
    result = (String) serde.serialize(BaseSerDe.changeEventsToList(event));
  } catch (StreamSerDeException e) {
    LOG.warn("Ignore a serde exception.", e);
  }
  if (result == null) {
    LOG.warn("Ignore a null result in output.");
    return;
  }
  LOG.debug("The output result is {}.", result);
  producer.send(new KeyedMessage<Integer, String>(topic, result));
  LOG.debug("Kafka send success.");
}
public static void main(String[] args) throws InterruptedException {
  Properties props = new Properties();
  props.put("metadata.broker.list", args[0]);
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  while (true) {
    for (String sentence : sentences) {
      KeyedMessage<String, String> data =
          new KeyedMessage<String, String>("storm-sentence", sentence);
      producer.send(data);
      Thread.sleep(10);
    }
  }
}
@Override
public void send(int seerCount, int subPubCount)
    throws JsonGenerationException, JsonMappingException, IOException, SeerReportingException {
  String messagePayloadAsJson = new ObjectMapper().writeValueAsString(messageSource);

  if (directlyToKafkaV1) {
    Properties props = new Properties();
    props.put("metadata.broker.list", kafkaBroker); // broker 10.252.5.240, 10.252.1.99, 10.252.3.239
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");
    Producer<String, String> subpubProducer =
        new Producer<String, String>(new ProducerConfig(props));
    KeyedMessage<String, String> data =
        new KeyedMessage<String, String>(seerTopic, messagePayloadAsJson);
    while (seerCount > 0) {
      subpubProducer.send(data);
      seerCount--;
    }
    subpubProducer.close();
    System.out.println(
        "Publishing message to " + seerTopic + " topic; The kafka broker is " + kafkaBroker + ".");
    System.out.println("Message Has Body: \n" + messagePayloadAsJson + "\n");
  } else {
    ClientConfiguration config = new ClientConfiguration(seerClient, "hRxQN4tBfy4S", seerServer);
    SeerClient client = new SeerClient(config);
    client.reportRawTincan(messagePayloadAsJson);
    System.out.println("Publishing Seer message");
    System.out.println("Publishing message of type: Tincan");
    System.out.println("Message Has Body: \n" + messagePayloadAsJson + "\n");
  }
}
@Override
public void run() {
  log.info(String.format("Producer started. Thread Id: %s", Thread.currentThread().getId()));
  int sequence = 0;
  try {
    String msg;
    while (!Thread.interrupted()) {
      msg = String.format("Sequence message: %s %s", sequence++, System.currentTimeMillis());
      KeyedMessage<String, String> keyedMessage = new KeyedMessage<String, String>(topic, msg);
      producer.send(keyedMessage);
      log.info(
          String.format("Producer. Thread Id: %s. Sent: %s", Thread.currentThread().getId(), msg));
      Thread.sleep(500);
    }
  } catch (Exception e) {
    log.error("Producer Stopped: ", e);
  }
}
@Override
public void run() {
  producer = new Producer<String, String>(config);
  for (int i = 1; i <= 6; i++) { // spread data across 6 partitions via the message key
    List<KeyedMessage<String, String>> messageList =
        new ArrayList<KeyedMessage<String, String>>();
    for (int j = 0; j < 6; j++) { // 6 messages per partition
      // KeyedMessage(String topic, String key, String message); the key drives partitioning.
      messageList.add(
          new KeyedMessage<String, String>(
              "blog", "partition[" + i + "]", "message[The " + i + " message]"));
    }
    producer.send(messageList);
  }
}
public static void main(String[] args) throws Exception {
  String topic = "aichuche-topic";

  Properties props = new Properties();
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("metadata.broker.list", "210.51.31.68:9092,210.51.31.67:9092");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (int i = 0; i <= 10000000; i++) {
    String deviceId = i % 2 == 0 ? "+86test_1" : "+86test_2";
    String currentDateUnixTimestamp =
        String.valueOf(DateUtils.getUnixTimestampFromCurrentDate()); // yyyyMMddHHmmss
    String currentDate =
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
            .format(DateUtils.getLocalTimeDateFromUnixTimestamp(currentDateUnixTimestamp));
    String data =
        deviceId
            + ";1185;2;101,"
            + currentDateUnixTimestamp
            + ",-0.4884,-0.6512,9.3278,-0.0097,-0.0024,-0.0061,-17.1875,-1.8750,30.5625,31.253138,121.354008,3.4328;"
            + currentDate;
    // A null key lets the producer choose the partition.
    KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic, null, data);
    producer.send(message);
    System.out.println("send to topic :" + data);
    Thread.sleep(2 * 1000);
  }
  producer.close();
  System.out.println("=====================OVER================");
}
public static void main(String[] args) {
  String topic = args[0];
  long events = Long.parseLong(args[1]);
  Random rnd = new Random();

  Properties props = new Properties();
  props.put("metadata.broker.list", "localhost:9092,localhost:9093");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("partitioner.class", "com.test.groups.SimplePartitioner");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (long nEvents = 0; nEvents < events; nEvents++) {
    long runtime = new Date().getTime();
    String ip = "192.168.2." + rnd.nextInt(255);
    String msg = runtime + ",www.example.com," + ip;
    KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, ip, msg);
    producer.send(data);
  }
  producer.close();
}
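The config above registers com.test.groups.SimplePartitioner but its source is not shown. A minimal sketch of what such a class typically looks like under the legacy kafka.producer.Partitioner contract, routing on the last octet of the IP key; the body is an assumption mirroring the standard Kafka 0.8 producer example, not this project's actual code:

import kafka.producer.Partitioner;
import kafka.utils.VerifiableProperties;

public class SimplePartitioner implements Partitioner {
  public SimplePartitioner(VerifiableProperties props) {
    // The legacy producer instantiates partitioners reflectively through this constructor.
  }

  @Override
  public int partition(Object key, int numPartitions) {
    // e.g. "192.168.2.42" -> 42 % numPartitions; fall back to partition 0.
    String ip = (String) key;
    int offset = ip.lastIndexOf('.');
    if (offset > 0) {
      return Integer.parseInt(ip.substring(offset + 1)) % numPartitions;
    }
    return 0;
  }
}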
@Override
public Status process() throws EventDeliveryException {
  Status result = Status.READY;
  Channel channel = getChannel();
  Transaction transaction = null;
  Event event = null;
  String eventTopic = null;
  String eventKey = null;
  try {
    long processedEvent = 0;
    transaction = channel.getTransaction();
    transaction.begin(); // begin the transaction
    messageList.clear();
    for (; processedEvent < batchSize; processedEvent++) {
      event = channel.take(); // take one event from the channel
      if (event == null) {
        break;
      }
      // A Flume Event consists of headers and a body.
      Map<String, String> headers = event.getHeaders();
      byte[] eventBody = event.getBody();
      if ((eventTopic = headers.get(TOPIC_HDR)) == null) {
        // Fall back to the default topic when the event header carries none.
        eventTopic = topic;
      }
      eventKey = headers.get(KEY_HDR);
      if (log.isDebugEnabled()) {
        log.debug("{Event}" + eventTopic + ":" + eventKey + ":" + new String(eventBody, CHARSET));
        log.debug("event #{}", processedEvent);
      }
      KeyedMessage<String, byte[]> data =
          new KeyedMessage<String, byte[]>(eventTopic, eventKey, eventBody);
      messageList.add(data);
    }
    if (processedEvent > 0) {
      producer.send(messageList);
    }
    transaction.commit(); // commit once the whole batch of batchSize events has been handed off
  } catch (Exception e) {
    String errorMsg = "Failed to publish events !";
    log.error(errorMsg, e);
    result = Status.BACKOFF;
    if (transaction != null) {
      try {
        transaction.rollback();
        log.debug("transaction rollback success !");
      } catch (Exception ex) {
        log.error(errorMsg, ex);
        throw Throwables.propagate(ex);
      }
    }
    throw new EventDeliveryException(errorMsg, e);
  } finally {
    if (transaction != null) {
      transaction.close();
    }
  }
  return result;
}
public static void main(String[] args) throws IOException {
  Properties props = new Properties();
  props.put("metadata.broker.list", "localhost:9092");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("partitioner.class", "test.SimplePartitioner");
  props.put("message.max.bytes", "1037626");
  props.put("request.required.acks", "1");
  props.put("retry.backoff.ms", "150");
  props.put("message.send.max.retries", "10");
  props.put("topic.metadata.refresh.interval.ms", "0");
  ProducerConfig config = new ProducerConfig(props);
  final Producer<String, String> producer = new Producer<String, String>(config);
  String output = null;

  // Generate facebook instance; use default values for oauth app id.
  Facebook facebook = new FacebookFactory().getInstance();
  facebook.setOAuthAppId("1238270156199618", "177cef157d0c8c006d0067b49b4bde32");
  AccessToken accessTokenString;
  try {
    accessTokenString = facebook.getOAuthAppAccessToken();
    facebook.setOAuthAccessToken(accessTokenString);

    // Example page names in the CSV: BrandBazaarr, rakulpreetsinghs, AnushkaShetty,
    // SachinTendulkar, narendramodi.
    while (true) {
      // Read comma-separated page names from the input file.
      Scanner scnr = new Scanner(new File("/home/storm/Desktop/test/input.csv"));
      int lineNumber = 1;
      scnr.useDelimiter(",");
      while (scnr.hasNext()) {
        String line = scnr.next();
        System.out.println("line " + lineNumber + " :" + line);
        lineNumber++;
        String fbquery = line + "/?fields=posts.limit(1).since(2015).until(now){id,message,name,type,picture,link,caption,description,icon,application,shares,updated_time,source,comments.summary(true){comment_count,message,can_remove,id,created_time,can_like,like_count,comments{comment_count,comments{comment_count}}},place,object_id,privacy,status_type,created_time,story,parent_id,story_tags,full_picture,likes.summary(true){id,name,username}},id,hometown,website,about,location,birthday,name,tagged{message_tags},category,category_list,talking_about_count,likes";
        try {
          RawAPIResponse rawresponse = facebook.callGetAPI(fbquery);
          JSONObject jsonobjmain = rawresponse.asJSONObject();
          output = jsonobjmain.toString();
          String postlike;
          String commentnext;
          JSONObject posts = jsonobjmain.getJSONObject("posts");
          JSONArray postdata = posts.getJSONArray("data");
          JSONObject postpaging = posts.getJSONObject("paging");
          String postnext = postpaging.getString("next");
          int count = 1;
          JSONArray commetsarry;
          JSONArray likesdata;

          // Follow the posts paging links until there is no "next".
          while (postnext != null) {
            count++;
            URL post_oracle = new URL(postnext);
            URLConnection yc = post_oracle.openConnection();
            BufferedReader in = new BufferedReader(new InputStreamReader(yc.getInputStream()));
            String post_inputLine;
            JSONObject post_obj = new JSONObject();
            while ((post_inputLine = in.readLine()) != null) {
              post_obj = new JSONObject(post_inputLine);
              JSONArray addposts = post_obj.getJSONArray("data");
              for (int i = 0; i < addposts.length(); i++) {
                JSONObject addspostobj = addposts.getJSONObject(i);
                postdata.put(addspostobj);
                output = jsonobjmain.toString();
                System.out.println("ADDED POSTS CHANGED TO-STRING");

                // ----- likes -----
                JSONObject likes = addspostobj.getJSONObject("likes");
                likesdata = likes.getJSONArray("data"); // likes of a post object
                try {
                  JSONObject paging = likes.getJSONObject("paging");
                  postlike = paging.getString("next");
                  int postlikecount = 1;
                  while (postlike != null) {
                    postlikecount++;
                    URL oraclepostlike = new URL(postlike);
                    URLConnection oraclepostlikeyc = oraclepostlike.openConnection();
                    BufferedReader oraclepostlikeycin =
                        new BufferedReader(new InputStreamReader(oraclepostlikeyc.getInputStream()));
                    String postlikeinputLine;
                    JSONObject postlikeadd = new JSONObject();
                    while ((postlikeinputLine = oraclepostlikeycin.readLine()) != null) {
                      postlikeadd = new JSONObject(postlikeinputLine);
                      JSONArray postaddlikes = postlikeadd.getJSONArray("data");
                      for (int like = 0; like < postaddlikes.length(); like++) {
                        JSONObject addslikobj = postaddlikes.getJSONObject(like);
                        likesdata.put(addslikobj);
                        output = jsonobjmain.toString();
                        System.out.println("ADDED LIKES CHANGED TO STRING");
                      }
                    }
                    try {
                      JSONObject likesnullmake = postlikeadd.getJSONObject("paging");
                      postlike = likesnullmake.getString("next");
                      System.out.println("POST LIKES ENTERED LOOP COUNT" + postlikecount);
                    } catch (Exception e) {
                      postlike = null;
                      System.out.println("there is no next in likes paging");
                    }
                    oraclepostlikeycin.close();
                  }
                } catch (Exception e) {
                  System.out.println("there is no next in likespg");
                }
                // ----- likes end -----

                // ----- comments -----
                JSONObject comments = addspostobj.getJSONObject("comments");
                commetsarry = comments.getJSONArray("data");
                try {
                  JSONObject commentspg = comments.getJSONObject("paging");
                  commentnext = commentspg.getString("next");
                  int commentscount = 1;
                  while (commentnext != null) {
                    commentscount++;
                    URL oraclecomments = new URL(commentnext);
                    URLConnection commentsyc = oraclecomments.openConnection();
                    BufferedReader commentsin =
                        new BufferedReader(new InputStreamReader(commentsyc.getInputStream()));
                    String commentsinputLine;
                    JSONObject commentsobj = new JSONObject();
                    while ((commentsinputLine = commentsin.readLine()) != null) {
                      commentsobj = new JSONObject(commentsinputLine);
                      JSONArray commentsadd = commentsobj.getJSONArray("data");
                      for (int comentsinc = 0; comentsinc < commentsadd.length(); comentsinc++) {
                        // Bug fix: index with comentsinc, not the outer post index i.
                        JSONObject commentsaddobj = commentsadd.getJSONObject(comentsinc);
                        commetsarry.put(commentsaddobj);
                        output = jsonobjmain.toString();
                        System.out.println("ADDED COMMENTS CHANGED TO STRING");
                      }
                      System.out.println("COMMENTS ENTERED LOOP COUNT" + commentscount);
                    }
                    try {
                      JSONObject commentssnullmake = commentsobj.getJSONObject("paging");
                      commentnext = commentssnullmake.getString("next");
                    } catch (Exception e) {
                      commentnext = null;
                      System.out.println("there is no comments next");
                    }
                    commentsin.close();
                  }
                } catch (Exception e) {
                  commentnext = null;
                  System.out.println("there is no comments next");
                }
                // ----- comments end -----
              } // for each post

              try {
                // Bug fix: build the KeyedMessage at send time so it carries the current
                // payload instead of the null the original fbdata was created with.
                producer.send(new KeyedMessage<String, String>("facebook", output));
                System.out.println("sent done");
              } catch (Exception e) {
                PrintStream out =
                    new PrintStream(new FileOutputStream("/home/storm/Videos/erroroutput.txt"));
                System.setErr(out);
                System.out.println("system exception returned to a file");
              }

              BufferedWriter writer = null;
              try {
                writer = new BufferedWriter(new FileWriter("/home/storm/Videos/posts.json"));
                writer.write(jsonobjmain.toString());
                System.out.println("Done writing");
              } catch (IOException e) {
                System.out.println("failed to write posts.json");
              } finally {
                try {
                  if (writer != null) writer.close();
                } catch (IOException e) {
                  System.out.println("failed to close writer");
                }
              }
            } // post read-line while

            try {
              JSONObject jo = post_obj.getJSONObject("paging");
              postnext = jo.getString("next");
            } catch (Exception e) {
              postnext = null;
              System.out.println("there is no post next");
            }
            in.close();
            System.out.println("LOOP COUNT STARTS WITH ONE " + count);
          } // posts paging while
        } catch (FacebookException e) {
          e.printStackTrace();
          System.out.println("The great " + e);
        }
      } // scanner while
    } // read-file while
  } catch (Exception e) {
    System.out.println("This error is from first try " + e);
  }
} // main close
/*
 * (non-Javadoc)
 * @see com.mozilla.bagheera.producer.Producer#send(com.mozilla.bagheera.BagheeraProto.BagheeraMessage)
 */
@Override
public void send(BagheeraMessage msg) {
  List<BagheeraMessage> list = new ArrayList<BagheeraMessage>();
  list.add(msg);
  producer.send(new ProducerData<String, BagheeraMessage>(msg.getNamespace(), msg.getId(), list));
}
public void send(List<KeyedMessage<byte[], byte[]>> msgs) {
  producer.send(msgs);
}
@BeforeClass
public static void init() throws Exception {
  final ShutdownRegistryImpl shutdownRegistry = new ShutdownRegistryImpl();
  try {
    zkTestServer =
        new ZooKeeperTestServer(0, shutdownRegistry, ZooKeeperTestServer.DEFAULT_SESSION_TIMEOUT);
    port = zkTestServer.startNetwork();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }

  Properties kafkaProps = new Properties();
  kafkaProps.setProperty("num.partitions", "1");
  kafkaProps.setProperty("port", "9092");
  kafkaProps.setProperty("broker.id", "0");
  kafkaProps.setProperty("log.dir", "/tmp/sensei-gateway-test-kafka-logs");
  // Override to the locally running zk server.
  kafkaProps.setProperty("zookeeper.connect", "localhost:" + port);
  kafkaLogFile = new File(kafkaProps.getProperty("log.dir"));
  FileUtils.deleteDirectory(kafkaLogFile);
  KafkaConfig kafkaConfig = new KafkaConfig(kafkaProps);
  kafkaServer = new KafkaServerStartable(kafkaConfig);
  kafkaServer.startup();

  Configuration config = new PropertiesConfiguration();
  config.addProperty("sensei.gateway.class", "com.senseidb.gateway.kafka.KafkaDataProviderBuilder");
  config.addProperty("sensei.gateway.kafka.group.id", "1");
  config.addProperty("sensei.gateway.kafka.zookeeper.connect", "localhost:" + port);
  config.addProperty("sensei.gateway.kafka.auto.offset.reset", "smallest");
  config.addProperty("sensei.gateway.kafka.topic", "test");
  config.addProperty("sensei.gateway.provider.batchSize", "1");
  pluginRegistry = SenseiPluginRegistry.build(config);
  pluginRegistry.start();
  kafkaGateway = pluginRegistry.getBeanByFullPrefix("sensei.gateway", SenseiGateway.class);
  kafkaGateway.start();

  config = new PropertiesConfiguration();
  config.addProperty("sensei.gateway.class", "com.senseidb.gateway.kafka.SimpleKafkaGateway");
  config.addProperty("sensei.gateway.kafka.host", "localhost");
  config.addProperty("sensei.gateway.kafka.port", "9092");
  config.addProperty("sensei.gateway.kafka.topic", "test");
  config.addProperty("sensei.gateway.kafka.timeout", "3000");
  config.addProperty("sensei.gateway.provider.batchSize", "1");
  pluginRegistry2 = SenseiPluginRegistry.build(config);
  pluginRegistry2.start();
  simpleKafkaGateway = pluginRegistry2.getBeanByFullPrefix("sensei.gateway", SenseiGateway.class);
  simpleKafkaGateway.start();

  Properties props = new Properties();
  props.put("metadata.broker.list", "localhost:9092");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  ProducerConfig producerConfig = new ProducerConfig(props);
  Producer<String, String> kafkaProducer = new Producer<String, String>(producerConfig);
  for (JSONObject jsonObj : BaseGatewayTestUtil.dataList) {
    KeyedMessage<String, String> data =
        new KeyedMessage<String, String>("test", jsonObj.toString());
    kafkaProducer.send(data);
  }
}