public static void main(String[] args) {
  Random rnd = new Random();

  Properties props = new Properties();
  props.put("metadata.broker.list", "120.25.204.152:9092");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("partitioner.class", "com.mt.kafka.SimplePartitioner");
  props.put("request.required.acks", "1");

  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (long nEvents = 0; nEvents < 4; nEvents++) {
    long runtime = new Date().getTime();
    String ip = "192.168.2." + rnd.nextInt(255);
    String msg = runtime + ",www.example.com," + ip;
    // The IP is the message key, so the custom partitioner can route by host.
    KeyedMessage<String, String> data = new KeyedMessage<String, String>("page_visits", ip, msg);
    producer.send(data);
  }
  producer.close();
}
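// Editor's note: the snippets in this section all use the legacy kafka.javaapi.producer
// API (Kafka 0.8.x), which has since been removed. As a point of comparison, here is a
// minimal sketch of the same loop against the modern org.apache.kafka.clients.producer
// API; the broker address and topic are carried over from above, everything else is
// illustrative and not part of the original source.
import java.util.Date;
import java.util.Properties;
import java.util.Random;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class PageVisitsProducerSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "120.25.204.152:9092"); // replaces metadata.broker.list
    props.put("acks", "1");                                // replaces request.required.acks
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

    KafkaProducer<String, String> producer = new KafkaProducer<>(props);
    Random rnd = new Random();
    for (long nEvents = 0; nEvents < 4; nEvents++) {
      String ip = "192.168.2." + rnd.nextInt(255);
      String msg = new Date().getTime() + ",www.example.com," + ip;
      // The key (ip) plays the role the custom partitioner played above.
      producer.send(new ProducerRecord<>("page_visits", ip, msg));
    }
    producer.close();
  }
}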
public void sendUpUserMessage() throws IOException {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", "10.1.3.55:9092,10.1.3.56:9092,10.1.3.59:9092");
  properties.put("serializer.class", "kafka.serializer.StringEncoder");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  kafka.javaapi.producer.Producer<String, String> producer =
      new kafka.javaapi.producer.Producer<String, String>(producerConfig);

  InputStream in = this.getClass().getResourceAsStream("/upusers.csv");
  BufferedReader reader = new BufferedReader(new InputStreamReader(in));
  String line = null;
  while ((line = reader.readLine()) != null) {
    ArrayList<String> list =
        new ArrayList<String>(Arrays.asList(line.replace(";NULL", "").split(",")));
    // Two leading fields are consumed below, so require at least two entries.
    if (list.size() > 1) {
      list.remove(0);
      String uid = list.remove(0);
      // Anonymize the user id: replace it with a salted MD5 hash.
      String nline =
          Hashing.md5()
                  .hashString(
                      uid + System.currentTimeMillis() + new Random().nextLong(), Charsets.UTF_8)
              + ","
              + Joiner.on(",").join(list);
      producer.send(new KeyedMessage<String, String>(TOPIC, nline));
    }
  }
  reader.close();
  producer.close();
}
public static void main(String[] args) throws IOException, ConnectionException {
  Logger.getRootLogger().setLevel(Level.WARN);

  // *** start the storm cluster
  LocalCluster cluster = new LocalCluster();

  // *** start kafka
  LocalKafkaBroker broker = new LocalKafkaBroker(0, 9090, 4, "localhost:2000");
  ReplayConfig replay =
      new ReplayConfig().staticHosts(broker.getHostPortStrings(), broker.getNumPartitions());

  // *** build a topology
  // In-memory alternative:
  // KarmaConfig karmaConfig = new KarmaConfigImpl("a", replay, new InMemoryReducerState());
  KarmaConfig karmaConfig = new KarmaConfigImpl("a", replay, new CassandraReducerState("demo"));
  StormTopology topology = buildTopology(karmaConfig);

  // *** submit the topology to storm
  Config config = new Config();
  config.setMaxSpoutPending(50);
  cluster.submitTopology("bankdemo", config, topology);

  // *** send some events
  Producer<Long, Message> kafkaProducer = broker.buildSyncProducer();
  JsonProducer mb = new JsonProducer(kafkaProducer).printSendsToConsole(true);
  sendBankingDemoMessages(mb);

  Utils.sleep(100000);
  kafkaProducer.close();
}
public void run() {
  try {
    Properties props = new Properties();
    // props.put("metadata.broker.list", "sandbox.hortonworks.com:6667");
    // props.put("zk.connect", "localhost:2181");
    props.put("metadata.broker.list", "hadoop-m.c.mpcs53013-2015.internal:6667");
    props.put(
        "zk.connect",
        "hadoop-w-1.c.mpcs53013-2015.internal:2181,hadoop-w-0.c.mpcs53013-2015.internal:2181,"
            + "hadoop-m.c.mpcs53013-2015.internal:2181");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");

    // Set up the producer from this config.
    String TOPIC = "yuan_yelp_reviews";
    ProducerConfig config = new ProducerConfig(props);
    Producer<String, String> producer = new Producer<String, String>(config);

    // Stream the input file to the topic, one message per line.
    InputStream in = new FileInputStream(new File(fileDir));
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println(line);
      producer.send(new KeyedMessage<String, String>(TOPIC, line));
    }
    reader.close();
    producer.close();
  } catch (IOException e) {
    e.printStackTrace();
  }
}
/** @param args the command line arguments: 1) kafka broker */
public static void main(String[] args) {
  Properties props = new Properties();
  if (args.length >= 1) {
    props.put("metadata.broker.list", args[0]);
  } else {
    props.put("metadata.broker.list", "192.168.47.129:9093");
  }
  props.put("serializer.class", "eu.europeana.cloud.service.dps.storm.kafka.JsonEncoder");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, DpsTask> producer = new Producer<>(config);

  DpsTask msg = new DpsTask();
  msg.setTaskName(PluginParameterKeys.NEW_ANNOTATION_MESSAGE);
  msg.addParameter(PluginParameterKeys.INDEX_DATA, "True");
  IndexerInformations ii =
      new IndexerInformations(indexers[0], "index_mlt_4", "mlt4", "192.168.47.129:9300");
  msg.addParameter(PluginParameterKeys.INDEXER, ii.toTaskString());
  msg.addParameter(PluginParameterKeys.FILE_URL, "url to annotation");

  KeyedMessage<String, DpsTask> data =
      new KeyedMessage<>(IndexerConstants.KAFKA_INPUT_TOPIC, msg);
  producer.send(data);
  producer.close();
}
public void sendKafka(String message) {
  setPropsKafkaConfigHolder();
  setProducer();
  setKafkaMessage(message);
  producer.send(kafkaMessage);
  producer.close();
}
public static void main(String[] args) {
  String topic;
  String url;
  if (args.length == 0) {
    url = "localhost:9092";
    topic = "sensorStream";
  } else {
    url = args[0];
    topic = args[1];
  }

  String[] sensorEvents =
      new String[] {
        "<events>\n"
            + " <event>\n"
            + " <payloadData>\n"
            + " <sensorId>ID1</sensorId>\n"
            + " <sensorVersion>version1</sensorVersion>\n"
            + " <sensorValue>45</sensorValue>\n"
            + " </payloadData>\n"
            + " </event>\n"
            + "</events>",
        "<events>\n"
            + " <event>\n"
            + " <payloadData>\n"
            + " <sensorId>ID2</sensorId>\n"
            + " <sensorVersion>version2</sensorVersion>\n"
            + " <sensorValue>43</sensorValue>\n"
            + " </payloadData>\n"
            + " </event>\n"
            + "</events>",
        "<events>\n"
            + " <event>\n"
            + " <payloadData>\n"
            + " <sensorId>ID1</sensorId>\n"
            + " <sensorVersion>version3</sensorVersion>\n"
            + " <sensorValue>23</sensorValue>\n"
            + " </payloadData>\n"
            + " </event>\n"
            + "</events>"
      };

  Properties props = new Properties();
  props.put("metadata.broker.list", url);
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, Object> producer = new Producer<String, Object>(config);

  for (String sensorEvent : sensorEvents) {
    KeyedMessage<String, Object> data = new KeyedMessage<String, Object>(topic, sensorEvent);
    producer.send(data);
  }
  producer.close();
}
public void sendMultiMessageKafka(String message) {
  setPropsKafkaConfigHolder();
  setProducer();
  for (int i = 0; i < 20; i++) {
    setKafkaMultiMessage(message);
  }
  producer.send(kafkaMultiMessage);
  producer.close();
}
public void send(String topic, String message) throws Exception {
  Producer<Integer, String> producer =
      new kafka.javaapi.producer.Producer<Integer, String>(
          new ProducerConfig(Config.getInstance().getProperties(Config.CONTEXT.PRODUCER)));
  Config.getInstance()
      .getProperties(Config.CONTEXT.PRODUCER)
      .forEach((x, y) -> LOG.debug(x + "=" + y));
  producer.send(new KeyedMessage<Integer, String>(topic, message));
  producer.close();
}
public void start() throws IOException {
  KafkaProducer kafkaProducer = new KafkaProducer();
  Producer<Integer, byte[]> producer = kafkaProducer.GetProducer();

  Map<String, byte[]> hashMap = new HashMap<String, byte[]>();
  byte[] staticPayload = new byte[100];
  Arrays.fill(staticPayload, (byte) 0);
  hashMap.put("word", staticPayload);

  for (long i = 0; i < amountOfNumbers; i++) {
    KeyedMessage<Integer, byte[]> keyedMessage =
        new KeyedMessage<Integer, byte[]>(topicName, convert.toByteFrom(hashMap));
    producer.send(keyedMessage);
  }
  producer.close();
}
private void testProduceAMessage() throws TimeoutException {
  // Produce a message so we can check new offsets.
  ProducerConfig conf = kafkaRule.producerConfigWithStringEncoder();
  Producer<String, String> producer = new Producer<>(conf);
  producer.send(new KeyedMessage<>(testTopicName, "key", "value"));
  producer.close();

  // Verify publish.
  List<String> messages = kafkaRule.readStringMessages(testTopicName, 1);
  assertThat(messages, is(notNullValue()));
  assertThat(messages.size(), is(1));
  assertThat(messages.get(0), is("value"));
}
@Override
public void onReceive(Object event) throws Exception {
  MobileEyeEvent mee = (MobileEyeEvent) event;
  String eventToPass = mee.toString();
  String driverId = String.valueOf(mee.getTruck().getDriver().getDriverId());
  logger.info(
      "Creating event[" + eventToPass + "] for driver[" + driverId + "] in truck ["
          + mee.getTruck() + "]");
  try {
    KeyedMessage<String, String> data =
        new KeyedMessage<String, String>(TOPIC, driverId, eventToPass);
    kafkaProducer.send(data);
  } catch (Exception e) {
    logger.error(
        "Error sending event[" + eventToPass + "] to Kafka queue ("
            + props.get("metadata.broker.list") + ")",
        e);
  }
}
public void send(SimplifiedLog message, String topicName) {
  if (producer == null) {
    producer =
        KafkaUtils.createProducer("localhost:" + kafkaPort, KafkaProducerType.ASYNC, false);
  }
  producer.send(new KeyedMessage<>(topicName, message.getHostName(), message));
  LOGGER.debug("Sent message: {}", message);
}
public void produce(SimplifiedLog message, String topicName) {
  if (producer == null) {
    producer =
        KafkaUtils.createProducer(
            KafkaUtils.createZkClient(zkServer.getConnectString()), ASYNC, false);
  }
  producer.send(new KeyedMessage<>(topicName, message.getHostName(), message));
  LOGGER.debug("Sent message: {}", message);
}
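// Editor's note: the two helpers above publish a SimplifiedLog POJO rather than a String,
// which under the 0.8 producer requires a custom kafka.serializer.Encoder wired in via the
// "serializer.class" property. A minimal sketch of such an encoder, assuming SimplifiedLog
// implements Serializable; the real encoder inside KafkaUtils is not shown in this section.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import kafka.serializer.Encoder;
import kafka.utils.VerifiableProperties;

public class SimplifiedLogEncoder implements Encoder<SimplifiedLog> {

  // The 0.8 producer instantiates encoders reflectively through this constructor.
  public SimplifiedLogEncoder(VerifiableProperties props) {}

  @Override
  public byte[] toBytes(SimplifiedLog log) {
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos)) {
      oos.writeObject(log); // assumes SimplifiedLog implements Serializable
      oos.flush();
      return bos.toByteArray();
    } catch (IOException e) {
      throw new RuntimeException("Failed to encode SimplifiedLog", e);
    }
  }
}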
/*
 * (non-Javadoc)
 * @see org.apache.camel.impl.DefaultProducer#doStop()
 */
@Override
protected void doStop() throws Exception {
  super.doStop();
  producer.close();
  if (LOG.isInfoEnabled()) {
    LOG.info("Kafka Producer Component stopped");
  }
}
public void awaitShutdown() {
  try {
    shutdownComplete.await();
    producer.close();
    logger.info("Producer thread " + threadName + " shutdown complete");
  } catch (InterruptedException ie) {
    logger.warn("Interrupt during shutdown of ProducerThread", ie);
  }
}
public void send(String topic, IKafkaEvent<K, V> event) {
  KeyedMessage<K, V> msg = new KeyedMessage<K, V>(topic, event.getKey(), event.getMessage());
  long tsStart = System.nanoTime();
  producer.send(msg);
  long dur = System.nanoTime() - tsStart;
  stats.update(dur);
  numDispatched.incrementAndGet();
}
public static void main(String[] args) throws InterruptedException {
  Properties props = new Properties();
  props.put("metadata.broker.list", args[0]);
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  while (true) {
    for (String sentence : sentences) {
      KeyedMessage<String, String> data =
          new KeyedMessage<String, String>("storm-sentence", sentence);
      producer.send(data);
      Thread.sleep(10);
    }
  }
}
private static void send(Producer<Long, Message> producer, String topic, List id, List value) {
  List tuple = new ArrayList();
  tuple.addAll(id);
  tuple.addAll(value);
  byte[] bytes = ser.serialize(tuple);
  producer.send(
      new ProducerData<Long, Message>(
          topic, Crc64.getCrc(JSONValue.toJSONString(id).getBytes()), L(new Message(bytes))));
}
@Override
public void send(int seerCount, int subPubCount)
    throws JsonGenerationException, JsonMappingException, IOException, SeerReportingException {
  String messagePayloadAsJson = new ObjectMapper().writeValueAsString(messageSource);

  if (directlyToKafkaV1) {
    Properties props = new Properties();
    props.put("metadata.broker.list", kafkaBroker); // e.g. 10.252.5.240, 10.252.1.99, 10.252.3.239
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");
    Producer<String, String> subpubProducer =
        new Producer<String, String>(new ProducerConfig(props));
    KeyedMessage<String, String> data =
        new KeyedMessage<String, String>(seerTopic, messagePayloadAsJson);
    while (seerCount > 0) {
      subpubProducer.send(data);
      seerCount--;
    }
    subpubProducer.close();
    System.out.println(
        "Publishing message to " + seerTopic + " topic; the Kafka broker is " + kafkaBroker + ".");
    System.out.println("Message body:\n" + messagePayloadAsJson + "\n");
  } else {
    ClientConfiguration config = new ClientConfiguration(seerClient, "hRxQN4tBfy4S", seerServer);
    SeerClient client = new SeerClient(config);
    client.reportRawTincan(messagePayloadAsJson);
    System.out.println("Publishing Seer message");
    System.out.println("Publishing message of type: Tincan");
    System.out.println("Message body:\n" + messagePayloadAsJson + "\n");
  }
}
public static void main(String[] args) throws Exception {
  String topic = "aichuche-topic";
  Random random = new Random(128);

  Properties props = new Properties();
  // props.put("zk.connect", "10.91.228.28:2181,10.91.228.29:2181,10.91.228.30:2181");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("metadata.broker.list", "210.51.31.68:9092,210.51.31.67:9092");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (int i = 0; i <= 10000000; i++) {
    String deviceId = i % 2 == 0 ? "+86test_1" : "+86test_2";
    String currentDateUnixTimestamp =
        String.valueOf(DateUtils.getUnixTimestampFromCurrentDate()); // yyyyMMddHHmmss
    String currentDate =
        new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
            .format(DateUtils.getLocalTimeDateFromUnixTimestamp(currentDateUnixTimestamp));
    String data =
        deviceId + ";1185;2;101," + currentDateUnixTimestamp
            + ",-0.4884,-0.6512,9.3278,-0.0097,-0.0024,-0.0061,-17.1875,-1.8750,30.5625,"
            + "31.253138,121.354008,3.4328;" + currentDate;
    // No partition key is needed, so pass null.
    KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic, null, data);
    producer.send(message);
    System.out.println("send to topic: " + data);
    Thread.sleep(2 * 1000);
  }
  producer.close();
  System.out.println("=====================OVER================");
}
public static void main(String[] args) {
  String topic = args[0];
  long events = Long.parseLong(args[1]);
  Random rnd = new Random();

  Properties props = new Properties();
  props.put("metadata.broker.list", "localhost:9092,localhost:9093");
  props.put("serializer.class", "kafka.serializer.StringEncoder");
  props.put("partitioner.class", "com.test.groups.SimplePartitioner");
  props.put("request.required.acks", "1");
  ProducerConfig config = new ProducerConfig(props);
  Producer<String, String> producer = new Producer<String, String>(config);

  for (long nEvents = 0; nEvents < events; nEvents++) {
    long runtime = new Date().getTime();
    String ip = "192.168.2." + rnd.nextInt(255);
    String msg = runtime + ",www.example.com," + ip;
    KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, ip, msg);
    producer.send(data);
  }
  producer.close();
}
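// Editor's note: two snippets above set "partitioner.class" (com.mt.kafka.SimplePartitioner,
// com.test.groups.SimplePartitioner) without showing the class. A minimal sketch of what such
// a partitioner typically looks like under the 0.8 API, routing on the last octet of the IP
// used as the message key; this is an illustrative stand-in, not the projects' actual source.
import kafka.producer.Partitioner;
import kafka.utils.VerifiableProperties;

public class SimplePartitioner implements Partitioner {

  // The producer instantiates partitioners reflectively through this constructor.
  public SimplePartitioner(VerifiableProperties props) {}

  @Override
  public int partition(Object key, int numPartitions) {
    // Keys above are IPs like "192.168.2.42"; use the last octet to pick a partition,
    // so events from the same host always land on the same partition.
    String ip = (String) key;
    int offset = ip.lastIndexOf('.');
    if (offset > 0) {
      return Integer.parseInt(ip.substring(offset + 1)) % numPartitions;
    }
    return 0;
  }
}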
public void run() {
  int messageNo = 1;
  Random r = new Random();
  while (messageNo < 10) {
    String messageStr = rndStr.get(r.nextInt(10));
    System.out.println("[Producing Message" + messageNo + "]: " + messageStr);
    producer.send(new KeyedMessage<Integer, String>(topic, messageStr));
    messageNo++;
  }
}
/*
 * (non-Javadoc)
 * @see org.apache.camel.Processor#process(org.apache.camel.Exchange)
 */
@Override
@SuppressWarnings("unchecked")
public void process(final Exchange exchange) throws Exception {
  String topicName;
  if (exchange.getIn().getHeaders().containsKey(KafkaComponent.TOPIC_NAME)) {
    topicName = exchange.getIn().getHeader(KafkaComponent.TOPIC_NAME, String.class);
  } else {
    topicName = endpoint.getTopicName();
  }

  final List<Object> evts = exchange.getIn().getBody(List.class);
  if (evts != null) {
    final List<ProducerData<String, Message>> datas =
        new ArrayList<ProducerData<String, Message>>();
    for (final Object obj : evts) {
      datas.add(
          new ProducerData<String, Message>(
              topicName, new Message(BinaryHelper.getInstance().getBytes(obj))));
    }
    producer.send(datas);
    if (LOG.isInfoEnabled()) {
      LOG.info("Kafka Producer multiple send : " + evts);
    }
  } else {
    final Object evt = exchange.getIn().getBody();
    if (evt != null) {
      final ProducerData<String, Message> data =
          new ProducerData<String, Message>(
              topicName, new Message(BinaryHelper.getInstance().getBytes(evt)));
      producer.send(data);
      if (LOG.isInfoEnabled()) {
        LOG.info("Kafka Producer send : " + evt);
      }
    }
  }
}
public void run() {
  try {
    while (true) {
      KeyedMessage<byte[], byte[]> data = producerDataChannel.receiveRequest();
      if (!data.equals(shutdownMessage)) {
        producer.send(data);
        if (logger.isDebugEnabled()) {
          logger.debug(String.format("Sending message %s", new String(data.message())));
        }
      } else {
        break;
      }
    }
    logger.info("Producer thread " + threadName + " finished running");
  } catch (Throwable t) {
    logger.fatal("Producer thread failure due to ", t);
  } finally {
    shutdownComplete.countDown();
  }
}
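// Editor's note: the loop above shuts down via a poison pill: it drains the channel until the
// sentinel shutdownMessage arrives, breaks, and counts down shutdownComplete, which the
// awaitShutdown() method earlier in this section blocks on. A sketch of the sending side of
// that handshake; the sendRequest method and the sentinel's construction are assumptions
// mirroring the snippet's naming, not confirmed source.
private static final KeyedMessage<byte[], byte[]> shutdownMessage =
    new KeyedMessage<byte[], byte[]>("shutdown", "shutdown".getBytes(), "shutdown".getBytes());

public void shutdown() {
  // The run() loop compares each received message against this sentinel and exits on a match.
  producerDataChannel.sendRequest(shutdownMessage);
}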
/** {@inheritDoc} */
@Override
public void execute(String streamName, TupleEvent event) throws StreamingException {
  String result = null;
  try {
    result = (String) serde.serialize(BaseSerDe.changeEventsToList(event));
  } catch (StreamSerDeException e) {
    LOG.warn("Ignore a serde exception.", e);
  }
  if (result == null) {
    LOG.warn("Ignore a null result in output.");
    return;
  }
  LOG.debug("The Output result is {}.", result);
  producer.send(new KeyedMessage<Integer, String>(topic, result));
  LOG.debug("Kafka send success.");
}
@Override public void run() { log.info(String.format("Producer started. Thread Id: %s", Thread.currentThread().getId())); int sequence = 0; try { String msg; while (!Thread.interrupted()) { msg = String.format("Sequence message: %s %s", sequence++, System.currentTimeMillis()); KeyedMessage<String, String> keyedMessage = new KeyedMessage<String, String>(topic, msg); producer.send(keyedMessage); log.info( String.format( "Producer. Thread Id: %s. Sent: %s", Thread.currentThread().getId(), msg)); Thread.sleep(500); } } catch (Exception e) { log.error("Producer Stopped: ", e); } }
@Override
public void run() {
  producer = new Producer<String, String>(config);
  for (int i = 1; i <= 6; i++) { // send data to 6 partitions
    List<KeyedMessage<String, String>> messageList =
        new ArrayList<KeyedMessage<String, String>>();
    for (int j = 0; j < 6; j++) { // 6 messages per partition
      // KeyedMessage(String topic, String key, String message)
      messageList.add(
          new KeyedMessage<String, String>(
              "blog", "partition[" + i + "]", "message[The " + i + " message]"));
    }
    producer.send(messageList);
  }
}
/** {@inheritDoc} */
@Override
public void destroy() throws StreamingException {
  if (producer != null) {
    producer.close();
  }
}
public void close() {
  producer.close();
}