@Override protected KafkaEndpoint createEndpoint(String uri, String remaining, Map<String, Object> params) throws Exception { KafkaEndpoint endpoint = new KafkaEndpoint(uri, this); String brokers = remaining.split("\\?")[0]; if (brokers != null) { endpoint.getConfiguration().setBrokers(brokers); } // configure component options before endpoint properties which can override from params endpoint.getConfiguration().setWorkerPool(workerPool); setProperties(endpoint, params); return endpoint; }
/**
 * Publishes the exchange body to Kafka.
 * <p>
 * The destination topic is read from the {@code KafkaComponent.TOPIC_NAME} header when
 * present, otherwise from the endpoint configuration. A {@link List} body is sent as a
 * single batch of messages; any other non-null body is sent as one message. Payloads are
 * serialized to bytes via {@code BinaryHelper}.
 *
 * @param exchange the Camel exchange carrying the payload
 * @throws Exception if serialization or the Kafka send fails
 * @see org.apache.camel.Processor#process(org.apache.camel.Exchange)
 */
@Override
@SuppressWarnings("unchecked")
public void process(final Exchange exchange) throws Exception {
    final String topicName = exchange.getIn().getHeaders().containsKey(KafkaComponent.TOPIC_NAME)
            ? exchange.getIn().getHeader(KafkaComponent.TOPIC_NAME, String.class)
            : endpoint.getTopicName();

    final List<Object> events = exchange.getIn().getBody(List.class);
    if (events == null) {
        // Single-message path: send the raw body if there is one.
        final Object event = exchange.getIn().getBody();
        if (event != null) {
            producer.send(new ProducerData<String, Message>(
                    topicName, new Message(BinaryHelper.getInstance().getBytes(event))));
            if (LOG.isInfoEnabled()) {
                LOG.info("Kafka Producer send : " + event);
            }
        }
        return;
    }

    // Batch path: wrap every element and hand the whole batch to the producer at once.
    final List<ProducerData<String, Message>> batch =
            new ArrayList<ProducerData<String, Message>>(events.size());
    for (final Object event : events) {
        batch.add(new ProducerData<String, Message>(
                topicName, new Message(BinaryHelper.getInstance().getBytes(event))));
    }
    producer.send(batch);
    if (LOG.isInfoEnabled()) {
        LOG.info("Kafka Producer multiple send : " + events);
    }
}
/**
 * Creates a Kafka producer bound to the given endpoint.
 * <p>
 * Translates the endpoint configuration into the legacy (0.7-era) Kafka producer
 * property keys and builds a {@code kafka.javaapi.producer.Producer}. The async-only
 * queue/batch/handler properties are applied only when {@code producerType} is
 * {@code "async"}.
 *
 * @param endpoint the endpoint supplying the producer configuration
 * @throws ClassNotFoundException if a configured serializer/partitioner/handler class
 *         cannot be loaded
 */
public KafkaProducer(final KafkaEndpoint endpoint) throws ClassNotFoundException {
    super(endpoint);
    this.endpoint = endpoint;
    final Properties props = new Properties();
    props.put("zk.connect", endpoint.getZkConnect());
    // Optional class names: only set when configured, so Kafka's defaults apply otherwise.
    if (!"".equals(endpoint.getSerializerClass())) {
        props.put("serializer.class", endpoint.getSerializerClass());
    }
    if (!"".equals(endpoint.getPartitionerClass())) {
        props.put("partitioner.class", endpoint.getPartitionerClass());
    }
    props.put("producer.type", endpoint.getProducerType());
    props.put("broker.list", endpoint.getBrokerList());
    props.put("buffer.size", endpoint.getBufferSize());
    props.put("connect.timeout.ms", endpoint.getConnectTimeoutMs());
    props.put("socket.timeout.ms", endpoint.getSocketTimeoutMs());
    props.put("reconnect.interval", endpoint.getReconnectInterval());
    props.put("max.message.size", endpoint.getMaxMessageSize());
    props.put("compression.codec", endpoint.getCompressionCodec());
    props.put("compressed.topics", endpoint.getCompressedTopics());
    props.put("zk.read.num.retries", endpoint.getZkReadNumRetries());
    // producer.type=async — these settings are meaningful only for the async producer.
    if ("async".equals(endpoint.getProducerType())) {
        props.put("queue.time", endpoint.getQueueTime());
        props.put("queue.size", endpoint.getQueueSize());
        props.put("batch.size", endpoint.getBatchSize());
        if (!"".equals(endpoint.getEventHandler())) {
            props.put("event.handler", endpoint.getEventHandler());
        }
        props.put("event.handler.props", endpoint.getEventHandlerProps());
        if (!"".equals(endpoint.getCallbackHandler())) {
            props.put("callback.handler", endpoint.getCallbackHandler());
        }
        props.put("callback.handler.props", endpoint.getCallbackHandlerProps());
    }
    final ProducerConfig config = new ProducerConfig(props);
    if (LOG.isInfoEnabled()) {
        LOG.info("Kafka producer Component initialized");
    }
    producer = new kafka.javaapi.producer.Producer<String, Message>(config);
}