public static void main(String[] args) throws IOException, ParseException {
    // Entry point: builds a receiver-based Spark Streaming DStream over a Kafka
    // topic (via KafkaConsumerHelper) and counts messages per producer until
    // the user types 'Q' on the console.
    KafkaConsumerHelper helper = new KafkaConsumerHelper(args);
    JavaStreamingContext jssc = null;
    try {
      jssc = helper.createJavaStreamingContext(APP_NAME);
      Utils.consolog("jssc=[" + jssc + "]");

      JavaPairReceiverInputDStream<String, String> messages = helper.createReceiverStream(jssc);
      Utils.consolog("messages=[" + messages + "]");

      countMessagesPerProducer(messages);

      Utils.consolog("start...awaitTermination...    (type 'Q' to finish)");
      helper.startAndWait(jssc);

    } catch (Exception e) {
      // NOTE(review): broad catch is acceptable at a top-level entry point, but
      // prefer a logger over printStackTrace() if this graduates past demo code.
      e.printStackTrace();
    } finally {
      // Always stop and close the streaming context if it was created,
      // even when startup or processing failed.
      if (jssc != null) {
        Utils.consolog("stopping...closing...");
        helper.stopAndClose(jssc);
        // FIX: removed a leftover copy-pasted println referencing
        // "kafkaStream.saveAsHadoopFiles" — this example never saves to Hadoop
        // and the message was misleading.
      }
      Utils.consolog("~~ DONE ~~");
    }
  }
// ---------------------------------------------------------------------------
// Ejemplo n.º 2 (Example no. 2)
// ---------------------------------------------------------------------------
  public static void main(String[] args) throws IOException, ParseException {
    // Entry point: consumes a Kafka topic as a (String, String) pair DStream
    // and continuously persists each batch to HDFS using the new Hadoop
    // OutputFormat API, until the user types 'Q' on the console.
    KafkaConsumerHelper helper = new KafkaConsumerHelper(args);
    JavaStreamingContext jssc = null;
    try {
      jssc = helper.createJavaStreamingContext(APP_NAME);
      JavaPairReceiverInputDStream<String, String> rs = helper.createReceiverStream(jssc);

      // Hadoop client configuration for a local single-node HDFS
      // (replication 1 is only suitable for development).
      Configuration conf = new Configuration();
      conf.set("fs.defaultFS", "hdfs://localhost:9000");
      conf.set("dfs.replication", "1");
      // Explicit FileSystem implementations avoid ServiceLoader resolution
      // problems when running from a shaded/uber jar.
      conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
      conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

      // BUG FIX: the stream carries String keys and values, so the declared
      // key/value classes must be String.class — the original passed
      // Integer.class, which does not match the actual record types handed to
      // TextOutputFormat.
      rs.saveAsNewAPIHadoopFiles(
          "prefix", "txt", String.class, String.class, TextOutputFormat.class, conf);

      Utils.consolog("start...awaitTermination...    (type 'Q' to finish)");
      helper.startAndWait(jssc);

    } catch (Exception e) {
      // Broad catch at the top-level entry point; the streaming context is
      // still shut down in the finally block below.
      e.printStackTrace();
    } finally {
      // Always stop and close the streaming context if it was created.
      if (jssc != null) {
        Utils.consolog("stopping...closing...");
        helper.stopAndClose(jssc);

        System.out.println("~~~~~~~~~~~~~~~~~~~~~~kafkaStream.saveAsHadoopFiles");
      }
      Utils.consolog("~~ DONE ~~");
    }
  }