// SparkStreamingPullDataFromFlume.java -- the class name is assumed here (taken
// from the appName below); uses the Spark 1.x-era spark-streaming-flume API.
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.flume.FlumeUtils;
import org.apache.spark.streaming.flume.SparkFlumeEvent;

import scala.Tuple2;

public class SparkStreamingPullDataFromFlume {

  public static void main(String[] args) {
    SparkConf conf =
        new SparkConf()
            .setMaster("local[4]")
            .setAppName("SparkStreamingPullDataFromFlume for Java");
    // One micro-batch every 30 seconds.
    JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(30));
    // Push model (for comparison): Flume pushes events to a Spark Streaming
    // receiver listening on master1:9999.
    // JavaReceiverInputDStream<SparkFlumeEvent> lines =
    //     FlumeUtils.createStream(jsc, "master1", 9999);

    // Pull model: Spark Streaming polls the Flume agent's sink on master1:9999.
    JavaReceiverInputDStream<SparkFlumeEvent> lines =
        FlumeUtils.createPollingStream(jsc, "master1", 9999);
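    // Note: the pull model requires the Flume agent itself to run Spark's custom
    // sink from the spark-streaming-flume-sink artifact; a sample agent
    // configuration is sketched after this class.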
    // Split each Flume event body into words.
    JavaDStream<String> words =
        lines.flatMap(
            new FlatMapFunction<SparkFlumeEvent, String>() {
              private static final long serialVersionUID = 1L;

              @Override
              public Iterable<String> call(SparkFlumeEvent event) throws Exception {
                // Decode the event body as UTF-8 text rather than relying on the
                // platform default charset.
                String line =
                    new String(event.event().getBody().array(), StandardCharsets.UTF_8);
                return Arrays.asList(line.split(" "));
              }
            });

    // Map each word to a (word, 1) pair.
    JavaPairDStream<String, Integer> pairs =
        words.mapToPair(
            new PairFunction<String, String, Integer>() {

              private static final long serialVersionUID = 1L;

              @Override
              public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
              }
            });

    JavaPairDStream<String, Integer> wordsCount =
        pairs.reduceByKey(
            // Sum the values for each key (reduceByKey combines locally on the
            // map side as well as on the reducer side).
            new Function2<Integer, Integer, Integer>() {

              private static final long serialVersionUID = 1L;

              @Override
              public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
              }
            });

    // Print the first elements of each batch's counts to the console.
    wordsCount.print();

    jsc.start();

    jsc.awaitTermination();
    jsc.close();
  }
}
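// A minimal sketch of the matching Flume agent configuration for the pull model.
// The agent name ("agent"), the channel name ("memoryChannel"), and the rest of
// the source/channel setup are assumed; the SparkSink settings are what
// createPollingStream actually connects to:
//
//   agent.sinks = spark
//   agent.sinks.spark.type = org.apache.spark.streaming.flume.sink.SparkSink
//   agent.sinks.spark.hostname = master1
//   agent.sinks.spark.port = 9999
//   agent.sinks.spark.channel = memoryChannel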
// JavaFlumeEventCount.java -- a second, standalone example using the push model;
// it must live in its own class, since a class can declare only one main method.
import org.apache.spark.api.java.function.Function;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.flume.FlumeUtils;
import org.apache.spark.streaming.flume.SparkFlumeEvent;

public class JavaFlumeEventCount {

  public static void main(String[] args) {
    if (args.length != 3) {
      System.err.println("Usage: JavaFlumeEventCount <master> <host> <port>");
      System.exit(1);
    }
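    // Example arguments (hypothetical values): local[2] localhost 9999
    // i.e. run locally with two threads, with the receiver bound to
    // localhost:9999, where the Flume agent's avro sink should point.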

    String master = args[0];
    String host = args[1];
    int port = Integer.parseInt(args[2]);

    // One micro-batch every 2 seconds.
    Duration batchInterval = new Duration(2000);

    // Old-style constructor that takes the master URL, Spark home, and job jars
    // directly instead of a SparkConf.
    JavaStreamingContext ssc =
        new JavaStreamingContext(
            master,
            "FlumeEventCount",
            batchInterval,
            System.getenv("SPARK_HOME"),
            JavaStreamingContext.jarOfClass(JavaFlumeEventCount.class));
    // Push model: use the <host> argument rather than a hard-coded "localhost".
    JavaDStream<SparkFlumeEvent> flumeStream = FlumeUtils.createStream(ssc, host, port);

    // Count the events in each batch and print a summary line; count() is a
    // transformation, so it only executes via the print() output operation.
    flumeStream
        .count()
        .map(
            new Function<Long, String>() {
              @Override
              public String call(Long in) {
                return "Received " + in + " flume events.";
              }
            })
        .print();

    ssc.start();
    // Without awaitTermination() the driver's main thread returns immediately.
    ssc.awaitTermination();
  }
}
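// FlatMapFunction, PairFunction, and Function2 are single-abstract-method
// interfaces, so the first word-count pipeline can be written much more compactly
// with Java 8 lambdas. A minimal sketch under the same assumptions as above
// (Spark 1.x Flume integration, a SparkSink on master1:9999); the class name is
// made up for illustration:
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.flume.FlumeUtils;

import scala.Tuple2;

public class SparkStreamingPullDataFromFlumeLambda {

  public static void main(String[] args) {
    SparkConf conf =
        new SparkConf()
            .setMaster("local[4]")
            .setAppName("SparkStreamingPullDataFromFlumeLambda");
    JavaStreamingContext jsc = new JavaStreamingContext(conf, Durations.seconds(30));

    JavaPairDStream<String, Integer> wordsCount =
        FlumeUtils.createPollingStream(jsc, "master1", 9999)
            // Decode each event body as UTF-8 and split it into words.
            .flatMap(
                event ->
                    Arrays.asList(
                        new String(event.event().getBody().array(), StandardCharsets.UTF_8)
                            .split(" ")))
            // Pair each word with 1, then sum the counts per word.
            .mapToPair(word -> new Tuple2<>(word, 1))
            .reduceByKey((v1, v2) -> v1 + v2);

    wordsCount.print();

    jsc.start();
    jsc.awaitTermination();
  }
}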