Example #1
  @Test
  public void testTypeInfo() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Long> src1 = env.generateSequence(0, 0);
    assertEquals(TypeExtractor.getForClass(Long.class), src1.getType());

    DataStream<Tuple2<Integer, String>> map =
        src1.map(
            new MapFunction<Long, Tuple2<Integer, String>>() {
              @Override
              public Tuple2<Integer, String> map(Long value) throws Exception {
                return null;
              }
            });

    assertEquals(TypeExtractor.getForObject(new Tuple2<>(0, "")), map.getType());

    DataStream<String> window =
        map.windowAll(GlobalWindows.create())
            .trigger(PurgingTrigger.of(CountTrigger.of(5)))
            .apply(
                new AllWindowFunction<Tuple2<Integer, String>, String, GlobalWindow>() {
                  @Override
                  public void apply(
                      GlobalWindow window,
                      Iterable<Tuple2<Integer, String>> values,
                      Collector<String> out)
                      throws Exception {}
                });

    assertEquals(TypeExtractor.getForClass(String.class), window.getType());

    DataStream<CustomPOJO> flatten =
        window
            .windowAll(GlobalWindows.create())
            .trigger(PurgingTrigger.of(CountTrigger.of(5)))
            .fold(
                new CustomPOJO(),
                new FoldFunction<String, CustomPOJO>() {
                  private static final long serialVersionUID = 1L;

                  @Override
                  public CustomPOJO fold(CustomPOJO accumulator, String value) throws Exception {
                    return null;
                  }
                });

    assertEquals(TypeExtractor.getForClass(CustomPOJO.class), flatten.getType());
  }
Example #2
 /**
  * Windows this {@code DataStream} into tumbling count windows.
  *
  * <p>Note: This operation can be inherently non-parallel since all elements have to pass through
  * the same operator instance. (Only for special cases, such as aligned time windows is it
  * possible to perform this operation in parallel).
  *
  * @param size The size of the windows in number of elements.
  */
 public AllWindowedStream<T, GlobalWindow> countWindowAll(long size) {
   return windowAll(GlobalWindows.create()).trigger(PurgingTrigger.of(CountTrigger.of(size)));
 }
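
A minimal usage sketch of countWindowAll (an addition for illustration, not part of the original tests): it sums every 100 elements of a sequence through a single global count window. The helper name, the source range, and the window size of 100 are assumed values; imports are omitted, as in the surrounding snippets.

  // Hypothetical helper, not taken from the Flink code base.
  public static void countWindowAllSketch() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Long> sums =
        env.generateSequence(0, 999)
            .countWindowAll(100) // tumbling count window over the whole stream, parallelism 1
            .reduce(
                new ReduceFunction<Long>() {
                  @Override
                  public Long reduce(Long a, Long b) throws Exception {
                    return a + b;
                  }
                });

    sums.print();
    env.execute("countWindowAll sketch");
  }
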
  @Test
  @SuppressWarnings("unchecked")
  public void testCountTrigger() throws Exception {
    closeCalled.set(0);

    final int WINDOW_SIZE = 4;

    TypeInformation<Tuple2<String, Integer>> inputType =
        TypeInfoParser.parse("Tuple2<String, Integer>");

    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
            "window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));

    WindowOperator<
            String,
            Tuple2<String, Integer>,
            Tuple2<String, Integer>,
            Tuple2<String, Integer>,
            GlobalWindow>
        operator =
            new WindowOperator<>(
                GlobalWindows.create(),
                new GlobalWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(
                    new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
                PurgingTrigger.of(CountTrigger.of(WINDOW_SIZE)));

    operator.setInputType(
        TypeInfoParser.<Tuple2<String, Integer>>parse("Tuple2<String, Integer>"),
        new ExecutionConfig());

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
        testHarness = new OneInputStreamOperatorTestHarness<>(operator);

    testHarness.configureForKeyedStream(new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    // The global window actually ignores these timestamps...

    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted(
        "Output was not correct.",
        expectedOutput,
        testHarness.getOutput(),
        new ResultSortComparator());

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted(
        "Output was not correct.",
        expectedOutput,
        testHarness.getOutput(),
        new ResultSortComparator());

    testHarness.close();
  }
Example #4
  /** Tests {@link SingleOutputStreamOperator#name(String)} functionality. */
  @Test
  public void testNaming() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Long> dataStream1 =
        env.generateSequence(0, 0)
            .name("testSource1")
            .map(
                new MapFunction<Long, Long>() {
                  @Override
                  public Long map(Long value) throws Exception {
                    return null;
                  }
                })
            .name("testMap");

    DataStream<Long> dataStream2 =
        env.generateSequence(0, 0)
            .name("testSource2")
            .map(
                new MapFunction<Long, Long>() {
                  @Override
                  public Long map(Long value) throws Exception {
                    return null;
                  }
                })
            .name("testMap");

    DataStreamSink<Long> connected =
        dataStream1
            .connect(dataStream2)
            .flatMap(
                new CoFlatMapFunction<Long, Long, Long>() {
                  private static final long serialVersionUID = 1L;

                  @Override
                  public void flatMap1(Long value, Collector<Long> out) throws Exception {}

                  @Override
                  public void flatMap2(Long value, Collector<Long> out) throws Exception {}
                })
            .name("testCoFlatMap")
            .windowAll(GlobalWindows.create())
            .trigger(PurgingTrigger.of(CountTrigger.of(10)))
            .fold(
                0L,
                new FoldFunction<Long, Long>() {
                  private static final long serialVersionUID = 1L;

                  @Override
                  public Long fold(Long accumulator, Long value) throws Exception {
                    return null;
                  }
                })
            .name("testWindowFold")
            .print();

    // test functionality through the operator names in the execution plan
    String plan = env.getExecutionPlan();

    assertTrue(plan.contains("testSource1"));
    assertTrue(plan.contains("testSource2"));
    assertTrue(plan.contains("testMap"));
    assertTrue(plan.contains("testMap"));
    assertTrue(plan.contains("testCoFlatMap"));
    assertTrue(plan.contains("testWindowFold"));
  }
Example #5
  /** Tests whether parallelism gets set. */
  @Test
  public void testParallelism() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStreamSource<Tuple2<Long, Long>> src = env.fromElements(new Tuple2<>(0L, 0L));
    env.setParallelism(10);

    SingleOutputStreamOperator<Long, ?> map =
        src.map(
                new MapFunction<Tuple2<Long, Long>, Long>() {
                  @Override
                  public Long map(Tuple2<Long, Long> value) throws Exception {
                    return null;
                  }
                })
            .name("MyMap");

    DataStream<Long> windowed =
        map.windowAll(GlobalWindows.create())
            .trigger(PurgingTrigger.of(CountTrigger.of(10)))
            .fold(
                0L,
                new FoldFunction<Long, Long>() {
                  @Override
                  public Long fold(Long accumulator, Long value) throws Exception {
                    return null;
                  }
                });

    windowed.addSink(new NoOpSink<Long>());

    DataStreamSink<Long> sink =
        map.addSink(
            new SinkFunction<Long>() {
              private static final long serialVersionUID = 1L;

              @Override
              public void invoke(Long value) throws Exception {}
            });

    assertEquals(1, env.getStreamGraph().getStreamNode(src.getId()).getParallelism());
    assertEquals(10, env.getStreamGraph().getStreamNode(map.getId()).getParallelism());
    assertEquals(1, env.getStreamGraph().getStreamNode(windowed.getId()).getParallelism());
    assertEquals(
        10, env.getStreamGraph().getStreamNode(sink.getTransformation().getId()).getParallelism());

    env.setParallelism(7);

    // Some parts, such as windowing, rely on the fact that previous operators have a parallelism
    // set when instantiating the Discretizer. This would break if we dynamically changed
    // the parallelism of operations when changing the setting on the Execution Environment.
    assertEquals(1, env.getStreamGraph().getStreamNode(src.getId()).getParallelism());
    assertEquals(10, env.getStreamGraph().getStreamNode(map.getId()).getParallelism());
    assertEquals(1, env.getStreamGraph().getStreamNode(windowed.getId()).getParallelism());
    assertEquals(
        10, env.getStreamGraph().getStreamNode(sink.getTransformation().getId()).getParallelism());

    try {
      src.setParallelism(3);
      fail();
    } catch (IllegalArgumentException success) {
      // do nothing
    }

    DataStreamSource<Long> parallelSource = env.generateSequence(0, 0);
    parallelSource.addSink(new NoOpSink<Long>());
    assertEquals(7, env.getStreamGraph().getStreamNode(parallelSource.getId()).getParallelism());

    parallelSource.setParallelism(3);
    assertEquals(3, env.getStreamGraph().getStreamNode(parallelSource.getId()).getParallelism());

    map.setParallelism(2);
    assertEquals(2, env.getStreamGraph().getStreamNode(map.getId()).getParallelism());

    sink.setParallelism(4);
    assertEquals(
        4, env.getStreamGraph().getStreamNode(sink.getTransformation().getId()).getParallelism());
  }