@Test
  public void testRestoreWithInterrupt() throws Exception {

    Configuration taskConfig = new Configuration();
    StreamConfig cfg = new StreamConfig(taskConfig);
    cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    cfg.setStreamOperator(new StreamSource<>(new TestSource()));

    StreamStateHandle lockingHandle = new InterruptLockingStateHandle();

    TaskDeploymentDescriptor tdd = createTaskDeploymentDescriptor(taskConfig, lockingHandle);
    Task task = createTask(tdd);

    // start the task and wait until it is in "restore"
    task.startTaskThread();
    IN_RESTORE_LATCH.await();

    // trigger cancellation and signal to continue
    task.cancelExecution();

    task.getExecutingThread().join(30000);

    if (task.getExecutionState() == ExecutionState.CANCELING) {
      fail("Task is stuck and not canceling");
    }

    assertEquals(ExecutionState.CANCELED, task.getExecutionState());
    assertNull(task.getFailureCause());
  }
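For context: the test above hinges on a state handle whose restore path first triggers IN_RESTORE_LATCH and then blocks until the canceling thread interrupts the task thread. The actual InterruptLockingStateHandle is not part of this excerpt; the following is only a minimal sketch of that coordination pattern, using a plain CountDownLatch and hypothetical names.

// Illustrative sketch only -- not the actual InterruptLockingStateHandle from the test.
// The pattern: the restore path signals the test thread, then blocks until
// task.cancelExecution() interrupts the thread that is restoring.
final class BlockingRestoreSketch {

  // stands in for IN_RESTORE_LATCH used above (hypothetical field name)
  static final java.util.concurrent.CountDownLatch IN_RESTORE =
      new java.util.concurrent.CountDownLatch(1);

  static void restoreState() {
    IN_RESTORE.countDown(); // the test's IN_RESTORE_LATCH.await() returns at this point
    try {
      Thread.sleep(Long.MAX_VALUE); // block until cancellation interrupts this thread
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // keep the interrupt flag set for the shutdown path
    }
  }
}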
  private static StreamConfig createTaskConfig(
      KeySelector<?, ?> partitioner, TypeSerializer<?> keySerializer) {
    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStatePartitioner(partitioner);
    cfg.setStateKeySerializer(keySerializer);
    return cfg;
  }
  @Override
  public void setup(
      StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
    this.container = containingTask;
    this.config = config;
    this.output = output;
    this.runtimeContext =
        new StreamingRuntimeContext(
            this, container.getEnvironment(), container.getAccumulatorMap());

    stateKeySelector1 = config.getStatePartitioner(0, getUserCodeClassloader());
    stateKeySelector2 = config.getStatePartitioner(1, getUserCodeClassloader());

    try {
      TypeSerializer<Object> keySerializer = config.getStateKeySerializer(getUserCodeClassloader());
      // if the keySerializer is null we still need to create the state backend
      // for the non-partitioned state features it provides, such as the state output streams
      String operatorIdentifier =
          getClass().getSimpleName()
              + "_"
              + config.getVertexID()
              + "_"
              + runtimeContext.getIndexOfThisSubtask();
      stateBackend = container.createStateBackend(operatorIdentifier, keySerializer);
    } catch (Exception e) {
      throw new RuntimeException("Could not initialize state backend. ", e);
    }
  }
  public StreamTaskTestHarness(AbstractInvokable task, TypeInformation<OUT> outputType) {
    this.task = task;
    this.memorySize = DEFAULT_MEMORY_MANAGER_SIZE;
    this.bufferSize = DEFAULT_NETWORK_BUFFER_SIZE;

    this.jobConfig = new Configuration();
    this.taskConfig = new Configuration();
    this.executionConfig = new ExecutionConfig();

    streamConfig = new StreamConfig(taskConfig);
    streamConfig.setChainStart();
    streamConfig.setBufferTimeout(0);
    streamConfig.setTimeCharacteristic(TimeCharacteristic.EventTime);

    outputSerializer = outputType.createSerializer(executionConfig);
    outputStreamRecordSerializer = new StreamElementSerializer<OUT>(outputSerializer);
  }
  private StateBackend<?> createStateBackend() throws Exception {
    StateBackend<?> configuredBackend = configuration.getStateBackend(userClassLoader);

    if (configuredBackend != null) {
      // backend has been configured on the environment
      LOG.info("Using user-defined state backend: " + configuredBackend);
      return configuredBackend;
    } else {
      // see if we have a backend specified in the configuration
      Configuration flinkConfig = getEnvironment().getTaskManagerInfo().getConfiguration();
      String backendName = flinkConfig.getString(ConfigConstants.STATE_BACKEND, null);

      if (backendName == null) {
        LOG.warn(
            "No state backend has been specified, using default state backend (Memory / JobManager)");
        backendName = "jobmanager";
      }

      backendName = backendName.toLowerCase();
      switch (backendName) {
        case "jobmanager":
          LOG.info("State backend is set to heap memory (checkpoint to jobmanager)");
          return MemoryStateBackend.defaultInstance();

        case "filesystem":
          FsStateBackend backend = new FsStateBackendFactory().createFromConfig(flinkConfig);
          LOG.info(
              "State backend is set to heap memory (checkpoints to filesystem \""
                  + backend.getBasePath()
                  + "\")");
          return backend;

        default:
          try {
            @SuppressWarnings("rawtypes")
            Class<? extends StateBackendFactory> clazz =
                Class.forName(backendName, false, userClassLoader)
                    .asSubclass(StateBackendFactory.class);

            return clazz.newInstance().createFromConfig(flinkConfig);
          } catch (ClassNotFoundException e) {
            throw new IllegalConfigurationException(
                "Cannot find configured state backend: " + backendName);
          } catch (ClassCastException e) {
            throw new IllegalConfigurationException(
                "The class configured under '"
                    + ConfigConstants.STATE_BACKEND
                    + "' is not a valid state backend factory ("
                    + backendName
                    + ')');
          } catch (Throwable t) {
            throw new IllegalConfigurationException("Cannot create configured state backend", t);
          }
      }
    }
  }
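The default branch above instantiates a user-supplied StateBackendFactory by class name and calls createFromConfig on it. As a rough, hypothetical illustration of such a factory (the class name and generic signature are assumptions; only the createFromConfig hook and the ConfigConstants.STATE_BACKEND key are taken from the code above):

// Hypothetical factory that the "default" branch could load via Class.forName.
// Registering it would amount to setting the ConfigConstants.STATE_BACKEND key
// to this class name in the TaskManager configuration.
public class AlwaysMemoryBackendFactory implements StateBackendFactory<MemoryStateBackend> {

  @Override
  public MemoryStateBackend createFromConfig(Configuration config) {
    // ignore the configuration and always return the default in-memory backend
    return MemoryStateBackend.defaultInstance();
  }
}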
  /**
   * This test verifies that open() and close() are correctly called. This test also verifies that
   * timestamps of emitted elements are correct. {@link CoStreamMap} assigns the input timestamp to
   * emitted elements.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testOpenCloseAndTimestamps() throws Exception {
    final TwoInputStreamTask<String, Integer, String> coMapTask =
        new TwoInputStreamTask<String, Integer, String>();
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
        new TwoInputStreamTaskTestHarness<String, Integer, String>(
            coMapTask,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator =
        new CoStreamMap<String, Integer, String>(new TestOpenCloseMapFunction());
    streamConfig.setStreamOperator(coMapOperator);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.invoke();

    testHarness.processElement(new StreamRecord<String>("Hello", initialTime + 1), 0, 0);
    expectedOutput.add(new StreamRecord<String>("Hello", initialTime + 1));

    // wait until the input is processed to ensure ordering of the output
    testHarness.waitForInputProcessing();

    testHarness.processElement(new StreamRecord<Integer>(1337, initialTime + 2), 1, 0);

    expectedOutput.add(new StreamRecord<String>("1337", initialTime + 2));

    testHarness.endInput();

    testHarness.waitForTaskCompletion();

    Assert.assertTrue(
        "RichFunction methods were not called.", TestOpenCloseMapFunction.closeCalled);

    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());
  }
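The TestOpenCloseMapFunction above and the IdentityMap used in the following tests are not included in this excerpt. Judging from the expected output (the Integer 1337 becomes the String "1337"), they behave like a co-map that passes strings through and stringifies integers; the sketch below shows that inferred behavior and is not the real test helpers (a RichCoMapFunction variant would additionally record its open()/close() calls, which is what the closeCalled assertion checks).

// Inferred behavior only -- not the actual TestOpenCloseMapFunction / IdentityMap.
private static class StringifyingCoMap implements CoMapFunction<String, Integer, String> {
  private static final long serialVersionUID = 1L;

  @Override
  public String map1(String value) {
    return value; // first input: pass the String through unchanged
  }

  @Override
  public String map2(Integer value) {
    return String.valueOf(value); // second input: 1337 -> "1337", matching expectedOutput
  }
}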
  public static <IN, OUT, KEY> List<OUT> createAndExecuteForKeyedStream(
      OneInputStreamOperator<IN, OUT> operator,
      List<IN> inputs,
      KeySelector<IN, KEY> keySelector,
      TypeInformation<KEY> keyType) {

    MockContext<IN, OUT> mockContext = new MockContext<IN, OUT>(inputs);

    StreamConfig config = new StreamConfig(new Configuration());
    if (keySelector != null && keyType != null) {
      config.setStateKeySerializer(keyType.createSerializer(new ExecutionConfig()));
      config.setStatePartitioner(keySelector);
    }

    final ScheduledExecutorService timerService = Executors.newSingleThreadScheduledExecutor();
    final Object lock = new Object();
    final StreamTask<?, ?> mockTask = createMockTaskWithTimer(timerService, lock);

    operator.setup(mockTask, config, mockContext.output);
    try {
      operator.open();

      StreamRecord<IN> record = new StreamRecord<IN>(null);
      for (IN in : inputs) {
        record = record.replace(in);
        synchronized (lock) {
          operator.setKeyContextElement(record);
          operator.processElement(record);
        }
      }

      operator.close();
    } catch (Exception e) {
      throw new RuntimeException("Cannot invoke operator.", e);
    } finally {
      timerService.shutdownNow();
    }

    return mockContext.getOutputs();
  }
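A hypothetical usage of the helper above, keying strings by themselves and mapping them to their lengths (the inputs and functions are made up for illustration; StreamMap is assumed to be the standard one-input operator wrapping a MapFunction):

// Illustrative call only; inputs, functions, and the expected result are made up.
List<String> inputs = java.util.Arrays.asList("a", "bb", "ccc");

List<Integer> lengths =
    createAndExecuteForKeyedStream(
        new StreamMap<String, Integer>(
            new MapFunction<String, Integer>() {
              @Override
              public Integer map(String value) {
                return value.length(); // emit the length of each input string
              }
            }),
        inputs,
        new KeySelector<String, String>() {
          @Override
          public String getKey(String value) {
            return value; // key each element by itself
          }
        },
        BasicTypeInfo.STRING_TYPE_INFO);
// lengths should now be [1, 2, 3]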
  @Override
  public void init() throws Exception {
    StreamConfig configuration = getConfiguration();

    TypeSerializer<IN> inSerializer = configuration.getTypeSerializerIn1(getUserCodeClassLoader());
    int numberOfInputs = configuration.getNumberOfInputs();

    if (numberOfInputs > 0) {
      InputGate[] inputGates = getEnvironment().getAllInputGates();
      inputProcessor =
          new StreamInputProcessor<IN>(
              inputGates,
              inSerializer,
              getCheckpointBarrierListener(),
              configuration.getCheckpointMode(),
              getEnvironment().getIOManager(),
              getExecutionConfig().areTimestampsEnabled());

      // make sure that stream tasks report their I/O statistics
      AccumulatorRegistry registry = getEnvironment().getAccumulatorRegistry();
      AccumulatorRegistry.Reporter reporter = registry.getReadWriteReporter();
      inputProcessor.setReporter(reporter);
    }
  }
  @Override
  public final void registerInputOutput() throws Exception {
    LOG.debug("registerInputOutput for {}", getName());

    boolean initializationCompleted = false;
    try {
      AccumulatorRegistry accumulatorRegistry = getEnvironment().getAccumulatorRegistry();

      userClassLoader = getUserCodeClassLoader();
      configuration = new StreamConfig(getTaskConfiguration());
      accumulatorMap = accumulatorRegistry.getUserMap();

      stateBackend = createStateBackend();
      stateBackend.initializeForJob(getEnvironment().getJobID());

      headOperator = configuration.getStreamOperator(userClassLoader);
      operatorChain =
          new OperatorChain<>(this, headOperator, accumulatorRegistry.getReadWriteReporter());

      if (headOperator != null) {
        headOperator.setup(this, configuration, operatorChain.getChainEntryPoint());
      }

      timerService =
          Executors.newSingleThreadScheduledExecutor(
              new DispatcherThreadFactory(TRIGGER_THREAD_GROUP, "Time Trigger for " + getName()));

      // task specific initialization
      init();

      initializationCompleted = true;
    } finally {
      if (!initializationCompleted) {
        if (timerService != null) {
          timerService.shutdownNow();
        }
        if (operatorChain != null) {
          operatorChain.releaseOutputs();
        }
      }
    }
  }
  @SuppressWarnings("unchecked")
  private void initializeOutput() {
    outputList = new ConcurrentLinkedQueue<Object>();

    mockEnv.addOutput(outputList, outputStreamRecordSerializer);

    streamConfig.setOutputSelectors(Collections.<OutputSelector<?>>emptyList());
    streamConfig.setNumberOfOutputs(1);

    StreamOperator<OUT> dummyOperator =
        new AbstractStreamOperator<OUT>() {
          private static final long serialVersionUID = 1L;
        };

    List<StreamEdge> outEdgesInOrder = new LinkedList<StreamEdge>();
    StreamNode sourceVertexDummy =
        new StreamNode(
            null,
            0,
            "group",
            dummyOperator,
            "source dummy",
            new LinkedList<OutputSelector<?>>(),
            SourceStreamTask.class);
    StreamNode targetVertexDummy =
        new StreamNode(
            null,
            1,
            "group",
            dummyOperator,
            "target dummy",
            new LinkedList<OutputSelector<?>>(),
            SourceStreamTask.class);

    outEdgesInOrder.add(
        new StreamEdge(
            sourceVertexDummy,
            targetVertexDummy,
            0,
            new LinkedList<String>(),
            new BroadcastPartitioner<Object>()));
    streamConfig.setOutEdgesInOrder(outEdgesInOrder);
    streamConfig.setNonChainedOutputs(outEdgesInOrder);
    streamConfig.setTypeSerializerOut(outputSerializer);
    streamConfig.setVertexID(0);
  }
  /**
   * This test verifies that checkpoint barriers and barrier buffers work correctly with concurrent
   * checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e. some inputs
   * receive barriers from an earlier checkpoint and are therefore blocked, and then all inputs
   * receive barriers from a later checkpoint.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testOvertakingCheckpointBarriers() throws Exception {
    final TwoInputStreamTask<String, Integer, String> coMapTask =
        new TwoInputStreamTask<String, Integer, String>();
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
        new TwoInputStreamTaskTestHarness<String, Integer, String>(
            coMapTask,
            2,
            2,
            new int[] {1, 2},
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator =
        new CoStreamMap<String, Integer, String>(new IdentityMap());
    streamConfig.setStreamOperator(coMapOperator);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
    long initialTime = 0L;

    testHarness.invoke();

    testHarness.processEvent(new CheckpointBarrier(0, 0), 0, 0);

    // These elements should be buffered until we receive barriers from
    // all inputs
    testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
    testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

    // These elements should be forwarded, since we did not yet receive a checkpoint barrier
    // on that input. Only add to the same input, otherwise we would not know the ordering
    // of the output, since the Task might read the inputs in any order.
    testHarness.processElement(new StreamRecord<Integer>(42, initialTime), 1, 1);
    testHarness.processElement(new StreamRecord<Integer>(1337, initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<String>("42", initialTime));
    expectedOutput.add(new StreamRecord<String>("1337", initialTime));

    testHarness.waitForInputProcessing();
    // we should not yet see the barrier, only the two elements from the non-blocked input
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    // Now give a later barrier to all inputs; this should unblock the first channel,
    // thereby allowing the two blocked elements through
    testHarness.processEvent(new CheckpointBarrier(1, 1), 0, 0);
    testHarness.processEvent(new CheckpointBarrier(1, 1), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(1, 1), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(1, 1), 1, 1);

    expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
    expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
    expectedOutput.add(new CheckpointBarrier(1, 1));

    testHarness.waitForInputProcessing();

    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    // Then give the earlier barriers; these should be ignored
    testHarness.processEvent(new CheckpointBarrier(0, 0), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(0, 0), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(0, 0), 1, 1);

    testHarness.waitForInputProcessing();

    testHarness.endInput();

    testHarness.waitForTaskCompletion();

    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());
  }
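The "overtaking" behavior exercised above comes down to one rule in the barrier alignment: a barrier for a newer checkpoint releases channels still blocked on an older one, and late barriers of the overtaken checkpoint are dropped. The sketch below only illustrates that rule; it is not the actual barrier-buffer implementation, and all names are made up.

// Simplified illustration of the subsumption rule the test exercises.
// Not the real barrier-alignment code; field and method names are hypothetical.
final class BarrierAlignmentSketch {

  private long currentCheckpointId = -1L;
  private final java.util.Set<Integer> blockedChannels = new java.util.HashSet<Integer>();

  void onBarrier(long checkpointId, int channel) {
    if (checkpointId > currentCheckpointId) {
      // a newer checkpoint overtakes the pending one: unblock everything, start over
      blockedChannels.clear();
      currentCheckpointId = checkpointId;
      blockedChannels.add(channel);
    } else if (checkpointId == currentCheckpointId) {
      // normal alignment: block this channel until all channels delivered the barrier
      blockedChannels.add(channel);
    }
    // checkpointId < currentCheckpointId: a late barrier of the overtaken
    // checkpoint, ignored ("these should be ignored" in the test above)
  }
}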
  /** This test verifies that checkpoint barriers are correctly forwarded. */
  @Test
  @SuppressWarnings("unchecked")
  public void testCheckpointBarriers() throws Exception {
    final TwoInputStreamTask<String, Integer, String> coMapTask =
        new TwoInputStreamTask<String, Integer, String>();
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
        new TwoInputStreamTaskTestHarness<String, Integer, String>(
            coMapTask,
            2,
            2,
            new int[] {1, 2},
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator =
        new CoStreamMap<String, Integer, String>(new IdentityMap());
    streamConfig.setStreamOperator(coMapOperator);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
    long initialTime = 0L;

    testHarness.invoke();

    testHarness.processEvent(new CheckpointBarrier(0, 0), 0, 0);

    // This element should be buffered since we received a checkpoint barrier on
    // this input
    testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);

    // This one should go through
    testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 1);
    expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));

    // These elements should be forwarded, since we did not yet receive a checkpoint barrier
    // on that input. Only add to the same input, otherwise we would not know the ordering
    // of the output, since the Task might read the inputs in any order.
    testHarness.processElement(new StreamRecord<Integer>(11, initialTime), 1, 1);
    testHarness.processElement(new StreamRecord<Integer>(111, initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<String>("11", initialTime));
    expectedOutput.add(new StreamRecord<String>("111", initialTime));

    testHarness.waitForInputProcessing();
    // we should not yet see the barrier, only the two elements from the non-blocked input
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.processEvent(new CheckpointBarrier(0, 0), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(0, 0), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(0, 0), 1, 1);

    testHarness.waitForInputProcessing();

    // now we should see the barrier and after that the buffered elements
    expectedOutput.add(new CheckpointBarrier(0, 0));
    expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.endInput();

    testHarness.waitForTaskCompletion();

    List<String> resultElements = TestHarnessUtil.getRawElementsFromOutput(testHarness.getOutput());
    Assert.assertEquals(4, resultElements.size());
  }
  /**
   * This test verifies that watermarks are correctly forwarded. This also checks whether watermarks
   * are forwarded only when we have received watermarks from all inputs. The forwarded watermark
   * must be the minimum of the watermarks of all inputs.
   */
  @Test
  @SuppressWarnings("unchecked")
  public void testWatermarkForwarding() throws Exception {
    final TwoInputStreamTask<String, Integer, String> coMapTask =
        new TwoInputStreamTask<String, Integer, String>();
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
        new TwoInputStreamTaskTestHarness<String, Integer, String>(
            coMapTask,
            2,
            2,
            new int[] {1, 2},
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator =
        new CoStreamMap<String, Integer, String>(new IdentityMap());
    streamConfig.setStreamOperator(coMapOperator);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
    long initialTime = 0L;

    testHarness.invoke();

    testHarness.processElement(new Watermark(initialTime), 0, 0);
    testHarness.processElement(new Watermark(initialTime), 0, 1);

    testHarness.processElement(new Watermark(initialTime), 1, 0);

    // now the output should still be empty
    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.processElement(new Watermark(initialTime), 1, 1);

    // now the watermark should have propagated; the map simply forwards watermarks
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime));
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    // contrary to checkpoint barriers, these elements are not blocked by watermarks
    testHarness.processElement(new StreamRecord<String>("Hello", initialTime), 0, 0);
    testHarness.processElement(new StreamRecord<Integer>(42, initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<String>("Hello", initialTime));
    expectedOutput.add(new StreamRecord<String>("42", initialTime));

    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.processElement(new Watermark(initialTime + 4), 0, 0);
    testHarness.processElement(new Watermark(initialTime + 3), 0, 1);
    testHarness.processElement(new Watermark(initialTime + 3), 1, 0);
    testHarness.processElement(new Watermark(initialTime + 2), 1, 1);

    // check whether we get the minimum of all the watermarks; this must also occur in
    // the output only after the two StreamRecords
    expectedOutput.add(new Watermark(initialTime + 2));
    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    // advance the watermark on one of the inputs; now we should get a new one since the
    // minimum increases
    testHarness.processElement(new Watermark(initialTime + 4), 1, 1);
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime + 3));
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    // advance the other two inputs; now we should get a new one since the
    // minimum increases again
    testHarness.processElement(new Watermark(initialTime + 4), 0, 1);
    testHarness.processElement(new Watermark(initialTime + 4), 1, 0);
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime + 4));
    TestHarnessUtil.assertOutputEquals(
        "Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.endInput();

    testHarness.waitForTaskCompletion();

    List<String> resultElements = TestHarnessUtil.getRawElementsFromOutput(testHarness.getOutput());
    Assert.assertEquals(2, resultElements.size());
  }
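The forwarding rule verified above is simply: track the latest watermark per input and forward the minimum of the two whenever that minimum advances. A simplified two-input illustration of that bookkeeping (per logical input, ignoring the per-channel minimum the real task also maintains; names are made up):

// Simplified two-input watermark bookkeeping mirroring what the test checks:
// the combined watermark is the minimum over both inputs and only moves forward.
final class TwoInputWatermarkSketch {

  private long watermark1 = Long.MIN_VALUE;
  private long watermark2 = Long.MIN_VALUE;
  private long combined = Long.MIN_VALUE;

  void onWatermark1(long timestamp) {
    watermark1 = Math.max(watermark1, timestamp);
    maybeForward();
  }

  void onWatermark2(long timestamp) {
    watermark2 = Math.max(watermark2, timestamp);
    maybeForward();
  }

  private void maybeForward() {
    long newMin = Math.min(watermark1, watermark2);
    if (newMin > combined) {
      combined = newMin;
      // forward new Watermark(combined); in the test above this yields 0, then 2, 3, 4
    }
  }
}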