Example 1
 @Override
 public void run() {
   try {
     workflow.getService("test").run();
   } catch (ExecutionException e) {
     e.printStackTrace();
   }
 }
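This fragment overrides Runnable.run(), so on its own it needs a wrapper class before it can be handed to a thread. Below is a minimal sketch of such a wrapper, assuming it corresponds to the WorkflowRunner that Example 2 passes to its background thread; the class name, the constructor, and the packages of Workflow and ExecutionException are assumptions, since the snippet does not show them. The hard-coded "test" service name matches the job defined in Example 2's workflow string.

 public class WorkflowRunner implements Runnable {
   private final Workflow workflow;

   public WorkflowRunner(Workflow workflow) {
     this.workflow = workflow;
   }

   @Override
   public void run() {
     try {
       // Runnable.run() cannot rethrow checked exceptions, so execute the
       // "test" service and report any failure here.
       workflow.getService("test").run();
     } catch (ExecutionException e) {
       e.printStackTrace();
     }
   }
 }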
Example 2
  @SuppressWarnings("deprecation")
  public void testSingleMethodPersistence() throws IOException, ParseException, CompilerError {
    String workflowStr = "workflow {\n" + "def job test() {\n" + "}\n" + "}\n";
    Workflow workflow =
        new SparkWorkflowParser(dbAccessor).loadWorkflow(new StringReader(workflowStr));
    MockBlockingTask execution = new MockBlockingTask();
    workflow.getJobs().get("test").addTask(execution);

    workflow.compile(mockJmxServer);

    assertFalse(execution.runToPause);
    assertFalse(execution.runToEnd);

    Thread parallelRun = new Thread(new WorkflowRunner(workflow));
    parallelRun.setDaemon(true);
    parallelRun.start();

    for (int i = 0; !execution.runToPause; i++) {
      if (i > 10000000) fail("Timeout waiting for blocking task to start");
      Thread.yield();
    }
    assertTrue(execution.runToPause);
    assertFalse(execution.runToEnd);

    // This is intentional: we're simulating an unexpected shutdown.
    parallelRun.stop();
    execution = null;
    workflow = null;
    parallelRun = null;

    workflow = new SparkWorkflowParser(dbAccessor).loadWorkflow(new StringReader(workflowStr));
    execution = new MockBlockingTask();
    workflow.getJobs().get("test").addTask(execution);

    workflow.compile(new JmxServer("dummy", 0));
    execution.unblock();
    // We need to call launchServices() here to restart any blocked jobs.
    // Since the test service did not complete, it should be relaunched automatically.
    workflow.launchServices();

    for (int i = 0; !execution.runToEnd; i++) {
      if (i > 10000000) fail("Timeout waiting for blocking task to complete");
      Thread.yield();
    }
    assertTrue(execution.runToEnd);

    // Check that all contexts are cleared
    assertEquals(0, dbAccessor.getNumContexts());
  }
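
The test above coordinates with the workflow thread entirely through MockBlockingTask, whose source is not shown. The following is a minimal sketch inferred from the members the test touches (the runToPause and runToEnd flags and unblock()), assuming the flags are raised when the task reaches its blocking point and again when it runs to completion; whatever task interface addTask() actually expects is not visible here and is omitted.

  public class MockBlockingTask {
    // volatile so the test's polling loops see updates made by the workflow thread
    public volatile boolean runToPause = false;
    public volatile boolean runToEnd = false;

    private final Object lock = new Object();
    private boolean blocked = true;

    // Called when the task executes: signal that the blocking point was reached,
    // then park until the test calls unblock().
    public void execute() {
      runToPause = true;
      synchronized (lock) {
        while (blocked) {
          try {
            lock.wait();
          } catch (InterruptedException e) {
            // Give up if the executing thread is interrupted.
            Thread.currentThread().interrupt();
            return;
          }
        }
      }
      runToEnd = true;
    }

    // Called by the test to let a pending (or not-yet-started) task run to completion.
    public void unblock() {
      synchronized (lock) {
        blocked = false;
        lock.notifyAll();
      }
    }
  }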