public void testConfigNotPropagation() throws Exception {
    Path subWorkflowAppPath = getFsTestCaseDir();
    FileSystem fs = getFileSystem();
    Writer writer = new OutputStreamWriter(fs.create(new Path(subWorkflowAppPath, "workflow.xml")));
    writer.write(APP1);
    writer.close();

    XConfiguration protoConf = getBaseProtoConf();
    WorkflowJobBean workflow = createBaseWorkflow(protoConf, "W");
    String defaultConf = workflow.getConf();
    XConfiguration newConf = new XConfiguration(new StringReader(defaultConf));
    newConf.set("abc", "xyz");
    workflow.setConf(newConf.toXmlString());

    final WorkflowActionBean action = (WorkflowActionBean) workflow.getActions().get(0);
    action.setConf(
        "<sub-workflow xmlns='uri:oozie:workflow:0.1' name='subwf'>"
            + "      <app-path>"
            + subWorkflowAppPath
            + File.separator
            + "workflow.xml"
            + "</app-path>"
            + "      <configuration>"
            + "        <property>"
            + "          <name>a</name>"
            + "          <value>A</value>"
            + "        </property>"
            + "      </configuration>"
            + "</sub-workflow>");

    SubWorkflowActionExecutor subWorkflow = new SubWorkflowActionExecutor();
    subWorkflow.start(new Context(workflow, action), action);

    final OozieClient oozieClient =
        subWorkflow.getWorkflowClient(
            new Context(workflow, action), SubWorkflowActionExecutor.LOCAL);
    waitFor(
        JOB_TIMEOUT,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return oozieClient.getJobInfo(action.getExternalId()).getStatus()
                == WorkflowJob.Status.SUCCEEDED;
          }
        });

    assertEquals(
        WorkflowJob.Status.SUCCEEDED, oozieClient.getJobInfo(action.getExternalId()).getStatus());

    subWorkflow.check(new Context(workflow, action), action);

    assertEquals(WorkflowAction.Status.DONE, action.getStatus());

    subWorkflow.end(new Context(workflow, action), action);

    assertEquals(WorkflowAction.Status.OK, action.getStatus());

    WorkflowJob wf = oozieClient.getJobInfo(action.getExternalId());
    Configuration childConf = new XConfiguration(new StringReader(wf.getConf()));
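    // the "abc" property added to the parent configuration must not show up in the sub-workflow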
    assertNull(childConf.get("abc"));
  }
Example #2
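 /** Resolves the workflow application path and returns a FileSystem for it via the HadoopAccessorService. */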
 protected FileSystem getAppFileSystem(WorkflowJob workflow)
     throws HadoopAccessorException, IOException, URISyntaxException {
   URI uri = new URI(workflow.getAppPath());
   HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
   Configuration fsConf = has.createJobConf(uri.getAuthority());
   return has.createFileSystem(workflow.getUser(), uri, fsConf);
 }
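A hypothetical usage sketch for the helper above (not part of the original example); it assumes the enclosing command class exposes a LOG field, as the other examples here do.

 // Hypothetical caller: check that the workflow's application path exists on its file system.
 private void logAppPathStatus(WorkflowJob workflow) throws Exception {
   FileSystem fs = getAppFileSystem(workflow);
   Path appPath = new Path(workflow.getAppPath());
   LOG.debug("App path " + appPath + " exists: " + fs.exists(appPath));
 }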
Example #3
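 /** Deletes the workflow's temporary directory once the workflow has ended. */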
 @Override
 protected Void execute() throws CommandException {
   LOG.debug("STARTED WFEndXCommand " + job.getId());
   deleteWFDir();
   LOG.debug("ENDED WFEndXCommand " + job.getId());
   return null;
 }
  @Test(enabled = false)
  public void testTableReplicationWithExistingTargetPartition() throws Exception {
    final String feedName = "customer-table-replicating-feed";
    final Map<String, String> overlay = sourceContext.getUniqueOverlay();
    String filePath =
        TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);

    filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);

    HCatPartition sourcePartition =
        HiveTestUtils.getPartition(
            sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(sourcePartition);

    addPartitionToTarget();
    // verify that the partition already exists on the target before replication starts,
    // so we can confirm that the import drops it before re-importing
    HCatPartition targetPartition =
        HiveTestUtils.getPartition(
            targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(targetPartition);

    filePath =
        TestContext.overlayParametersOverTemplate(
            "/table/customer-table-replicating-feed.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath), 0);

    // wait until the workflow job completes
    WorkflowJob jobInfo =
        OozieTestUtils.getWorkflowJob(
            targetContext.getCluster().getCluster(),
            OozieClient.FILTER_NAME + "=FALCON_FEED_REPLICATION_" + feedName);
    Assert.assertEquals(jobInfo.getStatus(), WorkflowJob.Status.SUCCEEDED);

    // verify if the partition on the target exists
    targetPartition =
        HiveTestUtils.getPartition(
            targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(targetPartition);

    InstancesResult response =
        targetContext
            .getService()
            .path("api/instance/running/feed/" + feedName)
            .header("Cookie", targetContext.getAuthenticationToken())
            .accept(MediaType.APPLICATION_JSON)
            .get(InstancesResult.class);
    Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);

    TestContext.executeWithURL("entity -delete -type feed -name customer-table-replicating-feed");
    TestContext.executeWithURL("entity -delete -type cluster -name primary-cluster");
    TestContext.executeWithURL("entity -delete -type cluster -name bcp-cluster");
  }
  public void testSubworkflowLib() throws Exception {
    XConfiguration protoConf = getBaseProtoConf();
    WorkflowJobBean workflow = createBaseWorkflow(protoConf, "W");
    FileSystem fs = getFileSystem();
    Path parentLibJar = new Path(getFsTestCaseDir(), "lib/parentLibrary.jar");
    fs.create(parentLibJar).close();
    assertTrue(fs.exists(parentLibJar));
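    // point the parent workflow's libpath at the directory containing parentLibrary.jar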
    String defaultConf = workflow.getConf();
    XConfiguration newConf = new XConfiguration(new StringReader(defaultConf));
    newConf.set(OozieClient.LIBPATH, parentLibJar.getParent().toString());
    workflow.setConf(newConf.toXmlString());

    Path subWorkflowAppPath = new Path(getFsTestCaseDir(), "subwf");
    Writer writer = new OutputStreamWriter(fs.create(new Path(subWorkflowAppPath, "workflow.xml")));
    writer.write(APP1);
    writer.close();
    Path subwfLibJar = new Path(subWorkflowAppPath, "lib/subwfLibrary.jar");
    fs.create(subwfLibJar).close();
    assertTrue(fs.exists(subwfLibJar));

    final WorkflowActionBean action = (WorkflowActionBean) workflow.getActions().get(0);
    action.setConf(
        "<sub-workflow xmlns='uri:oozie:workflow:0.1' name='subwf'>"
            + "      <app-path>"
            + subWorkflowAppPath
            + File.separator
            + "workflow.xml"
            + "</app-path>"
            + "</sub-workflow>");
    SubWorkflowActionExecutor subWorkflow = new SubWorkflowActionExecutor();
    subWorkflow.start(new Context(workflow, action), action);

    final OozieClient oozieClient =
        subWorkflow.getWorkflowClient(
            new Context(workflow, action), SubWorkflowActionExecutor.LOCAL);
    waitFor(
        JOB_TIMEOUT,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return oozieClient.getJobInfo(action.getExternalId()).getStatus()
                == WorkflowJob.Status.SUCCEEDED;
          }
        });

    assertEquals(
        WorkflowJob.Status.SUCCEEDED, oozieClient.getJobInfo(action.getExternalId()).getStatus());
    subWorkflow.check(new Context(workflow, action), action);
    assertEquals(WorkflowAction.Status.DONE, action.getStatus());
    subWorkflow.end(new Context(workflow, action), action);
    assertEquals(WorkflowAction.Status.OK, action.getStatus());

    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    WorkflowJob wf = oozieClient.getJobInfo(action.getExternalId());
    Configuration childConf = new XConfiguration(new StringReader(wf.getConf()));
    childConf = wps.createProtoActionConf(childConf, "authToken", true);
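    // only the sub-workflow's own lib jar should be on the child's app lib path, not the parent's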
    assertEquals(childConf.get(WorkflowAppService.APP_LIB_PATH_LIST), subwfLibJar.toString());
  }
Example #6
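 /** Deletes the workflow's temporary directory under the user's home directory; failures are wrapped in a CommandException (E0819). */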
 private void deleteWFDir() throws CommandException {
   FileSystem fs;
   try {
     fs = getAppFileSystem(job);
     String wfDir = Services.get().getSystemId() + "/" + job.getId();
     Path wfDirPath = new Path(fs.getHomeDirectory(), wfDir);
     LOG.debug("WF tmp dir :" + wfDirPath);
     if (fs.exists(wfDirPath)) {
       fs.delete(wfDirPath, true);
     } else {
       LOG.debug("Tmp dir doesn't exist :" + wfDirPath);
     }
   } catch (Exception e) {
     LOG.error("Unable to delete WF temp dir of wf id :" + job.getId(), e);
     throw new CommandException(ErrorCode.E0819, job.getId(), e);
   }
 }
  public void testGetGroupFromParent() throws Exception {
    Path subWorkflowAppPath = getFsTestCaseDir();
    FileSystem fs = getFileSystem();
    Writer writer = new OutputStreamWriter(fs.create(new Path(subWorkflowAppPath, "workflow.xml")));
    writer.write(APP1);
    writer.close();

    XConfiguration protoConf = getBaseProtoConf();
    final WorkflowJobBean workflow = createBaseWorkflow(protoConf, "W");
    String defaultConf = workflow.getConf();
    XConfiguration newConf = new XConfiguration(new StringReader(defaultConf));
    String actionConf =
        "<sub-workflow xmlns='uri:oozie:workflow:0.1' name='subwf'>"
            + "      <app-path>"
            + subWorkflowAppPath
            + File.separator
            + "workflow.xml"
            + "</app-path>"
            + "      <configuration>"
            + "        <property>"
            + "          <name>a</name>"
            + "          <value>A</value>"
            + "        </property>"
            + "      </configuration>"
            + "</sub-workflow>";

    final WorkflowActionBean action = (WorkflowActionBean) workflow.getActions().get(0);
    action.setConf(actionConf);

    // negative test: no group name is set on the parent, so the child must not inherit one
    final SubWorkflowActionExecutor subWorkflow = new SubWorkflowActionExecutor();
    workflow.setConf(newConf.toXmlString());

    subWorkflow.start(new Context(workflow, action), action);

    OozieClient oozieClient =
        subWorkflow.getWorkflowClient(
            new Context(workflow, action), SubWorkflowActionExecutor.LOCAL);
    waitFor(
        5000,
        new Predicate() {
          @Override
          public boolean evaluate() throws Exception {
            subWorkflow.check(new Context(workflow, action), action);
            return action.getStatus() == WorkflowActionBean.Status.DONE;
          }
        });

    subWorkflow.check(new Context(workflow, action), action);
    subWorkflow.end(new Context(workflow, action), action);

    assertEquals(WorkflowAction.Status.OK, action.getStatus());

    WorkflowJob wf = oozieClient.getJobInfo(action.getExternalId());
    Configuration childConf = new XConfiguration(new StringReader(wf.getConf()));

    assertFalse(getTestGroup().equals(childConf.get(OozieClient.GROUP_NAME)));

    // positive test: the group name set on the parent should be propagated to the child workflow
    newConf.set(OozieClient.GROUP_NAME, getTestGroup());
    workflow.setConf(newConf.toXmlString());
    final WorkflowActionBean action1 = new WorkflowActionBean();
    action1.setConf(actionConf);
    action1.setId("W1");

    subWorkflow.start(new Context(workflow, action1), action1);

    oozieClient =
        subWorkflow.getWorkflowClient(
            new Context(workflow, action1), SubWorkflowActionExecutor.LOCAL);

    waitFor(
        5000,
        new Predicate() {
          @Override
          public boolean evaluate() throws Exception {
            subWorkflow.check(new Context(workflow, action1), action1);
            return action1.getStatus() == WorkflowActionBean.Status.DONE;
          }
        });

    subWorkflow.check(new Context(workflow, action1), action1);
    subWorkflow.end(new Context(workflow, action1), action1);

    wf = oozieClient.getJobInfo(action1.getExternalId());
    childConf = new XConfiguration(new StringReader(wf.getConf()));
    assertEquals(getTestGroup(), childConf.get(OozieClient.GROUP_NAME));
  }
Example #8
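 /** The workflow job id is used as this command's entity key (e.g. for locking by the command framework). */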
 @Override
 public String getEntityKey() {
   return job.getId();
 }
Example #9
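  // Copies the Oozie job log (and any matching task logs) into the run-specific log directory;
  // always returns 0 so that a failed log move never fails the user workflow.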
  public int run(WorkflowExecutionContext context) {
    try {
      OozieClient client = new OozieClient(context.getWorkflowEngineUrl());
      WorkflowJob jobInfo;
      try {
        jobInfo = client.getJobInfo(context.getUserSubflowId());
      } catch (OozieClientException e) {
        LOG.error("Error getting jobinfo for: {}", context.getUserSubflowId(), e);
        return 0;
      }

      // Assumption: each workflow run has its own directory,
      // and the corresponding job logs are stored within that directory.
      Path path =
          new Path(context.getLogDir() + "/" + String.format("%03d", context.getWorkflowRunId()));
      FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri(), getConf());

      if (EntityType.FEED.name().equalsIgnoreCase(context.getEntityType())
          || notUserWorkflowEngineIsOozie(context.getUserWorkflowEngine())) {
        // this is a replication wf, retention wf, or Pig process
        copyOozieLog(client, fs, path, jobInfo.getId());

        List<WorkflowAction> workflowActions = jobInfo.getActions();
        for (WorkflowAction workflowAction : workflowActions) {
          if (FALCON_ACTIONS.contains(workflowAction.getName())) {
            copyTTlogs(fs, path, workflowAction);
            break;
          }
        }
      } else {
        String flowId;
        // process wf using the pig or hive engine
        if (context.getUserWorkflowEngine().equals("pig")
            || context.getUserWorkflowEngine().equals("hive")) {
          flowId = jobInfo.getId();
        } else {
          // process wf using the oozie engine
          flowId = jobInfo.getExternalId();
        }
        copyOozieLog(client, fs, path, flowId);
        WorkflowJob subflowInfo = client.getJobInfo(flowId);
        List<WorkflowAction> actions = subflowInfo.getActions();
        for (WorkflowAction action : actions) {
          if (isActionTypeSupported(action)) {
            LOG.info(
                "Copying hadoop TT log for action: {} of type: {}",
                action.getName(),
                action.getType());
            copyTTlogs(fs, path, action);
          } else {
            LOG.info(
                "Ignoring hadoop TT log for non supported action: {} of type: {}",
                action.getName(),
                action.getType());
          }
        }
      }

    } catch (Exception e) {
      // JobLogMover never rethrows: a failed log move must not fail the user workflow.
      LOG.error("Exception in log mover:", e);
    }
    return 0;
  }
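Most of the examples above obtain a WorkflowJob through OozieClient.getJobInfo and check its status. The minimal standalone sketch below shows just that call in isolation; the server URL and job id are placeholders, not values taken from the examples above.

import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.apache.oozie.client.WorkflowJob;

public class WorkflowStatusSketch {
  public static void main(String[] args) throws OozieClientException {
    // Placeholder Oozie server URL and workflow job id.
    OozieClient client = new OozieClient("http://localhost:11000/oozie");
    WorkflowJob job = client.getJobInfo("0000001-200101000000000-oozie-W");
    System.out.println(job.getAppName() + " [" + job.getId() + "] is " + job.getStatus());
  }
}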