  /**
   * Tests should be enabled only in local environments, as they need a running instance of the webserver.
   */
  @Test
  public void testUpdateCheckUser() throws Exception {
    Map<String, String> overlay = getUniqueOverlay();
    String tmpFileName = overlayParametersOverTemplate(PROCESS_TEMPLATE, overlay);
    Process process =
        (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
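    // Push the cluster validity end time two days into the future so the process is still running when it is updated.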
    Validity processValidity = process.getClusters().getClusters().get(0).getValidity();
    processValidity.setEnd(new Date(new Date().getTime() + 2 * 24 * 60 * 60 * 1000));
    File tmpFile = getTempFile();
    EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
    scheduleProcess(tmpFile.getAbsolutePath(), overlay);
    waitForBundleStart(Status.RUNNING);

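    // The initial schedule should create exactly one bundle, owned by the submitting user.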
    List<BundleJob> bundles = getBundles();
    Assert.assertEquals(bundles.size(), 1);
    Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);

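    // Fetch the current definition of the output feed so it can be modified below.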
    ClientResponse response =
        this.service
            .path("api/entities/definition/feed/" + outputFeedName)
            .header("Remote-User", REMOTE_USER)
            .accept(MediaType.TEXT_XML)
            .get(ClientResponse.class);
    Feed feed =
        (Feed)
            EntityType.FEED
                .getUnmarshaller()
                .unmarshal(new StringReader(response.getEntity(String.class)));

    // change output feed path and update feed as another user
    feed.getLocations()
        .getLocations()
        .get(0)
        .setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}");
    tmpFile = getTempFile();
    EntityType.FEED.getMarshaller().marshal(feed, tmpFile);
    response =
        this.service
            .path("api/entities/update/feed/" + outputFeedName)
            .header("Remote-User", "testuser")
            .accept(MediaType.TEXT_XML)
            .post(ClientResponse.class, getServletInputStream(tmpFile.getAbsolutePath()));
    assertSuccessful(response);

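    // The update should create a second bundle; both bundles must still run as REMOTE_USER, not as "testuser".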
    bundles = getBundles();
    Assert.assertEquals(bundles.size(), 2);
    Assert.assertEquals(bundles.get(0).getUser(), REMOTE_USER);
    Assert.assertEquals(bundles.get(1).getUser(), REMOTE_USER);
  }
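A minimal sketch of the assertSuccessful helper used above, assuming the update call returns a JAXB-marshalled APIResult whose status should be SUCCEEDED; the real helper lives in the test base class and may differ.

  // Hypothetical sketch of the assertSuccessful helper used above; the real one may differ.
  private void assertSuccessful(ClientResponse response) throws JAXBException {
    // Assumes the server marshals an APIResult into the response body as XML.
    APIResult result =
        (APIResult)
            JAXBContext.newInstance(APIResult.class)
                .createUnmarshaller()
                .unmarshal(new StringReader(response.getEntity(String.class)));
    Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
  }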
Example #2
  public int run(WorkflowExecutionContext context) {
    try {
      OozieClient client = new OozieClient(context.getWorkflowEngineUrl());
      WorkflowJob jobInfo;
      try {
        jobInfo = client.getJobInfo(context.getUserSubflowId());
      } catch (OozieClientException e) {
        LOG.error("Error getting jobinfo for: {}", context.getUserSubflowId(), e);
        return 0;
      }

      // Assumption: each workflow run has its own directory, and the
      // corresponding job logs are stored within that directory.
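      // e.g. <logDir>/001 for workflow run 1 (the run id is zero-padded to three digits)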
      Path path =
          new Path(context.getLogDir() + "/" + String.format("%03d", context.getWorkflowRunId()));
      FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri(), getConf());

      if (EntityType.FEED.name().equalsIgnoreCase(context.getEntityType())
          || notUserWorkflowEngineIsOozie(context.getUserWorkflowEngine())) {
        // if replication wf, retention wf, or a PIG process
        copyOozieLog(client, fs, path, jobInfo.getId());

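        // Copy the TaskTracker logs for the first Falcon-managed action found in the workflow.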
        List<WorkflowAction> workflowActions = jobInfo.getActions();
        for (WorkflowAction action : workflowActions) {
          if (FALCON_ACTIONS.contains(action.getName())) {
            copyTTlogs(fs, path, action);
            break;
          }
        }
      } else {
        String flowId;
        // if process wf with pig, hive
        if (context.getUserWorkflowEngine().equals("pig")
            || context.getUserWorkflowEngine().equals("hive")) {
          flowId = jobInfo.getId();
        } else {
          // if process wf with oozie engine
          flowId = jobInfo.getExternalId();
        }
        copyOozieLog(client, fs, path, flowId);
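        // The user workflow runs as an Oozie sub-flow; copy the TT logs of each supported action in it.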
        WorkflowJob subflowInfo = client.getJobInfo(flowId);
        List<WorkflowAction> actions = subflowInfo.getActions();
        for (WorkflowAction action : actions) {
          if (isActionTypeSupported(action)) {
            LOG.info(
                "Copying hadoop TT log for action: {} of type: {}",
                action.getName(),
                action.getType());
            copyTTlogs(fs, path, action);
          } else {
            LOG.info(
                "Ignoring hadoop TT log for non supported action: {} of type: {}",
                action.getName(),
                action.getType());
          }
        }
      }

    } catch (Exception e) {
      // JobLogMover doesn't throw exceptions; a failed log mover will not fail the user workflow
      LOG.error("Exception in log mover:", e);
    }
    return 0;
  }
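For reference, plausible sketches of the two predicates used in run(), under the assumption that anything other than the oozie engine counts as a user workflow engine, and that only pig, hive, java, and map-reduce actions have TaskTracker logs worth copying; the actual implementations may differ.

  // Hypothetical sketches of the predicates referenced in run(); the real methods may differ.
  private boolean notUserWorkflowEngineIsOozie(String userWorkflowEngine) {
    // Assumption: a non-null engine other than "oozie" means the user workflow is not a plain Oozie workflow.
    return userWorkflowEngine != null && !"oozie".equalsIgnoreCase(userWorkflowEngine);
  }

  private boolean isActionTypeSupported(WorkflowAction action) {
    // Assumption: only these action types produce hadoop TaskTracker logs worth copying.
    String type = action.getType();
    return "pig".equals(type)
        || "hive".equals(type)
        || "java".equals(type)
        || "map-reduce".equals(type);
  }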