Example #1
 public static void createDir(final Path dirPath) throws FalconException {
   FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(dirPath.toUri());
   try {
     if (!fs.exists(dirPath)) {
       LOG.info("Creating directory: {}", dirPath);
       HadoopClientFactory.mkdirsWithDefaultPerms(fs, dirPath);
     }
   } catch (IOException e) {
     throw new FalconException("Error creating directory: " + dirPath, e);
   }
 }
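A hedged usage sketch for the helper above. The enclosing class name HadoopUtil and the directory path are assumptions for illustration; the listing does not show the real class.

 import org.apache.falcon.FalconException;
 import org.apache.hadoop.fs.Path;

 public class CreateDirDemo {
   public static void main(String[] args) {
     // Hypothetical staging location; adjust to your cluster layout.
     Path stagingDir = new Path("/projects/falcon/staging");
     try {
       // No-op if the directory already exists; otherwise it is created
       // with Falcon's default permissions.
       HadoopUtil.createDir(stagingDir);
     } catch (FalconException e) {
       // The wrapped IOException is available as the cause.
       e.printStackTrace();
     }
   }
 }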
Example #2
  protected Path marshal(
      Cluster cluster, JAXBElement<?> jaxbElement, JAXBContext jaxbContext, Path outPath)
      throws FalconException {
    try {
      Marshaller marshaller = jaxbContext.createMarshaller();
      marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);

      if (LOG.isDebugEnabled()) {
        StringWriter writer = new StringWriter();
        marshaller.marshal(jaxbElement, writer);
        LOG.debug("Writing definition to {} on cluster {}", outPath, cluster.getName());
        LOG.debug(writer.getBuffer().toString());
      }

      FileSystem fs =
          HadoopClientFactory.get()
              .createProxiedFileSystem(outPath.toUri(), ClusterHelper.getConfiguration(cluster));
      OutputStream out = fs.create(outPath);
      try {
        marshaller.marshal(jaxbElement, out);
      } finally {
        out.close();
      }

      LOG.info("Marshalled {} to {}", jaxbElement.getDeclaredType(), outPath);
      return outPath;
    } catch (Exception e) {
      throw new FalconException("Unable to marshall app object", e);
    }
  }
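A minimal standalone sketch of the same marshal-to-HDFS pattern, assuming only hadoop-common and JAXB on the classpath. It uses the plain FileSystem.get instead of Falcon's proxied factory, and try-with-resources instead of the explicit finally; class and method names are illustrative.

 import java.io.OutputStream;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.Marshaller;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;

 public final class MarshalDemo {
   // Serializes a JAXB-bound object as formatted XML to the given HDFS path.
   public static void write(Object jaxbObject, JAXBContext context,
                            Path outPath, Configuration conf) throws Exception {
     Marshaller marshaller = context.createMarshaller();
     marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
     FileSystem fs = FileSystem.get(outPath.toUri(), conf);
     try (OutputStream out = fs.create(outPath)) {
       marshaller.marshal(jaxbObject, out);
     }
   }
 }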
Example #3
 public static void removeDir(final Path dirPath) throws FalconException {
   FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(dirPath.toUri());
   try {
     fs.delete(dirPath, true);
   } catch (IOException e) {
     throw new FalconException("Error creating directory: " + dirPath, e);
   }
 }
Example #4
 public static String createFile(final Path path, final String content) throws FalconException {
   OutputStream out = null;
   try {
     FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri());
     out = fs.create(path);
     out.write(content.getBytes());
   } catch (IOException e) {
     throw new FalconException("Error preparing script file: " + path, e);
   } finally {
     IOUtils.closeQuietly(out);
   }
   return path.toString();
 }
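A hypothetical caller for createFile; the class name HadoopUtil, the script path, and its content are illustrative assumptions.

 import org.apache.hadoop.fs.Path;

 public class CreateFileDemo {
   public static void main(String[] args) throws Exception {
     // Hypothetical script path and payload.
     Path scriptPath = new Path("/tmp/falcon/scripts/cleanup.sh");
     String written = HadoopUtil.createFile(scriptPath, "#!/bin/bash\necho done\n");
     System.out.println("Wrote " + written); // createFile returns the path as a String
   }
 }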
Example #5
  protected void copySharedLibs(Cluster cluster, Path libPath) throws FalconException {
   try {
     FileSystem fs =
         HadoopClientFactory.get()
             .createProxiedFileSystem(libPath.toUri(), ClusterHelper.getConfiguration(cluster));
     SharedLibraryHostingService.pushLibsToHDFS(
         fs,
         StartupProperties.get().getProperty("system.lib.location"),
         libPath,
         FALCON_JAR_FILTER);
   } catch (IOException e) {
     throw new FalconException("Failed to copy shared libs on cluster " + cluster.getName(), e);
   }
 }
Example #6
 public static String readHDFSFile(final String filePath, final String fileName)
     throws URISyntaxException, FalconException {
   BufferedReader br = null;
   try {
     Path path = new Path(filePath, fileName);
     FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri());
     br = new BufferedReader(new InputStreamReader(fs.open(path)));
     StringBuilder fileContent = new StringBuilder();
     String line;
      while ((line = br.readLine()) != null) {
        fileContent.append(line);
      }
     return fileContent.toString();
   } catch (IOException e) {
     throw new FalconException(
         "Error reading file from hdfs: " + filePath + fileName + ": " + e.toString(), e);
   } finally {
     IOUtils.closeQuietly(br);
   }
 }
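A hypothetical caller for readHDFSFile; the class name and paths are assumptions. Note that the helper appends lines without separators, so line breaks in the source file are lost in the returned String.

 public class ReadFileDemo {
   public static void main(String[] args) throws Exception {
     // The directory and file name are joined into one Path inside the helper.
     String content = HadoopUtil.readHDFSFile("/apps/falcon/conf", "runtime.properties");
     System.out.println(content);
   }
 }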
Example #7
  public int run(WorkflowExecutionContext context) {
    try {
      OozieClient client = new OozieClient(context.getWorkflowEngineUrl());
      WorkflowJob jobInfo;
      try {
        jobInfo = client.getJobInfo(context.getUserSubflowId());
      } catch (OozieClientException e) {
        LOG.error("Error getting jobinfo for: {}", context.getUserSubflowId(), e);
        return 0;
      }

      // Assumption: each workflow run has its own directory, and the
      // corresponding job logs are stored within that directory.
      Path path =
          new Path(context.getLogDir() + "/" + String.format("%03d", context.getWorkflowRunId()));
      FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(path.toUri(), getConf());

      if (EntityType.FEED.name().equalsIgnoreCase(context.getEntityType())
          || notUserWorkflowEngineIsOozie(context.getUserWorkflowEngine())) {
        // if replication wf, retention wf or PIG Process
        copyOozieLog(client, fs, path, jobInfo.getId());

        List<WorkflowAction> workflowActions = jobInfo.getActions();
        for (WorkflowAction action : workflowActions) {
          if (FALCON_ACTIONS.contains(action.getName())) {
            copyTTlogs(fs, path, action);
            break;
          }
        }
      } else {
        String flowId;
        // if process wf with pig, hive
        if (context.getUserWorkflowEngine().equals("pig")
            || context.getUserWorkflowEngine().equals("hive")) {
          flowId = jobInfo.getId();
        } else {
          // if process wf with oozie engine
          flowId = jobInfo.getExternalId();
        }
        copyOozieLog(client, fs, path, flowId);
        WorkflowJob subflowInfo = client.getJobInfo(flowId);
        List<WorkflowAction> actions = subflowInfo.getActions();
        for (WorkflowAction action : actions) {
          if (isActionTypeSupported(action)) {
            LOG.info(
                "Copying hadoop TT log for action: {} of type: {}",
                action.getName(),
                action.getType());
            copyTTlogs(fs, path, action);
          } else {
            LOG.info(
                "Ignoring hadoop TT log for non supported action: {} of type: {}",
                action.getName(),
                action.getType());
          }
        }
      }

    } catch (Exception e) {
      // JobLogMover doesn't throw exception, a failed log mover will not fail the user workflow
      LOG.error("Exception in log mover:", e);
    }
    return 0;
  }
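A minimal sketch of the OozieClient lookup at the heart of the log mover above, assuming a reachable Oozie endpoint; the URL and workflow id are placeholders. The log mover itself always returns 0 so that a failed log copy never fails the user workflow.

 import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.client.OozieClientException;
 import org.apache.oozie.client.WorkflowJob;

 public final class OozieLookupDemo {
   public static void main(String[] args) throws OozieClientException {
     // Placeholder endpoint and workflow id.
     OozieClient client = new OozieClient("http://oozie.example.com:11000/oozie");
     WorkflowJob job = client.getJobInfo("0000001-140101000000000-oozie-oozi-W");
     System.out.println(job.getId() + " -> " + job.getStatus());
   }
 }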