private RunningJob submitAction(Context context, Namespace ns) throws Exception {
    Hive2ActionExecutor ae = new Hive2ActionExecutor();

    WorkflowAction action = context.getAction();

    ae.prepareActionDir(getFileSystem(), context);
    ae.submitLauncher(getFileSystem(), context, action);

    String jobId = action.getExternalId();
    String jobTracker = action.getTrackerUri();
    String consoleUrl = action.getConsoleUrl();
    assertNotNull(jobId);
    assertNotNull(jobTracker);
    assertNotNull(consoleUrl);
    Element e = XmlUtils.parseXml(action.getConf());
    XConfiguration conf =
        new XConfiguration(
            new StringReader(XmlUtils.prettyPrint(e.getChild("configuration", ns)).toString()));
    conf.set("mapred.job.tracker", e.getChildTextTrim("job-tracker", ns));
    conf.set("fs.default.name", e.getChildTextTrim("name-node", ns));
    conf.set("user.name", context.getProtoActionConf().get("user.name"));
    conf.set("group.name", getTestGroup());

    JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
    XConfiguration.copy(conf, jobConf);
    String user = jobConf.get("user.name");
    JobClient jobClient =
        Services.get().get(HadoopAccessorService.class).createJobClient(user, jobConf);
    final RunningJob runningJob = jobClient.getJob(JobID.forName(jobId));
    assertNotNull(runningJob);
    return runningJob;
  }
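A caller of submitAction would normally block until the returned launcher job finishes before asserting on it; a minimal sketch, assuming a prepared Context and Namespace (context, ns) and reusing the waitFor/Predicate idiom that appears in the tests below:

    final RunningJob launcherJob = submitAction(context, ns); // context/ns prepared by the test (assumed)
    waitFor(
        120 * 1000,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return launcherJob.isComplete(); // poll the Hadoop launcher job
          }
        });
    assertTrue(launcherJob.isSuccessful());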
Example #2
 @Override
 public void registerForNotification(URI uri, Configuration conf, String user, String actionID)
     throws URIHandlerException {
   HCatURI hcatURI;
   try {
     hcatURI = new HCatURI(uri);
   } catch (URISyntaxException e) {
     throw new URIHandlerException(ErrorCode.E0906, uri, e);
   }
   HCatAccessorService hcatService = Services.get().get(HCatAccessorService.class);
   if (!hcatService.isRegisteredForNotification(hcatURI)) {
     HCatClient client = getHCatClient(uri, conf, user);
     try {
       String topic = client.getMessageBusTopicName(hcatURI.getDb(), hcatURI.getTable());
       if (topic == null) {
         return;
       }
       hcatService.registerForNotification(
           hcatURI, topic, new HCatMessageHandler(uri.getAuthority()));
     } catch (HCatException e) {
       throw new HCatAccessorException(ErrorCode.E1501, e);
     } finally {
       closeQuietly(client, true);
     }
   }
   PartitionDependencyManagerService pdmService =
       Services.get().get(PartitionDependencyManagerService.class);
   pdmService.addMissingDependency(hcatURI, actionID);
 }
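The URI handed to this handler is an HCatalog partition URI; a hedged sketch of a call site, where the server, database, table and partition values are made up for illustration:

   // Illustrative values only; an hcat URI carries db/table plus partition key=value pairs.
   URI uri = new URI("hcat://hcat.server.example:11002/mydb/clicks/datestamp=20141015;region=us");
   registerForNotification(uri, actionConf, "test-user", actionId); // actionConf/actionID assumed from the caller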
Example #3
  /**
   * Get coord job info
   *
   * @param request servlet request
   * @param response servlet response
   * @return JsonBean CoordinatorJobBean
   * @throws XServletException
   * @throws BaseEngineException
   */
  private JsonBean getCoordinatorJob(HttpServletRequest request, HttpServletResponse response)
      throws XServletException, BaseEngineException {
    JsonBean jobBean = null;
    CoordinatorEngine coordEngine =
        Services.get()
            .get(CoordinatorEngineService.class)
            .getCoordinatorEngine(getUser(request), getAuthToken(request));
    String jobId = getResourceName(request);
    String startStr = request.getParameter(RestConstants.OFFSET_PARAM);
    String lenStr = request.getParameter(RestConstants.LEN_PARAM);
    String filter = request.getParameter(RestConstants.JOB_FILTER_PARAM);
    int start = (startStr != null) ? Integer.parseInt(startStr) : 1;
    start = (start < 1) ? 1 : start;
    // Get default number of coordinator actions to be retrieved
    int defaultLen = Services.get().getConf().getInt(COORD_ACTIONS_DEFAULT_LENGTH, 1000);
    int len = (lenStr != null) ? Integer.parseInt(lenStr) : 0;
    len = (len < 1) ? defaultLen : len;
    try {
      JsonCoordinatorJob coordJob = coordEngine.getCoordJob(jobId, filter, start, len);
      jobBean = coordJob;
    } catch (CoordinatorEngineException ex) {
      throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ex);
    }

    return jobBean;
  }
  public void testActionCheck() throws Exception {
    JPAService jpaService = Services.get().get(JPAService.class);
    WorkflowJobBean job =
        this.addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING);
    WorkflowActionBean action =
        this.addRecordToWfActionTable(job.getId(), "1", WorkflowAction.Status.PREP);
    WorkflowActionGetJPAExecutor wfActionGetCmd = new WorkflowActionGetJPAExecutor(action.getId());

    new ActionStartXCommand(action.getId(), "map-reduce").call();
    action = jpaService.execute(wfActionGetCmd);

    ActionExecutorContext context =
        new ActionXCommand.ActionExecutorContext(job, action, false, false);
    MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
    JobConf conf =
        actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));
    String user = conf.get("user.name");
    JobClient jobClient =
        Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);

    String launcherId = action.getExternalId();

    final RunningJob launcherJob = jobClient.getJob(JobID.forName(launcherId));

    waitFor(
        120 * 1000,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return launcherJob.isComplete();
          }
        });
    assertTrue(launcherJob.isSuccessful());
    Map<String, String> actionData =
        LauncherMapperHelper.getActionData(getFileSystem(), context.getActionDir(), conf);
    assertTrue(LauncherMapperHelper.hasIdSwap(actionData));

    new ActionCheckXCommand(action.getId()).call();
    action = jpaService.execute(wfActionGetCmd);
    String mapperId = action.getExternalId();
    String childId = action.getExternalChildIDs();

    assertTrue(launcherId.equals(mapperId));

    final RunningJob mrJob = jobClient.getJob(JobID.forName(childId));

    waitFor(
        120 * 1000,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return mrJob.isComplete();
          }
        });
    assertTrue(mrJob.isSuccessful());

    new ActionCheckXCommand(action.getId()).call();
    action = jpaService.execute(wfActionGetCmd);

    assertEquals("SUCCEEDED", action.getExternalStatus());
  }
Example #5
  /**
   * Provides functionality to test for set*Data calls not being made by the Action Handler.
   *
   * @param avoidParam set*Data function call to avoid.
   * @param expActionErrorCode the expected action error code.
   * @throws Exception
   */
  private void _testDataNotSet(String avoidParam, String expActionErrorCode) throws Exception {
    String workflowPath = getTestCaseFileUri("workflow.xml");
    Reader reader = IOUtils.getResourceAsReader("wf-ext-schema-valid.xml", -1);
    Writer writer = new FileWriter(new File(getTestCaseDir(), "workflow.xml"));
    IOUtils.copyCharStream(reader, writer);

    final DagEngine engine = new DagEngine("u");
    Configuration conf = new XConfiguration();
    conf.set(OozieClient.APP_PATH, workflowPath);
    conf.set(OozieClient.USER_NAME, getTestUser());

    conf.set(OozieClient.LOG_TOKEN, "t");
    conf.set("external-status", "ok");
    conf.set("signal-value", "based_on_action_status");
    conf.set(avoidParam, "true");

    final String jobId = engine.submitJob(conf, true);

    final WorkflowStore store = Services.get().get(WorkflowStoreService.class).create();
    store.beginTrx();
    Thread.sleep(2000);

    waitFor(
        5000,
        new Predicate() {
          public boolean evaluate() throws Exception {
            WorkflowJobBean bean = store.getWorkflow(jobId, false);
            return (bean.getWorkflowInstance().getStatus() == WorkflowInstance.Status.FAILED);
          }
        });
    store.commitTrx();
    store.closeTrx();

    final WorkflowStore store2 = Services.get().get(WorkflowStoreService.class).create();
    store2.beginTrx();
    assertEquals(
        WorkflowInstance.Status.FAILED,
        store2.getWorkflow(jobId, false).getWorkflowInstance().getStatus());
    assertEquals(WorkflowJob.Status.FAILED, engine.getJob(jobId).getStatus());

    List<WorkflowActionBean> actions = store2.getActionsForWorkflow(jobId, false);
    WorkflowActionBean action = null;
    for (WorkflowActionBean bean : actions) {
      if (bean.getType().equals("test")) {
        action = bean;
        break;
      }
    }
    assertNotNull(action);
    assertEquals(expActionErrorCode, action.getErrorCode());
    store2.commitTrx();
    store2.closeTrx();
  }
Example #6
 /**
  * Retrieve the SLA registration for the given job and update its registration event.
  *
  * @param jobId the jobId
  * @throws CommandException
  * @throws JPAExecutorException
  */
 public static void updateRegistrationEvent(String jobId)
     throws CommandException, JPAExecutorException {
   JPAService jpaService = Services.get().get(JPAService.class);
   SLAService slaService = Services.get().get(SLAService.class);
   try {
     SLARegistrationBean reg =
         SLARegistrationQueryExecutor.getInstance().get(SLARegQuery.GET_SLA_REG_ALL, jobId);
     if (reg != null) { // handle coord rerun with different config without sla
       slaService.updateRegistrationEvent(reg);
     }
   } catch (ServiceException e) {
     throw new CommandException(ErrorCode.E1007, " id " + jobId, e.getMessage(), e);
   }
 }
 @Override
 protected void setUp() throws Exception {
   super.setUp();
   services = new Services();
   services.init();
   jpaService = Services.get().get(JPAService.class);
 }
Example #8
  /**
   * Start LocalOozie.
   *
   * @throws Exception if LocalOozie could not be started.
   */
  public static synchronized void start() throws Exception {
    if (localOozieActive) {
      throw new IllegalStateException("LocalOozie is already initialized");
    }

    String log4jFile = System.getProperty(XLogService.LOG4J_FILE_ENV, null);
    String oozieLocalLog = System.getProperty("oozielocal.log", null);
    if (log4jFile == null) {
      System.setProperty(XLogService.LOG4J_FILE_ENV, "localoozie-log4j.properties");
    }
    if (oozieLocalLog == null) {
      System.setProperty("oozielocal.log", "./oozielocal.log");
    }

    localOozieActive = true;
    new Services().init();

    if (log4jFile != null) {
      System.setProperty(XLogService.LOG4J_FILE_ENV, log4jFile);
    } else {
      System.getProperties().remove(XLogService.LOG4J_FILE_ENV);
    }
    if (oozieLocalLog != null) {
      System.setProperty("oozielocal.log", oozieLocalLog);
    } else {
      System.getProperties().remove("oozielocal.log");
    }

    container = new EmbeddedServletContainer("oozie");
    container.addServletEndpoint("/callback", CallbackServlet.class);
    container.start();
    String callbackUrl = container.getServletURL("/callback");
    Services.get().getConf().set(CallbackService.CONF_BASE_URL, callbackUrl);
    XLog.getLog(LocalOozie.class).info("LocalOozie started callback set to [{0}]", callbackUrl);
  }
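In a test, this start call is normally paired with a shutdown; a minimal sketch of the lifecycle, assuming a companion LocalOozie.stop() method:

    LocalOozie.start();                                     // boots Services and the /callback servlet
    try {
      OozieClient wc = LocalOozie.getClient(getTestUser()); // see the getClient example further below
      // ... submit and monitor workflows through wc ...
    } finally {
      LocalOozie.stop();                                    // assumed companion teardown call
    }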
  private void checkCoordActions(String jobId, int number, CoordinatorJob.Status status) {
    try {
      JPAService jpaService = Services.get().get(JPAService.class);
      List<CoordinatorActionBean> actions =
          jpaService.execute(new CoordJobGetActionsJPAExecutor(jobId));
      if (actions.size() != number) {
        fail(
            "Should have "
                + number
                + " actions created for job "
                + jobId
                + ", but jave "
                + actions.size()
                + " actions.");
      }

      if (status != null) {
        CoordinatorJob job = jpaService.execute(new CoordJobGetJPAExecutor(jobId));
        if (job.getStatus() != status) {
          fail("Job status " + job.getStatus() + " should be " + status);
        }
      }
    } catch (JPAExecutorException se) {
      se.printStackTrace();
      fail("Job ID " + jobId + " was not stored properly in db");
    }
  }
 @Test
 public void testListenerConfigured() throws Exception {
   EventHandlerService ehs = services.get(EventHandlerService.class);
   assertNotNull(ehs);
   assertTrue(SLAService.isEnabled());
   assertTrue(ehs.listEventListeners().contains(SLAJobEventListener.class.getCanonicalName()));
 }
Example #11
  /** v1 service implementation to submit a workflow job */
  @SuppressWarnings("unchecked")
  private JSONObject submitWorkflowJob(HttpServletRequest request, Configuration conf)
      throws XServletException {

    JSONObject json = new JSONObject();

    try {
      String action = request.getParameter(RestConstants.ACTION_PARAM);
      if (action != null && !action.equals(RestConstants.JOB_ACTION_START)) {
        throw new XServletException(
            HttpServletResponse.SC_BAD_REQUEST,
            ErrorCode.E0303,
            RestConstants.ACTION_PARAM,
            action);
      }
      boolean startJob = (action != null);
      String user = conf.get(OozieClient.USER_NAME);
      DagEngine dagEngine =
          Services.get().get(DagEngineService.class).getDagEngine(user, getAuthToken(request));
      String id = dagEngine.submitJob(conf, startJob);
      json.put(JsonTags.JOB_ID, id);
    } catch (DagEngineException ex) {
      throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ex);
    }

    return json;
  }
Example #12
  @SuppressWarnings("unchecked")
  private JSONObject getBundleJobs(HttpServletRequest request) throws XServletException {
    JSONObject json = new JSONObject();
    try {
      String filter = request.getParameter(RestConstants.JOBS_FILTER_PARAM);
      String startStr = request.getParameter(RestConstants.OFFSET_PARAM);
      String lenStr = request.getParameter(RestConstants.LEN_PARAM);
      int start = (startStr != null) ? Integer.parseInt(startStr) : 1;
      start = (start < 1) ? 1 : start;
      int len = (lenStr != null) ? Integer.parseInt(lenStr) : 50;
      len = (len < 1) ? 50 : len;

      BundleEngine bundleEngine =
          Services.get()
              .get(BundleEngineService.class)
              .getBundleEngine(getUser(request), getAuthToken(request));
      BundleJobInfo jobs = bundleEngine.getBundleJobs(filter, start, len);
      List<BundleJobBean> jsonJobs = jobs.getBundleJobs();

      json.put(JsonTags.BUNDLE_JOBS, BundleJobBean.toJSONArray(jsonJobs));
      json.put(JsonTags.BUNDLE_JOB_TOTAL, jobs.getTotal());
      json.put(JsonTags.BUNDLE_JOB_OFFSET, jobs.getStart());
      json.put(JsonTags.BUNDLE_JOB_LEN, jobs.getLen());

    } catch (BundleEngineException ex) {
      throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ex);
    }
    return json;
  }
 /**
  * Return the default group to which the user belongs.
  *
  * <p>This implementation always returns 'users'.
  *
  * @param user user name.
  * @return default group of user.
  * @throws AuthorizationException thrown if the default group cannot be retrieved.
  */
 public String getDefaultGroup(String user) throws AuthorizationException {
   try {
     return Services.get().get(GroupsService.class).getGroups(user).get(0);
   } catch (IOException ex) {
     throw new AuthorizationException(ErrorCode.E0501, ex.getMessage(), ex);
   }
 }
Example #14
  /**
   * Provides functionality to test errors
   *
   * @param errorType the error type. (start.non-transient, end.non-transient)
   * @param externalStatus the external status to set.
   * @param signalValue the signal value to set.
   * @throws Exception
   */
  private void _testError(String errorType, String externalStatus, String signalValue)
      throws Exception {
    String workflowPath = getTestCaseFileUri("workflow.xml");
    Reader reader = IOUtils.getResourceAsReader("wf-ext-schema-valid.xml", -1);
    Writer writer = new FileWriter(new File(getTestCaseDir(), "workflow.xml"));
    IOUtils.copyCharStream(reader, writer);

    final DagEngine engine = new DagEngine("u");
    Configuration conf = new XConfiguration();
    conf.set(OozieClient.APP_PATH, workflowPath);
    conf.set(OozieClient.USER_NAME, getTestUser());

    conf.set(OozieClient.LOG_TOKEN, "t");
    conf.set("error", errorType);
    conf.set("external-status", externalStatus);
    conf.set("signal-value", signalValue);

    final String jobId = engine.submitJob(conf, true);

    final WorkflowStore store = Services.get().get(WorkflowStoreService.class).create();
    store.beginTrx();
    waitFor(
        5000,
        new Predicate() {
          public boolean evaluate() throws Exception {
            WorkflowJobBean bean = store.getWorkflow(jobId, false);
            return (bean.getWorkflowInstance().getStatus() == WorkflowInstance.Status.KILLED);
          }
        });
    assertEquals(WorkflowJob.Status.KILLED, engine.getJob(jobId).getStatus());
    store.commitTrx();
    store.closeTrx();
  }
Example #15
  /**
   * Rerun bundle job
   *
   * @param request servlet request
   * @param response servlet response
   * @param conf configuration object
   * @throws XServletException
   */
  private void rerunBundleJob(
      HttpServletRequest request, HttpServletResponse response, Configuration conf)
      throws XServletException {
    JSONObject json = new JSONObject();
    BundleEngine bundleEngine =
        Services.get()
            .get(BundleEngineService.class)
            .getBundleEngine(getUser(request), getAuthToken(request));
    String jobId = getResourceName(request);

    String coordScope = request.getParameter(RestConstants.JOB_BUNDLE_RERUN_COORD_SCOPE_PARAM);
    String dateScope = request.getParameter(RestConstants.JOB_BUNDLE_RERUN_DATE_SCOPE_PARAM);
    String refresh = request.getParameter(RestConstants.JOB_COORD_RERUN_REFRESH_PARAM);
    String noCleanup = request.getParameter(RestConstants.JOB_COORD_RERUN_NOCLEANUP_PARAM);

    XLog.getLog(getClass())
        .info(
            "Rerun Bundle for jobId="
                + jobId
                + ", coordScope="
                + coordScope
                + ", dateScope="
                + dateScope
                + ", refresh="
                + refresh
                + ", noCleanup="
                + noCleanup);

    try {
      bundleEngine.reRun(
          jobId, coordScope, dateScope, Boolean.valueOf(refresh), Boolean.valueOf(noCleanup));
    } catch (BaseEngineException ex) {
      throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ex);
    }
  }
 /**
  * Load the list of admin users from {@link AuthorizationService#ADMIN_USERS_FILE}
  *
  * @throws ServiceException if the admin user list could not be loaded.
  */
 private void loadAdminUsers() throws ServiceException {
   String configDir = Services.get().get(ConfigurationService.class).getConfigDir();
   if (configDir != null) {
     File file = new File(configDir, ADMIN_USERS_FILE);
     if (file.exists()) {
       try {
         BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
         try {
           String line = br.readLine();
           while (line != null) {
             line = line.trim();
             if (line.length() > 0 && !line.startsWith("#")) {
               adminUsers.add(line);
             }
             line = br.readLine();
           }
         } catch (IOException ex) {
           throw new ServiceException(ErrorCode.E0160, file.getAbsolutePath(), ex);
          } finally {
            try {
              br.close();
            } catch (IOException ioe) {
              // best-effort close; the admin user list has already been read
            }
          }
       } catch (FileNotFoundException ex) {
         throw new ServiceException(ErrorCode.E0160, ex);
       }
     } else {
       log.warn(
           "Admin users file not available in config dir [{0}], running without admin users",
           configDir);
     }
   } else {
     log.warn("Reading configuration from classpath, running without admin users");
   }
 }
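For reference, the parser above expects one admin user name per non-empty line, with '#' starting a comment line; a sketch of writing such a file for a test (the ADMIN_USERS_FILE name is assumed to be adminusers.txt):

   File adminFile = new File(configDir, "adminusers.txt"); // file name assumed from ADMIN_USERS_FILE
   Writer w = new FileWriter(adminFile);
   w.write("# operations team\n");                         // comment line, skipped by loadAdminUsers()
   w.write("alice\n");                                     // every remaining non-empty line is an admin user
   w.write("bob\n");
   w.close();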
Example #17
  /**
   * Basic test
   *
   * @throws Exception
   */
  public void testBasicSubmit() throws Exception {
    Configuration conf = new XConfiguration();
    String appPath = getTestCaseDir();
    String appXml =
        "<coordinator-app name=\"NAME\" frequency=\"${coord:days(1)}\" start=\"2009-02-01T01:00Z\" end=\"2009-02-03T23:59Z\" timezone=\"UTC\" "
            + "xmlns=\"uri:oozie:coordinator:0.1\"> <controls> <concurrency>2</concurrency> "
            + "<execution>LIFO</execution> </controls> <datasets> "
            + "<dataset name=\"a\" frequency=\"${coord:days(7)}\" initial-instance=\"2009-02-01T01:00Z\" "
            + "timezone=\"UTC\"> <uri-template>file:///tmp/coord/workflows/${YEAR}/${DAY}</uri-template> </dataset> "
            + "<dataset name=\"local_a\" frequency=\"${coord:days(7)}\" initial-instance=\"2009-02-01T01:00Z\" "
            + "timezone=\"UTC\"> <uri-template>file:///tmp/coord/workflows/${YEAR}/${DAY}</uri-template> </dataset> "
            + "</datasets> <input-events> "
            + "<data-in name=\"A\" dataset=\"a\"> <instance>${coord:latest(0)}</instance> </data-in>  "
            + "</input-events> "
            + "<output-events> <data-out name=\"LOCAL_A\" dataset=\"local_a\"> "
            + "<instance>${coord:current(-1)}</instance> </data-out> </output-events> <action> <workflow> <app-path>hdfs:///tmp/workflows/</app-path> "
            + "<configuration> <property> <name>inputA</name> <value>${coord:dataIn('A')}</value> </property> "
            + "<property> <name>inputB</name> <value>${coord:dataOut('LOCAL_A')}</value> "
            + "</property></configuration> </workflow> </action> </coordinator-app>";
    writeToFile(appXml, appPath);
    conf.set(OozieClient.COORDINATOR_APP_PATH, appPath);
    conf.set(OozieClient.USER_NAME, getTestUser());
    conf.set(OozieClient.GROUP_NAME, "other");
    CoordSubmitCommand sc = new CoordSubmitCommand(conf, "UNIT_TESTING");
    String jobId = sc.call();

    assertEquals(jobId.substring(jobId.length() - 2), "-C");
    CoordinatorJobBean job = checkCoordJobs(jobId);
    if (job != null) {
      assertEquals(
          job.getTimeout(),
          Services.get().getConf().getInt("oozie.service.coord.normal.default.timeout", -2));
    }
  }
  protected CoordinatorJobBean addRecordToCoordJobTable(
      CoordinatorJob.Status status, Date startTime, Date endTime, Date pauseTime, int timeout)
      throws Exception {
    CoordinatorJobBean coordJob = createCoordJob(status);
    coordJob.setStartTime(startTime);
    coordJob.setEndTime(endTime);
    coordJob.setPauseTime(pauseTime);
    coordJob.setFrequency(5);
    coordJob.setTimeUnit(Timeunit.MINUTE);
    coordJob.setTimeout(timeout);
    coordJob.setConcurrency(3);

    try {
      JPAService jpaService = Services.get().get(JPAService.class);
      assertNotNull(jpaService);
      CoordJobInsertJPAExecutor coordInsertCmd = new CoordJobInsertJPAExecutor(coordJob);
      jpaService.execute(coordInsertCmd);
    } catch (JPAExecutorException ex) {
      ex.printStackTrace();
      fail("Unable to insert the test coord job record to table");
      throw ex;
    }

    return coordJob;
  }
Example #19
 protected FileSystem getAppFileSystem(WorkflowJob workflow)
     throws HadoopAccessorException, IOException, URISyntaxException {
   URI uri = new URI(workflow.getAppPath());
   HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
   Configuration fsConf = has.createJobConf(uri.getAuthority());
   return has.createFileSystem(workflow.getUser(), uri, fsConf);
 }
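A caller might use the returned FileSystem to read the application's workflow definition; a short sketch that assumes the usual app-path layout and reuses the copyCharStream helper shown in the tests above:

   FileSystem appFs = getAppFileSystem(workflow);
   Path appXml = new Path(new Path(workflow.getAppPath()), "workflow.xml"); // assumes workflow.xml under the app path
   Reader reader = new InputStreamReader(appFs.open(appXml));
   Writer writer = new StringWriter();
   IOUtils.copyCharStream(reader, writer); // same helper used in the tests above
   String definition = writer.toString();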
  /**
   * Test: verify that a PreconditionException is thrown when actionCheckDelay > 0
   *
   * @throws Exception
   */
  public void testActionCheckPreCondition1() throws Exception {
    Instrumentation inst = Services.get().get(InstrumentationService.class).get();

    WorkflowJobBean job =
        this.addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING);
    WorkflowActionBean action =
        this.addRecordToWfActionTable(job.getId(), "1", WorkflowAction.Status.PREP);

    ActionCheckXCommand checkCmd = new ActionCheckXCommand(action.getId(), 10);

    long counterVal;

    try {
      counterVal =
          inst.getCounters()
              .get(XCommand.INSTRUMENTATION_GROUP)
              .get(checkCmd.getName() + ".preconditionfailed")
              .getValue();
    } catch (NullPointerException e) {
      // counter might be null
      counterVal = 0L;
    }

    assertEquals(0L, counterVal);

    checkCmd.call();

    // precondition failed because of actionCheckDelay > 0
    counterVal =
        inst.getCounters()
            .get(XCommand.INSTRUMENTATION_GROUP)
            .get(checkCmd.getName() + ".preconditionfailed")
            .getValue();
    assertEquals(1L, counterVal);
  }
Example #21
 public static ELEvaluator createELEvaluatorForGroup(Configuration conf, String group) {
   ELEvaluator eval = Services.get().get(ELService.class).createEvaluator(group);
   for (Map.Entry<String, String> entry : conf) {
     eval.setVariable(entry.getKey(), entry.getValue());
   }
   return eval;
 }
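A hedged usage sketch: seed a Configuration with a variable and resolve an EL expression against the resulting evaluator (the group name and expression are illustrative):

   Configuration conf = new XConfiguration();
   conf.set("appName", "my-app");                                      // becomes an EL variable
   ELEvaluator eval = createELEvaluatorForGroup(conf, "job-submit");   // group name is illustrative
   String resolved = eval.evaluate("${appName}-output", String.class); // expected: "my-app-output"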
Example #22
  /**
   * v1 service implementation to get a list of workflows, with any filter or window of interest
   * embedded in the request object
   */
  private JSONObject getWorkflowJobs(HttpServletRequest request) throws XServletException {
    JSONObject json = new JSONObject();
    try {
      String filter = request.getParameter(RestConstants.JOBS_FILTER_PARAM);
      String startStr = request.getParameter(RestConstants.OFFSET_PARAM);
      String lenStr = request.getParameter(RestConstants.LEN_PARAM);
      int start = (startStr != null) ? Integer.parseInt(startStr) : 1;
      start = (start < 1) ? 1 : start;
      int len = (lenStr != null) ? Integer.parseInt(lenStr) : 50;
      len = (len < 1) ? 50 : len;
      DagEngine dagEngine =
          Services.get()
              .get(DagEngineService.class)
              .getDagEngine(getUser(request), getAuthToken(request));
      WorkflowsInfo jobs = dagEngine.getJobs(filter, start, len);
      List<WorkflowJobBean> jsonWorkflows = jobs.getWorkflows();
      json.put(JsonTags.WORKFLOWS_JOBS, WorkflowJobBean.toJSONArray(jsonWorkflows));
      json.put(JsonTags.WORKFLOWS_TOTAL, jobs.getTotal());
      json.put(JsonTags.WORKFLOWS_OFFSET, jobs.getStart());
      json.put(JsonTags.WORKFLOWS_LEN, jobs.getLen());

    } catch (DagEngineException ex) {
      throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ex);
    }

    return json;
  }
 /**
  * Check if the user belongs to the group or not.
  *
  * @param user user name.
  * @param group group name.
  * @return if the user belongs to the group or not.
  * @throws AuthorizationException thrown if the authorization query can not be performed.
  */
 protected boolean isUserInGroup(String user, String group) throws AuthorizationException {
   GroupsService groupsService = Services.get().get(GroupsService.class);
   try {
     return groupsService.getGroups(user).contains(group);
   } catch (IOException ex) {
     throw new AuthorizationException(ErrorCode.E0501, ex.getMessage(), ex);
   }
 }
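Together with getDefaultGroup above, this enables a simple authorization check; a short sketch of how a caller might combine the two (the user name is a placeholder):

   String user = "test-user";                       // placeholder
   String group = getDefaultGroup(user);            // default group as reported by GroupsService
   boolean authorized = isUserInGroup(user, group);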
Example #24
 /**
  * Return a {@link org.apache.oozie.client.OozieClient} for LocalOozie configured for a given
  * user.
  *
  * <p>The following methods of the client are NOP in the returned instance: {@link
  * org.apache.oozie.client.OozieClient#validateWSVersion}, {@link
  * org.apache.oozie.client.OozieClient#setHeader}, {@link
  * org.apache.oozie.client.OozieClient#getHeader}, {@link
  * org.apache.oozie.client.OozieClient#removeHeader}, {@link
  * org.apache.oozie.client.OozieClient#getHeaderNames} and {@link
  * org.apache.oozie.client.OozieClient#setSafeMode}.
  *
  * @param user user name to use in LocalOozie for running workflows.
  * @return a {@link org.apache.oozie.client.OozieClient} for LocalOozie configured for the given
  *     user.
  */
 public static OozieClient getClient(String user) {
   if (!localOozieActive) {
     throw new IllegalStateException("LocalOozie is not initialized");
   }
   ParamChecker.notEmpty(user, "user");
   DagEngine dagEngine = Services.get().get(DagEngineService.class).getDagEngine(user, "undef");
   return new LocalOozieClient(dagEngine);
 }
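A minimal sketch of driving LocalOozie through the returned client with standard OozieClient calls (the application path is a placeholder):

   OozieClient wc = LocalOozie.getClient("test-user");
   Properties props = wc.createConfiguration();                          // pre-populated job properties
   props.setProperty(OozieClient.APP_PATH, "hdfs:///tmp/workflows/app"); // placeholder path
   String jobId = wc.run(props);                                         // submit and start the workflow
   WorkflowJob.Status status = wc.getJobInfo(jobId).getStatus();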
 private ConnectionContext getConnectionContext() {
   Configuration conf = services.getConf();
   String jmsProps = conf.get(JMSJobEventListener.JMS_CONNECTION_PROPERTIES);
   JMSConnectionInfo connInfo = new JMSConnectionInfo(jmsProps);
   JMSAccessorService jmsService = Services.get().get(JMSAccessorService.class);
   ConnectionContext jmsContext = jmsService.createConnectionContext(connInfo);
   return jmsContext;
 }
 private CoordinatorActionBean createCoordinatorActionBean(CoordinatorJob job) throws IOException {
   CoordinatorActionBean actionBean = new CoordinatorActionBean();
   String actionId = Services.get().get(UUIDService.class).generateChildId(job.getId(), "1");
   actionBean.setJobId(job.getId());
   actionBean.setId(actionId);
   Configuration jobConf = new XConfiguration(new StringReader(job.getConf()));
   actionBean.setRunConf(XmlUtils.prettyPrint(jobConf).toString());
   return actionBean;
 }
  public void testSubworkflowLib() throws Exception {
    XConfiguration protoConf = getBaseProtoConf();
    WorkflowJobBean workflow = createBaseWorkflow(protoConf, "W");
    FileSystem fs = getFileSystem();
    Path parentLibJar = new Path(getFsTestCaseDir(), "lib/parentLibrary.jar");
    fs.create(parentLibJar);
    assertTrue(fs.exists(parentLibJar));
    String defaultConf = workflow.getConf();
    XConfiguration newConf = new XConfiguration(new StringReader(defaultConf));
    newConf.set(OozieClient.LIBPATH, parentLibJar.getParent().toString());
    workflow.setConf(newConf.toXmlString());

    Path subWorkflowAppPath = new Path(getFsTestCaseDir().toString(), "subwf");
    Writer writer = new OutputStreamWriter(fs.create(new Path(subWorkflowAppPath, "workflow.xml")));
    writer.write(APP1);
    writer.close();
    Path subwfLibJar = new Path(subWorkflowAppPath, "lib/subwfLibrary.jar");
    fs.create(subwfLibJar);
    assertTrue(fs.exists(subwfLibJar));

    final WorkflowActionBean action = (WorkflowActionBean) workflow.getActions().get(0);
    action.setConf(
        "<sub-workflow xmlns='uri:oozie:workflow:0.1' name='subwf'>"
            + "      <app-path>"
            + subWorkflowAppPath
            + File.separator
            + "workflow.xml"
            + "</app-path>"
            + "</sub-workflow>");
    SubWorkflowActionExecutor subWorkflow = new SubWorkflowActionExecutor();
    subWorkflow.start(new Context(workflow, action), action);

    final OozieClient oozieClient =
        subWorkflow.getWorkflowClient(
            new Context(workflow, action), SubWorkflowActionExecutor.LOCAL);
    waitFor(
        JOB_TIMEOUT,
        new Predicate() {
          public boolean evaluate() throws Exception {
            return oozieClient.getJobInfo(action.getExternalId()).getStatus()
                == WorkflowJob.Status.SUCCEEDED;
          }
        });

    assertEquals(
        WorkflowJob.Status.SUCCEEDED, oozieClient.getJobInfo(action.getExternalId()).getStatus());
    subWorkflow.check(new Context(workflow, action), action);
    assertEquals(WorkflowAction.Status.DONE, action.getStatus());
    subWorkflow.end(new Context(workflow, action), action);
    assertEquals(WorkflowAction.Status.OK, action.getStatus());

    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    WorkflowJob wf = oozieClient.getJobInfo(action.getExternalId());
    Configuration childConf = new XConfiguration(new StringReader(wf.getConf()));
    childConf = wps.createProtoActionConf(childConf, "authToken", true);
    assertEquals(childConf.get(WorkflowAppService.APP_LIB_PATH_LIST), subwfLibJar.toString());
  }
  protected WorkflowActionBean createWorkflowActionSetPending(
      String wfId, WorkflowAction.Status status) throws Exception {
    WorkflowActionBean action = new WorkflowActionBean();
    String actionname = "testAction";
    action.setName(actionname);
    action.setCred("null");
    action.setId(Services.get().get(UUIDService.class).generateChildId(wfId, actionname));
    action.setJobId(wfId);
    action.setType("map-reduce");
    action.setTransition("transition");
    action.setStatus(status);
    action.setStartTime(new Date());
    action.setEndTime(new Date());
    action.setLastCheckTime(new Date());
    action.setPending();
    action.setExecutionPath("/");

    Path inputDir = new Path(getFsTestCaseDir(), "input");
    Path outputDir = new Path(getFsTestCaseDir(), "output");

    FileSystem fs = getFileSystem();
    Writer w = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
    w.write("dummy\n");
    w.write("dummy\n");
    w.close();

    String actionXml =
        "<map-reduce>"
            + "<job-tracker>"
            + getJobTrackerUri()
            + "</job-tracker>"
            + "<name-node>"
            + getNameNodeUri()
            + "</name-node>"
            + "<prepare><delete path=\""
            + outputDir.toString()
            + "\"/></prepare>"
            + "<configuration>"
            + "<property><name>mapred.mapper.class</name><value>"
            + MapperReducerForTest.class.getName()
            + "</value></property>"
            + "<property><name>mapred.reducer.class</name><value>"
            + MapperReducerForTest.class.getName()
            + "</value></property>"
            + "<property><name>mapred.input.dir</name><value>"
            + inputDir.toString()
            + "</value></property>"
            + "<property><name>mapred.output.dir</name><value>"
            + outputDir.toString()
            + "</value></property>"
            + "</configuration>"
            + "</map-reduce>";
    action.setConf(actionXml);

    return action;
  }
 private void checkCoordActionsTimeout(String actionId, int expected) {
   try {
     JPAService jpaService = Services.get().get(JPAService.class);
     CoordinatorActionBean action = jpaService.execute(new CoordActionGetJPAExecutor(actionId));
     assertEquals(action.getTimeOut(), expected);
   } catch (JPAExecutorException se) {
     se.printStackTrace();
     fail("Action ID " + actionId + " was not stored properly in db");
   }
 }
 /**
  * Check whether a URI path exists.
  *
  * @param sPath the path to check.
  * @param user user name used to access the file system.
  * @param group group name used to access the file system.
  * @param conf configuration used to create the file system.
  * @return true if the path exists, false otherwise.
  * @throws IOException
  * @throws HadoopAccessorException
  */
 private static boolean isPathAvailable(
     String sPath, String user, String group, Configuration conf)
     throws IOException, HadoopAccessorException {
   // sPath += "/" + END_OF_OPERATION_INDICATOR_FILE;
   Path path = new Path(sPath);
   return Services.get()
       .get(HadoopAccessorService.class)
       .createFileSystem(user, group, path.toUri(), conf)
       .exists(path);
 }
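A hedged usage sketch of the helper above; the path, user and group values are placeholders:

   Configuration fsConf = new XConfiguration();
   boolean ready =
       isPathAvailable("hdfs://namenode:8020/data/2009/02/01", "test-user", "users", fsConf); // placeholder values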