Example No. 1
 /**
  * Constructor to create the Coordinator Submit Command.
  *
  * @param conf : Configuration for Coordinator job
  * @param authToken : To be used for authentication
  */
 public CoordSubmitXCommand(Configuration conf, String authToken) {
   super("coord_submit", "coord_submit", 1);
   this.conf = ParamChecker.notNull(conf, "conf");
   this.authToken = ParamChecker.notEmpty(authToken, "authToken");
   this.bundleId = null;
   this.coordName = null;
 }
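The constructors in these examples assign the return value of ParamChecker.notNull or ParamChecker.notEmpty directly to a field, so the guard both validates and passes the argument through. A minimal, hypothetical sketch of what such guard helpers typically look like (simplified; not Oozie's actual ParamChecker implementation):

 public final class ParamGuards {
   private ParamGuards() {}

   // Returns value unchanged, or fails fast if it is null (hypothetical simplified guard).
   public static <T> T notNull(T value, String name) {
     if (value == null) {
       throw new IllegalArgumentException(name + " cannot be null");
     }
     return value;
   }

   // Returns value unchanged, or fails fast if it is null or empty.
   public static String notEmpty(String value, String name) {
     if (value == null || value.trim().length() == 0) {
       throw new IllegalArgumentException(name + " cannot be null or empty");
     }
     return value;
   }
 }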
Example No. 2
  /**
   * Resolve datasets using job configuration.
   *
   * @param dsElems : Data set XML elements.
   * @throws CoordinatorJobException thrown if failed to resolve datasets
   */
  private void resolveDataSets(List<Element> dsElems) throws CoordinatorJobException {
    for (Element dsElem : dsElems) {
      // Setting up default TimeUnit and EndOfDuration
      evalFreq.setVariable("timeunit", TimeUnit.MINUTE);
      evalFreq.setVariable("endOfDuration", TimeUnit.NONE);

      String val = resolveAttribute("frequency", dsElem, evalFreq);
      int ival = ParamChecker.checkInteger(val, "frequency");
      ParamChecker.checkGTZero(ival, "frequency");
      addAnAttribute(
          "freq_timeunit",
          dsElem,
          evalFreq.getVariable("timeunit") == null
              ? TimeUnit.MINUTE.toString()
              : ((TimeUnit) evalFreq.getVariable("timeunit")).toString());
      addAnAttribute(
          "end_of_duration",
          dsElem,
          evalFreq.getVariable("endOfDuration") == null
              ? TimeUnit.NONE.toString()
              : ((TimeUnit) evalFreq.getVariable("endOfDuration")).toString());
      val = resolveAttribute("initial-instance", dsElem, evalNofuncs);
      ParamChecker.checkUTC(val, "initial-instance");
      val = resolveAttribute("timezone", dsElem, evalNofuncs);
      ParamChecker.checkTimeZone(val, "timezone");
      resolveTagContents("uri-template", dsElem, evalNofuncs);
      resolveTagContents("done-flag", dsElem, evalNofuncs);
    }
  }
Example No. 3
 public LiteWorkflowInstance(LiteWorkflowApp def, Configuration conf, String instanceId) {
   this();
   this.def = ParamChecker.notNull(def, "def");
   this.instanceId = ParamChecker.notNull(instanceId, "instanceId");
   this.conf = ParamChecker.notNull(conf, "conf");
   refreshLog();
   status = Status.PREP;
 }
Example No. 4
 /**
  * Determine the date-time in UTC of the n-th future available dataset instance from the nominal
  * time, but not beyond the instance specified as 'instance'.
  *
  * <p>It depends on:
  *
  * <p>1. Data set frequency
  *
  * <p>2. Data set time unit (day, month, minute)
  *
  * <p>3. Data set time zone/DST
  *
  * <p>4. End day/month flag
  *
  * <p>5. Data set initial instance
  *
  * <p>6. Action creation time
  *
  * <p>7. Existence of the dataset's directory
  *
  * @param n : instance count; domain: n &gt;= 0, n is an integer
  * @param instance : how many future instances to check; value should be &gt; 0
  * @return date-time in UTC of the n-th instance
  * @throws Exception
  */
 public static String ph3_coord_future(int n, int instance) throws Exception {
   ParamChecker.checkGEZero(n, "future:n");
   ParamChecker.checkGTZero(instance, "future:instance");
   if (isSyncDataSet()) { // For Sync Dataset
     return coord_future_sync(n, instance);
   } else {
     throw new UnsupportedOperationException("Asynchronous Dataset is not supported yet");
   }
 }
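The Javadoc above describes the forward search in prose. The following standalone sketch (not Oozie's coord_future_sync; the reading of the 'instance' parameter as a limit on how many candidate instances are examined is an assumption based on the Javadoc) walks forward from the nominal instance until the n-th available one is found or the check budget runs out:

 public class FutureWalkSketch {
   // Returns the index of the n-th available future instance, or -1 if not found in range.
   static int future(boolean[] available, int nominalIndex, int n, int instance) {
     int found = 0;
     int checked = 0;
     for (int i = nominalIndex; i < available.length && checked < instance; i++, checked++) {
       if (available[i]) {
         if (found == n) {
           return i;
         }
         found++;
       }
     }
     return -1;
   }

   public static void main(String[] args) {
     boolean[] available = {false, true, false, true, true};
     System.out.println(future(available, 0, 0, 5)); // 1  : first available instance
     System.out.println(future(available, 0, 1, 5)); // 3  : second available instance
     System.out.println(future(available, 0, 1, 2)); // -1 : gives up after checking 2 instances
   }
 }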
Example No. 5
 /**
  * Calculate the difference in timezone offset, in minutes, between the dataset and the
  * coordinator job.
  *
  * <p>Depends on:
  *
  * <p>1. Timezone of both dataset and job
  *
  * <p>2. Action creation Time
  *
  * @return difference in minutes (DataSet TZ Offset - Application TZ offset)
  */
 public static int ph2_coord_tzOffset() {
   Date actionCreationTime = getActionCreationtime();
   TimeZone dsTZ = ParamChecker.notNull(getDatasetTZ(), "DatasetTZ");
   TimeZone jobTZ = ParamChecker.notNull(getJobTZ(), "JobTZ");
   // Apply the TZ into Calendar object
   Calendar dsTime = Calendar.getInstance(dsTZ);
   dsTime.setTime(actionCreationTime);
   Calendar jobTime = Calendar.getInstance(jobTZ);
   jobTime.setTime(actionCreationTime);
   return (dsTime.get(Calendar.ZONE_OFFSET) - jobTime.get(Calendar.ZONE_OFFSET)) / (1000 * 60);
 }
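A standalone sketch (not Oozie code) of the same Calendar.ZONE_OFFSET arithmetic: with a dataset timezone of America/Los_Angeles and a job timezone of UTC it prints -480, since ZONE_OFFSET is the raw zone offset and excludes any DST adjustment.

 import java.util.Calendar;
 import java.util.Date;
 import java.util.TimeZone;

 public class TzOffsetSketch {
   public static void main(String[] args) {
     Date now = new Date();
     Calendar dsTime = Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"));
     dsTime.setTime(now);
     Calendar jobTime = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
     jobTime.setTime(now);
     // Same formula as ph2_coord_tzOffset above: (dataset offset - job offset) in minutes.
     int diffMinutes =
         (dsTime.get(Calendar.ZONE_OFFSET) - jobTime.get(Calendar.ZONE_OFFSET)) / (1000 * 60);
     System.out.println("DataSet TZ offset - Job TZ offset = " + diffMinutes + " minutes");
   }
 }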
Example No. 6
 /**
  * Used in defining the frequency in 'minute' unit.
  *
  * <p>domain: <code> val &gt; 0</code> and should be an integer.
  *
  * @param val frequency in number of minutes.
  * @return number of minutes; also sets the frequency timeunit to "minute"
  */
 public static int ph1_coord_minutes(int val) {
   val = ParamChecker.checkGTZero(val, "n");
   ELEvaluator eval = ELEvaluator.getCurrent();
   eval.setVariable("timeunit", TimeUnit.MINUTE);
   eval.setVariable("endOfDuration", TimeUnit.NONE);
   return val;
 }
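A minimal sketch of the convention used here, with a plain Map standing in for Oozie's ELEvaluator variables: frequency EL functions such as the one above record the chosen time unit and end-of-duration flag as evaluator variables, and later resolution steps (as in Example No. 2) read them back with MINUTE/NONE as defaults. The mapping of coord:minutes(...) to this function is assumed from its naming.

 import java.util.HashMap;
 import java.util.Map;

 public class FrequencyUnitSketch {
   enum TimeUnit { MINUTE, MONTH, END_OF_MONTH, NONE }

   public static void main(String[] args) {
     Map<String, Object> vars = new HashMap<>();
     // A minute-based frequency function would record:
     vars.put("timeunit", TimeUnit.MINUTE);
     vars.put("endOfDuration", TimeUnit.NONE);

     // Later resolution reads the variables back, falling back to MINUTE/NONE when unset:
     TimeUnit unit =
         vars.get("timeunit") == null ? TimeUnit.MINUTE : (TimeUnit) vars.get("timeunit");
     TimeUnit eod =
         vars.get("endOfDuration") == null ? TimeUnit.NONE : (TimeUnit) vars.get("endOfDuration");
     System.out.println(unit + " " + eod); // MINUTE NONE
   }
 }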
Example No. 7
 /**
  * Return the coordinator job name.
  *
  * @return coordinator name
  */
 public static String ph2_coord_name() throws Exception {
   ELEvaluator eval = ELEvaluator.getCurrent();
   SyncCoordAction action =
       ParamChecker.notNull(
           (SyncCoordAction) eval.getVariable(COORD_ACTION), "Coordinator Action");
   return action.getName();
 }
Example No. 8
 /**
  * Return the nominal time, i.e. the coordinator action creation (materialization) time.
  *
  * @return coordinator action creation or materialization date-time
  * @throws Exception if unable to format the Date object to a String
  */
 public static String ph2_coord_nominalTime() throws Exception {
   ELEvaluator eval = ELEvaluator.getCurrent();
   SyncCoordAction action =
       ParamChecker.notNull(
           (SyncCoordAction) eval.getVariable(COORD_ACTION), "Coordinator Action");
   return DateUtils.formatDateUTC(action.getNominalTime());
 }
Example No. 9
 /**
  * Used in defining the frequency in 'month' unit and specifying the "end of month" property.
  *
  * <p>Every instance will start at the first day of each month at 00:00 hours.
  *
  * <p>domain: <code> val &gt; 0</code> and should be an integer.
  *
  * @param val : frequency in number of months.
  * @return number of months; also sets the frequency timeunit to "month" and the end_of_duration
  *     flag to "month"
  */
 public static int ph1_coord_endOfMonths(int val) {
   val = ParamChecker.checkGTZero(val, "n");
   ELEvaluator eval = ELEvaluator.getCurrent();
   eval.setVariable("timeunit", TimeUnit.MONTH);
   eval.setVariable("endOfDuration", TimeUnit.END_OF_MONTH);
   return val;
 }
Example No. 10
 /**
  * Read the coordinator application XML and validate it against the coordinator schema.
  *
  * @return validated coordinator XML
  * @throws CoordinatorJobException thrown if unable to read or validate the coordinator XML
  */
 private String readAndValidateXml() throws CoordinatorJobException {
   String appPath =
       ParamChecker.notEmpty(
           conf.get(OozieClient.COORDINATOR_APP_PATH), OozieClient.COORDINATOR_APP_PATH);
   String coordXml = readDefinition(appPath);
   validateXml(coordXml);
   return coordXml;
 }
Example No. 11
 /**
  * The constructor for class {@link CoordRerunXCommand}.
  *
  * @param jobId the job id
  * @param rerunType rerun type: {@link RestConstants#JOB_COORD_RERUN_DATE} or {@link
  *     RestConstants#JOB_COORD_RERUN_ACTION}
  * @param scope the rerun scope for the given rerunType, separated by ","
  * @param refresh true if the user wants to refresh input/output dataset URLs
  * @param noCleanup true if the user does not want output events cleaned up for the given rerun
  *     actions
  * @param failed true if the user wants to rerun only failed nodes
  * @param actionRunConf configuration values for actions
  */
 public CoordRerunXCommand(
     String jobId,
     String rerunType,
     String scope,
     boolean refresh,
     boolean noCleanup,
     boolean failed,
     Configuration actionRunConf) {
   super("coord_rerun", "coord_rerun", 1);
   this.jobId = ParamChecker.notEmpty(jobId, "jobId");
   this.rerunType = ParamChecker.notEmpty(rerunType, "rerunType");
   this.scope = ParamChecker.notEmpty(scope, "scope");
   this.refresh = refresh;
   this.noCleanup = noCleanup;
   this.failed = failed;
   this.actionRunConf = actionRunConf;
 }
Example No. 12
  /**
   * Merge default configuration with user-defined configuration.
   *
   * @throws CommandException thrown if failed to read or merge configurations
   */
  protected void mergeDefaultConfig() throws CommandException {
    Path configDefault = null;
    try {
      String coordAppPathStr = conf.get(OozieClient.COORDINATOR_APP_PATH);
      Path coordAppPath = new Path(coordAppPathStr);
      String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
      String group =
          ParamChecker.notEmpty(conf.get(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
      FileSystem fs =
          Services.get()
              .get(HadoopAccessorService.class)
              .createFileSystem(user, group, coordAppPath.toUri(), new Configuration());

      // app path could be a directory
      if (!fs.isFile(coordAppPath)) {
        configDefault = new Path(coordAppPath, CONFIG_DEFAULT);
      } else {
        configDefault = new Path(coordAppPath.getParent(), CONFIG_DEFAULT);
      }

      if (fs.exists(configDefault)) {
        Configuration defaultConf = new XConfiguration(fs.open(configDefault));
        PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
        XConfiguration.injectDefaults(defaultConf, conf);
      } else {
        LOG.info("configDefault Doesn't exist " + configDefault);
      }
      PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);

      // Resolving all variables in the job properties.
      // This ensures the Hadoop Configuration semantics are preserved.
      XConfiguration resolvedVarsConf = new XConfiguration();
      for (Map.Entry<String, String> entry : conf) {
        resolvedVarsConf.set(entry.getKey(), conf.get(entry.getKey()));
      }
      conf = resolvedVarsConf;
    } catch (IOException e) {
      throw new CommandException(
          ErrorCode.E0702,
          e.getMessage() + " : Problem reading default config " + configDefault,
          e);
    } catch (HadoopAccessorException e) {
      throw new CommandException(e);
    }
    LOG.debug("Merged CONF :" + XmlUtils.prettyPrint(conf).toString());
  }
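A standalone sketch (an assumed, simplified stand-in for XConfiguration.injectDefaults, not Oozie's implementation) of the merge semantics used above: a key from config-default is copied only when the user configuration does not already define it, so user values always win.

 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;

 public class InjectDefaultsSketch {
   // Copy a key from the defaults only when the target configuration does not define it.
   static void injectDefaults(Configuration defaults, Configuration target) {
     for (Map.Entry<String, String> entry : defaults) {
       if (target.get(entry.getKey()) == null) {
         target.set(entry.getKey(), entry.getValue());
       }
     }
   }

   public static void main(String[] args) {
     Configuration defaults = new Configuration(false);
     defaults.set("oozie.coord.application.path", "hdfs://nn/apps/coord"); // example values only
     defaults.set("queueName", "default");
     Configuration userConf = new Configuration(false);
     userConf.set("queueName", "research"); // the user's value wins over the default
     injectDefaults(defaults, userConf);
     System.out.println(userConf.get("queueName"));                    // research
     System.out.println(userConf.get("oozie.coord.application.path")); // hdfs://nn/apps/coord
   }
 }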
Example No. 13
 /**
  * Return a {@link org.apache.oozie.client.OozieClient} for LocalOozie configured for a given
  * user.
  *
  * <p>The following methods of the client are NOP in the returned instance: {@link
  * org.apache.oozie.client.OozieClient#validateWSVersion}, {@link
  * org.apache.oozie.client.OozieClient#setHeader}, {@link
  * org.apache.oozie.client.OozieClient#getHeader}, {@link
  * org.apache.oozie.client.OozieClient#removeHeader}, {@link
  * org.apache.oozie.client.OozieClient#getHeaderNames} and {@link
  * org.apache.oozie.client.OozieClient#setSafeMode}.
  *
  * @param user user name to use in LocalOozie for running workflows.
  * @return a {@link org.apache.oozie.client.OozieClient} for LocalOozie configured for the given
  *     user.
  */
 public static OozieClient getClient(String user) {
   if (!localOozieActive) {
     throw new IllegalStateException("LocalOozie is not initialized");
   }
   ParamChecker.notEmpty(user, "user");
   DagEngine dagEngine = Services.get().get(DagEngineService.class).getDagEngine(user, "undef");
   return new LocalOozieClient(dagEngine);
 }
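A hedged usage sketch: LocalOozie must already be running when getClient is called, otherwise the IllegalStateException above is thrown. The start()/stop() lifecycle calls are assumed from the usual embedded LocalOozie testing pattern.

 import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.local.LocalOozie;

 public class LocalOozieClientSketch {
   public static void main(String[] args) throws Exception {
     LocalOozie.start(); // assumed lifecycle call; getClient fails if LocalOozie is not active
     try {
       OozieClient client = LocalOozie.getClient("testuser");
       // submit or query workflow jobs through the returned client here
     } finally {
       LocalOozie.stop();
     }
   }
 }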
Example No. 14
  /**
   * Read coordinator definition.
   *
   * @param appPath application path.
   * @return coordinator definition.
   * @throws CoordinatorJobException thrown if the definition could not be read.
   */
  protected String readDefinition(String appPath) throws CoordinatorJobException {
    String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
    String group = ParamChecker.notEmpty(conf.get(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
    // Configuration confHadoop = CoordUtils.getHadoopConf(conf);
    try {
      URI uri = new URI(appPath);
      LOG.debug("user ="******" group =" + group);
      FileSystem fs =
          Services.get()
              .get(HadoopAccessorService.class)
              .createFileSystem(user, group, uri, new Configuration());
      Path appDefPath = null;

      // app path could be a directory
      Path path = new Path(uri.getPath());
      // check file exists for dataset include file, app xml already checked
      if (!fs.exists(path)) {
        throw new URISyntaxException(path.toString(), "path not existed : " + path.toString());
      }
      if (!fs.isFile(path)) {
        appDefPath = new Path(path, COORDINATOR_XML_FILE);
      } else {
        appDefPath = path;
      }

      Reader reader = new InputStreamReader(fs.open(appDefPath));
      StringWriter writer = new StringWriter();
      IOUtils.copyCharStream(reader, writer);
      return writer.toString();
    } catch (IOException ex) {
      LOG.warn("IOException :" + XmlUtils.prettyPrint(conf), ex);
      throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex);
    } catch (URISyntaxException ex) {
      LOG.warn("URISyException :" + ex.getMessage());
      throw new CoordinatorJobException(ErrorCode.E1002, appPath, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
      throw new CoordinatorJobException(ex);
    } catch (Exception ex) {
      LOG.warn("Exception :", ex);
      throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex);
    }
  }
Example No. 15
 /**
  * Check if all given actions are eligible to rerun.
  *
  * @param coordActions list of CoordinatorActionBean
  * @return true if all actions are eligible to rerun
  */
 private static boolean checkAllActionsRunnable(List<CoordinatorActionBean> coordActions) {
   ParamChecker.notNull(coordActions, "Coord actions to be rerun");
   boolean ret = false;
   for (CoordinatorActionBean coordAction : coordActions) {
     ret = true;
     if (!coordAction.isTerminalStatus()) {
       ret = false;
       break;
     }
   }
   return ret;
 }
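An equivalent, more direct formulation of the loop above (a sketch using the same Oozie types, not a replacement for the method): an empty list yields false, and the first non-terminal action also yields false.

 private static boolean checkAllActionsRunnableAlt(List<CoordinatorActionBean> coordActions) {
   ParamChecker.notNull(coordActions, "Coord actions to be rerun");
   if (coordActions.isEmpty()) {
     return false; // same behaviour as the original: an empty list is not rerunnable
   }
   for (CoordinatorActionBean coordAction : coordActions) {
     if (!coordAction.isTerminalStatus()) {
       return false;
     }
   }
   return true;
 }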
Example No. 16
  /**
   * Get the currently missing and available dependencies after checking the list of known missing
   * dependencies against the source.
   *
   * @param missingDependencies known missing dependencies
   * @param actionConf Configuration for the action
   * @param stopOnFirstMissing if true, stop checking the rest of the list once a missing
   *     dependency is found
   * @return ActionDependency which has the list of missing and available dependencies
   * @throws CommandException thrown if a dependency URI is invalid or its availability cannot be
   *     checked
   */
  public static ActionDependency checkForAvailability(
      String[] missingDependencies, Configuration actionConf, boolean stopOnFirstMissing)
      throws CommandException {
    final XLog LOG =
        XLog.getLog(DependencyChecker.class); // OOZIE-1251. Don't initialize as static variable.
    String user =
        ParamChecker.notEmpty(actionConf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
    List<String> missingDeps = new ArrayList<String>();
    List<String> availableDeps = new ArrayList<String>();
    URIHandlerService uriService = Services.get().get(URIHandlerService.class);
    boolean continueChecking = true;
    try {
      for (int index = 0; index < missingDependencies.length; index++) {
        if (continueChecking) {
          String dependency = missingDependencies[index];

          URI uri = new URI(dependency);
          URIHandler uriHandler = uriService.getURIHandler(uri);
          LOG.debug("Checking for the availability of dependency [{0}] ", dependency);
          if (uriHandler.exists(uri, actionConf, user)) {
            LOG.debug("Dependency [{0}] is available", dependency);
            availableDeps.add(dependency);
          } else {
            LOG.debug("Dependency [{0}] is missing", dependency);
            missingDeps.add(dependency);
            if (stopOnFirstMissing) {
              continueChecking = false;
            }
          }

        } else {
          missingDeps.add(missingDependencies[index]);
        }
      }
    } catch (URISyntaxException e) {
      throw new CommandException(ErrorCode.E0906, e.getMessage(), e);
    } catch (URIHandlerException e) {
      throw new CommandException(e);
    }
    return new ActionDependency(missingDeps, availableDeps);
  }
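The stopOnFirstMissing flag above is easy to miss: once one dependency is found missing, the remaining entries are added to the missing list without being checked at all. A standalone sketch of that partitioning behaviour, with plain strings and a Predicate standing in for the URIHandler existence check:

 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.function.Predicate;

 public class DependencyPartitionSketch {
   // Returns [missing, available]; after the first miss (if stopOnFirstMissing), the rest are
   // carried over as missing without calling the existence check.
   static List<List<String>> partition(
       String[] deps, Predicate<String> exists, boolean stopOnFirstMissing) {
     List<String> missing = new ArrayList<>();
     List<String> available = new ArrayList<>();
     boolean continueChecking = true;
     for (String dep : deps) {
       if (continueChecking && exists.test(dep)) {
         available.add(dep);
       } else {
         missing.add(dep);
         if (stopOnFirstMissing) {
           continueChecking = false;
         }
       }
     }
     return Arrays.asList(missing, available);
   }

   public static void main(String[] args) {
     String[] deps = {"hdfs://a", "hdfs://b", "hdfs://c"};
     // Pretend only "hdfs://a" exists; with stopOnFirstMissing, "hdfs://c" is never checked.
     System.out.println(partition(deps, d -> d.equals("hdfs://a"), true));
     // prints [[hdfs://b, hdfs://c], [hdfs://a]]
   }
 }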
Example No. 17
 public CoordActionNotificationXCommand(CoordinatorActionBean actionBean) {
   super("coord_action_notification", "coord_action_notification", 0);
   ParamChecker.notNull(actionBean, "Action Bean");
   this.actionBean = actionBean;
 }
Example No. 18
 public BundleJobSuspendXCommand(String id) {
   super("bundle_suspend", "bundle_suspend", 1);
   this.jobId = ParamChecker.notEmpty(id, "id");
 }
Example No. 19
  /**
   * @param offset offset of the latest instance to resolve; must be &lt;= 0
   * @return n-th available latest instance date-time for a SYNC data-set
   * @throws Exception
   */
  private static String coord_latest_sync(int offset) throws Exception {
    if (offset > 0) {
      throw new RuntimeException(
          "For latest there is no meaning of a positive instance. n should be <= 0: " + offset);
    }
    ELEvaluator eval = ELEvaluator.getCurrent();
    String retVal = "";
    int datasetFrequency = (int) getDSFrequency(); // in minutes
    TimeUnit dsTimeUnit = getDSTimeUnit();
    int[] instCount = new int[1];
    Calendar nominalInstanceCal = getCurrentInstance(getActualTime(), instCount);
    if (nominalInstanceCal != null) {
      Calendar initInstance = getInitialInstanceCal();
      SyncCoordDataset ds = (SyncCoordDataset) eval.getVariable(DATASET);
      if (ds == null) {
        throw new RuntimeException("Associated Dataset should be defined with key " + DATASET);
      }
      String uriTemplate = ds.getUriTemplate();
      Configuration conf = (Configuration) eval.getVariable(CONFIGURATION);
      if (conf == null) {
        throw new RuntimeException(
            "Associated Configuration should be defined with key " + CONFIGURATION);
      }
      int available = 0;
      boolean resolved = false;
      String user =
          ParamChecker.notEmpty(
              (String) eval.getVariable(OozieClient.USER_NAME), OozieClient.USER_NAME);
      String group =
          ParamChecker.notEmpty(
              (String) eval.getVariable(OozieClient.GROUP_NAME), OozieClient.GROUP_NAME);
      String doneFlag = ds.getDoneFlag();
      while (nominalInstanceCal.compareTo(initInstance) >= 0) {
        ELEvaluator uriEval = getUriEvaluator(nominalInstanceCal);
        String uriPath = uriEval.evaluate(uriTemplate, String.class);
        String pathWithDoneFlag = uriPath;
        if (doneFlag.length() > 0) {
          pathWithDoneFlag += "/" + doneFlag;
        }
        if (isPathAvailable(pathWithDoneFlag, user, group, conf)) {
          XLog.getLog(CoordELFunctions.class)
              .debug("Found latest(" + available + "): " + pathWithDoneFlag);
          if (available == offset) {
            XLog.getLog(CoordELFunctions.class).debug("Found Latest File: " + pathWithDoneFlag);
            resolved = true;
            retVal = DateUtils.formatDateUTC(nominalInstanceCal);
            eval.setVariable("resolved_path", uriPath);
            break;
          }

          available--;
        }
        // nominalInstanceCal.add(dsTimeUnit.getCalendarUnit(),
        // -datasetFrequency);
        nominalInstanceCal = (Calendar) initInstance.clone();
        instCount[0]--;
        nominalInstanceCal.add(dsTimeUnit.getCalendarUnit(), instCount[0] * datasetFrequency);
        // DateUtils.moveToEnd(nominalInstanceCal, getDSEndOfFlag());
      }
      if (!resolved) {
        // return unchanged latest function with variable 'is_resolved'
        // to 'false'
        eval.setVariable("is_resolved", Boolean.FALSE);
        retVal = "${coord:latest(" + offset + ")}";
      } else {
        eval.setVariable("is_resolved", Boolean.TRUE);
      }
    } else { // No feasible nominal time
      eval.setVariable("is_resolved", Boolean.FALSE);
    }
    return retVal;
  }
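A standalone sketch (not Oozie code) of the backward walk above: starting from the current instance and stepping back one dataset instance at a time, available instances are counted downward (0, -1, -2, ...) until the requested non-positive offset is matched, or the search is reported as unresolved.

 public class LatestWalkSketch {
   // Returns the index of the instance matching the given non-positive offset, or -1 if
   // unresolved (analogous to setting is_resolved=false above).
   static int latest(boolean[] available, int currentIndex, int offset) {
     int found = 0; // 0 for the most recent available instance, -1 for the next older, ...
     for (int i = currentIndex; i >= 0; i--) {
       if (available[i]) {
         if (found == offset) {
           return i;
         }
         found--;
       }
     }
     return -1;
   }

   public static void main(String[] args) {
     boolean[] available = {true, false, true, true, false};
     System.out.println(latest(available, 4, 0));  // 3 : most recent available instance
     System.out.println(latest(available, 4, -1)); // 2
     System.out.println(latest(available, 4, -2)); // 0
   }
 }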
Example No. 20
  /**
   * Resolve basic entities using job Configuration.
   *
   * @param conf : Job configuration
   * @param appXml : Original job XML
   * @param coordJob : Coordinator job bean to be populated.
   * @return Resolved job XML element.
   * @throws CoordinatorJobException thrown if failed to resolve basic entities
   * @throws Exception thrown if failed to resolve basic entities
   */
  @SuppressWarnings("unchecked")
  protected Element resolveInitial(Configuration conf, String appXml, CoordinatorJobBean coordJob)
      throws CoordinatorJobException, Exception {
    Element eAppXml = XmlUtils.parseXml(appXml);
    // job's main attributes
    // frequency
    String val = resolveAttribute("frequency", eAppXml, evalFreq);
    int ival = ParamChecker.checkInteger(val, "frequency");
    ParamChecker.checkGTZero(ival, "frequency");
    coordJob.setFrequency(ival);
    TimeUnit tmp =
        (evalFreq.getVariable("timeunit") == null)
            ? TimeUnit.MINUTE
            : ((TimeUnit) evalFreq.getVariable("timeunit"));
    addAnAttribute("freq_timeunit", eAppXml, tmp.toString());
    // TimeUnit
    coordJob.setTimeUnit(CoordinatorJob.Timeunit.valueOf(tmp.toString()));
    // End Of Duration
    tmp =
        evalFreq.getVariable("endOfDuration") == null
            ? TimeUnit.NONE
            : ((TimeUnit) evalFreq.getVariable("endOfDuration"));
    addAnAttribute("end_of_duration", eAppXml, tmp.toString());
    // coordJob.setEndOfDuration(tmp) // TODO: Add new attribute in Job bean

    // Application name
    if (this.coordName == null) {
      val = resolveAttribute("name", eAppXml, evalNofuncs);
      coordJob.setAppName(val);
    } else {
      // this coord job is created from bundle
      coordJob.setAppName(this.coordName);
    }

    // start time
    val = resolveAttribute("start", eAppXml, evalNofuncs);
    ParamChecker.checkUTC(val, "start");
    coordJob.setStartTime(DateUtils.parseDateUTC(val));
    // end time
    val = resolveAttribute("end", eAppXml, evalNofuncs);
    ParamChecker.checkUTC(val, "end");
    coordJob.setEndTime(DateUtils.parseDateUTC(val));
    // Time zone
    val = resolveAttribute("timezone", eAppXml, evalNofuncs);
    ParamChecker.checkTimeZone(val, "timezone");
    coordJob.setTimeZone(val);

    // controls
    val =
        resolveTagContents(
            "timeout", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
    if (val == "") {
      val = Services.get().getConf().get(CONF_DEFAULT_TIMEOUT_NORMAL);
    }

    ival = ParamChecker.checkInteger(val, "timeout");
    if (ival < 0 || ival > Services.get().getConf().getInt(CONF_DEFAULT_MAX_TIMEOUT, 129600)) {
      ival = Services.get().getConf().getInt(CONF_DEFAULT_MAX_TIMEOUT, 129600);
    }
    coordJob.setTimeout(ival);

    val =
        resolveTagContents(
            "concurrency", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
    if (val == null || val.isEmpty()) {
      val = Services.get().getConf().get(CONF_DEFAULT_CONCURRENCY, "-1");
    }
    ival = ParamChecker.checkInteger(val, "concurrency");
    coordJob.setConcurrency(ival);

    val =
        resolveTagContents(
            "throttle", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
    if (val == null || val.isEmpty()) {
      int defaultThrottle = Services.get().getConf().getInt(CONF_DEFAULT_THROTTLE, 12);
      ival = defaultThrottle;
    } else {
      ival = ParamChecker.checkInteger(val, "throttle");
    }
    int maxQueue = Services.get().getConf().getInt(CONF_QUEUE_SIZE, 10000);
    float factor = Services.get().getConf().getFloat(CONF_MAT_THROTTLING_FACTOR, 0.10f);
    int maxThrottle = (int) (maxQueue * factor);
    if (ival > maxThrottle || ival < 1) {
      ival = maxThrottle;
    }
    LOG.debug("max throttle " + ival);
    coordJob.setMatThrottling(ival);

    val =
        resolveTagContents(
            "execution", eAppXml.getChild("controls", eAppXml.getNamespace()), evalNofuncs);
    if (val == "") {
      val = Execution.FIFO.toString();
    }
    coordJob.setExecution(Execution.valueOf(val));
    String[] acceptedVals = {
      Execution.LIFO.toString(), Execution.FIFO.toString(), Execution.LAST_ONLY.toString()
    };
    ParamChecker.isMember(val, acceptedVals, "execution");

    // datasets
    resolveTagContents(
        "include", eAppXml.getChild("datasets", eAppXml.getNamespace()), evalNofuncs);
    // for each data set
    resolveDataSets(eAppXml);
    HashMap<String, String> dataNameList = new HashMap<String, String>();
    resolveIOEvents(eAppXml, dataNameList);

    resolveTagContents(
        "app-path",
        eAppXml
            .getChild("action", eAppXml.getNamespace())
            .getChild("workflow", eAppXml.getNamespace()),
        evalNofuncs);
    // TODO: If action or workflow tag is missing, NullPointerException will
    // occur
    Element configElem =
        eAppXml
            .getChild("action", eAppXml.getNamespace())
            .getChild("workflow", eAppXml.getNamespace())
            .getChild("configuration", eAppXml.getNamespace());
    evalData =
        CoordELEvaluator.createELEvaluatorForDataEcho(conf, "coord-job-submit-data", dataNameList);
    if (configElem != null) {
      for (Element propElem :
          (List<Element>) configElem.getChildren("property", configElem.getNamespace())) {
        resolveTagContents("name", propElem, evalData);
        // Check the data integrity of the properties only, without modifying the XML.
        Element tmpProp = (Element) propElem.clone();
        resolveTagContents("value", tmpProp, evalData);
      }
    }
    resolveSLA(eAppXml, coordJob);
    return eAppXml;
  }
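A small worked sketch of the materialization-throttling clamp above, using the default values named in the method (queue size 10000, factor 0.10, default throttle 12): the ceiling is 10000 * 0.10 = 1000, and any requested value outside [1, 1000] falls back to that ceiling.

 public class ThrottleClampSketch {
   static int clampThrottle(int requested, int maxQueue, float factor) {
     int maxThrottle = (int) (maxQueue * factor);
     return (requested > maxThrottle || requested < 1) ? maxThrottle : requested;
   }

   public static void main(String[] args) {
     System.out.println(clampThrottle(12, 10000, 0.10f));   // 12   (default throttle, in range)
     System.out.println(clampThrottle(5000, 10000, 0.10f)); // 1000 (clamped to maxQueue * factor)
     System.out.println(clampThrottle(0, 10000, 0.10f));    // 1000 (values below 1 also clamped)
   }
 }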
Example No. 21
 public BulkJPAExecutor(Map<String, List<String>> bulkFilter, int start, int len) {
   ParamChecker.notNull(bulkFilter, "bulkFilter");
   this.bulkFilter = bulkFilter;
   this.start = start;
   this.len = len;
 }
Example No. 22
  public synchronized boolean signal(String executionPath, String signalValue)
      throws WorkflowException {
    ParamChecker.notEmpty(executionPath, "executionPath");
    ParamChecker.notNull(signalValue, "signalValue");
    log.debug(
        XLog.STD,
        "Signaling job execution path [{0}] signal value [{1}]",
        executionPath,
        signalValue);
    if (status != Status.RUNNING) {
      throw new WorkflowException(ErrorCode.E0716);
    }
    NodeInstance nodeJob = executionPaths.get(executionPath);
    if (nodeJob == null) {
      status = Status.FAILED;
      log.error("invalid execution path [{0}]", executionPath);
    }
    NodeDef nodeDef = null;
    if (!status.isEndState()) {
      nodeDef = def.getNode(nodeJob.nodeName);
      if (nodeDef == null) {
        status = Status.FAILED;
        log.error("invalid transition [{0}]", nodeJob.nodeName);
      }
    }
    if (!status.isEndState()) {
      NodeHandler nodeHandler = newInstance(nodeDef.getHandlerClass());
      boolean exiting = true;

      Context context = new Context(nodeDef, executionPath, signalValue);
      if (!nodeJob.started) {
        try {
          nodeHandler.loopDetection(context);
          exiting = nodeHandler.enter(context);
          nodeJob.started = true;
        } catch (WorkflowException ex) {
          status = Status.FAILED;
          List<String> killedNodes = terminateNodes(Status.KILLED);
          if (killedNodes.size() > 1) {
            log.warn(
                XLog.STD,
                "Workflow completed [{0}], killing [{1}] running nodes",
                status,
                killedNodes.size());
          }
          throw ex;
        }
      }

      if (exiting) {
        List<String> pathsToStart = new ArrayList<String>();
        List<String> fullTransitions;
        try {
          fullTransitions = nodeHandler.multiExit(context);
          int last = fullTransitions.size() - 1;
          // TEST THIS
          if (last >= 0) {
            String transitionTo = getTransitionNode(fullTransitions.get(last));
            if (nodeDef instanceof ForkNodeDef) {
              transitionTo = "*"; // WF action cannot hold all transitions for a fork.
              // transitions are hardcoded in the WF app.
            }
            persistentVars.put(
                nodeDef.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + TRANSITION_TO,
                transitionTo);
          }
        } catch (WorkflowException ex) {
          status = Status.FAILED;
          throw ex;
        }

        if (context.status == Status.KILLED) {
          status = Status.KILLED;
          log.debug(XLog.STD, "Completing job, kill node [{0}]", nodeJob.nodeName);
        } else {
          if (context.status == Status.FAILED) {
            status = Status.FAILED;
            log.debug(XLog.STD, "Completing job, fail node [{0}]", nodeJob.nodeName);
          } else {
            if (context.status == Status.SUCCEEDED) {
              status = Status.SUCCEEDED;
              log.debug(XLog.STD, "Completing job, end node [{0}]", nodeJob.nodeName);
            }
            /*
                            else if (context.status == Status.SUSPENDED) {
                                status = Status.SUSPENDED;
                                log.debug(XLog.STD, "Completing job, end node [{0}]", nodeJob.nodeName);
                            }
            */
            else {
              for (String fullTransition : fullTransitions) {
                // this is the whole trick for forking, we need the
                // executionpath and the transition
                // in the case of no forking last element of
                // executionpath is different from transition
                // in the case of forking they are the same

                log.debug(
                    XLog.STD,
                    "Exiting node [{0}] with transition[{1}]",
                    nodeJob.nodeName,
                    fullTransition);

                String execPathFromTransition = getExecutionPath(fullTransition);
                String transition = getTransitionNode(fullTransition);
                def.validateTransition(nodeJob.nodeName, transition);

                NodeInstance nodeJobInPath = executionPaths.get(execPathFromTransition);
                if ((nodeJobInPath == null) || (!transition.equals(nodeJobInPath.nodeName))) {
                  // TODO explain this IF better
                  // If the WfJob is signaled with the parent
                  // execution executionPath again
                  // The Fork node will execute again.. and replace
                  // the Node WorkflowJobBean
                  // so this is required to prevent that..
                  // Question : Should we throw an error in this case
                  // ??
                  executionPaths.put(execPathFromTransition, new NodeInstance(transition));
                  pathsToStart.add(execPathFromTransition);
                }
              }
              // signal all new synch transitions
              for (String pathToStart : pathsToStart) {
                signal(pathToStart, "::synch::");
              }
            }
          }
        }
      }
    }
    if (status.isEndState()) {
      if (status == Status.FAILED) {
        List<String> failedNodes = terminateNodes(status);
        log.warn(
            XLog.STD,
            "Workflow completed [{0}], failing [{1}] running nodes",
            status,
            failedNodes.size());
      } else {
        List<String> killedNodes = terminateNodes(Status.KILLED);

        if (killedNodes.size() > 1) {
          log.warn(
              XLog.STD,
              "Workflow completed [{0}], killing [{1}] running nodes",
              status,
              killedNodes.size());
        }
      }
    }
    return status.isEndState();
  }
Example No. 23
 /**
  * The constructor for class {@link BundleJobChangeXCommand}.
  *
  * @param id bundle job id
  * @param changeValue change value
  * @throws CommandException thrown if failed to change the bundle
  */
 public BundleJobChangeXCommand(String id, String changeValue) throws CommandException {
   super("bundle_change", "bundle_change", 1);
   this.jobId = ParamChecker.notEmpty(id, "id");
   this.changeValue = ParamChecker.notEmpty(changeValue, "changeValue");
 }
Example No. 24
 public SignalXCommand(String jobId, String actionId) {
   this("signal", 1, jobId);
   this.actionId = ParamChecker.notEmpty(actionId, "actionId");
 }
Example No. 25
 public SignalXCommand(String name, int priority, String jobId) {
   super(name, name, priority);
   this.jobId = ParamChecker.notEmpty(jobId, "jobId");
 }