// Effective time will be right after the last running instance.
 private String getEffectiveTime(
     Entity entity, String cluster, Collection<InstanceState> instances) throws FalconException {
   if (instances == null || instances.isEmpty()) {
     return SchemaHelper.formatDateUTC(DateUtil.now());
   } else {
      List<InstanceState> instanceList = new ArrayList<>(instances);
     Collections.sort(
         instanceList,
         new Comparator<InstanceState>() {
           @Override
            public int compare(InstanceState x, InstanceState y) {
              return Integer.compare(
                  x.getInstance().getInstanceSequence(), y.getInstance().getInstanceSequence());
            }
         });
     // Get the last element as the list is sorted in ascending order
     Date lastRunningInstanceTime =
         instanceList.get(instanceList.size() - 1).getInstance().getInstanceTime().toDate();
     Cluster clusterEntity = ConfigurationStore.get().get(EntityType.CLUSTER, cluster);
     // Offset the time by a few seconds, else nextStartTime will be same as the reference time.
     Date effectiveTime =
         EntityUtil.getNextStartTime(
             entity, clusterEntity, DateUtil.offsetTime(lastRunningInstanceTime, 10));
     return SchemaHelper.formatDateUTC(effectiveTime);
   }
 }
  private void instrumentAlert(WorkflowExecutionContext context) {
    String clusterName = context.getClusterName();
    String entityName = context.getEntityName();
    String entityType = context.getEntityType();
    String operation = context.getOperation().name();
    String workflowId = context.getWorkflowId();
    String workflowUser = context.getWorkflowUser();
    String nominalTime = context.getNominalTimeAsISO8601();
    String runId = String.valueOf(context.getWorkflowRunId());
    Date now = new Date();
    // Start and/or End time may not be set in case of workflow suspend
    Date endTime;
    if (context.getWorkflowEndTime() == 0) {
      endTime = now;
    } else {
      endTime = new Date(context.getWorkflowEndTime());
    }

    Date startTime;
    if (context.getWorkflowStartTime() == 0) {
      startTime = now;
    } else {
      startTime = new Date(context.getWorkflowStartTime());
    }
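     // Duration is reported in nanoseconds (millisecond delta multiplied by 1,000,000).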
    Long duration = (endTime.getTime() - startTime.getTime()) * 1000000;

    if (context.hasWorkflowFailed()) {
      GenericAlert.instrumentFailedInstance(
          clusterName,
          entityType,
          entityName,
          nominalTime,
          workflowId,
          workflowUser,
          runId,
          operation,
          SchemaHelper.formatDateUTC(startTime),
          "",
          "",
          duration);
    } else {
      GenericAlert.instrumentSucceededInstance(
          clusterName,
          entityType,
          entityName,
          nominalTime,
          workflowId,
          workflowUser,
          runId,
          operation,
          SchemaHelper.formatDateUTC(startTime),
          duration);
    }
  }
 @Override
 public String touch(Entity entity, String cluster, Boolean skipDryRun) throws FalconException {
   EntityID id = new EntityID(entity);
    // Ideally the state store should have all entities, but check anyway.
   if (STATE_STORE.entityExists(id)) {
     Date endTime = EntityUtil.getEndTime(entity, cluster);
     if (endTime.before(DateUtil.now())) {
       throw new FalconException(
           "Entity's end time "
               + SchemaHelper.formatDateUTC(endTime)
               + " is before current time. Entity can't be touch-ed as it has completed.");
     }
     Collection<InstanceState> instances =
         STATE_STORE.getExecutionInstances(entity, cluster, InstanceState.getRunningStates());
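      // The running instances are used below to compute the effective time reported to the caller.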
     // touch should happen irrespective of the state the entity is in.
     DAGEngineFactory.getDAGEngine(cluster)
         .touch(entity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
     StringBuilder builder = new StringBuilder();
     builder
         .append(entity.toShortString())
         .append("/Effective Time: ")
         .append(getEffectiveTime(entity, cluster, instances));
     return builder.toString();
   }
   throw new FalconException("Could not find entity " + id + " in state store.");
 }
  @Override
  public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
    org.apache.falcon.entity.v0.feed.Cluster feedCluster =
        FeedHelper.getCluster(entity, cluster.getName());
    if (feedCluster == null) {
      return null;
    }

    COORDINATORAPP coord = new COORDINATORAPP();
    String coordName = getEntityName();
    coord.setName(coordName);
    Date endDate = feedCluster.getValidity().getEnd();
    coord.setEnd(SchemaHelper.formatDateUTC(endDate));
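     // If the feed validity has already ended, start the coordinator one minute before its end
     // time; otherwise start it now.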
     if (endDate.before(new Date())) {
      Date startDate = DateUtils.addMinutes(endDate, -1);
      coord.setStart(SchemaHelper.formatDateUTC(startDate));
    } else {
      coord.setStart(SchemaHelper.formatDateUTC(new Date()));
    }
    coord.setTimezone(entity.getTimezone().getID());
    TimeUnit timeUnit = entity.getFrequency().getTimeUnit();
    if (timeUnit == TimeUnit.hours || timeUnit == TimeUnit.minutes) {
      coord.setFrequency("${coord:hours(6)}");
    } else {
      coord.setFrequency("${coord:days(1)}");
    }

    Path coordPath = getBuildPath(buildPath);
    Properties props = createCoordDefaultConfiguration(coordName);

    WORKFLOW workflow = new WORKFLOW();
    Properties wfProps =
        OozieOrchestrationWorkflowBuilder.get(entity, cluster, Tag.RETENTION)
            .build(cluster, coordPath);
    workflow.setAppPath(getStoragePath(wfProps.getProperty(OozieEntityBuilder.ENTITY_PATH)));
    props.putAll(getProperties(coordPath, coordName));
    // Add the custom properties set in feed. Else, dryrun won't catch any missing props.
    props.putAll(EntityUtil.getEntityProperties(entity));
    workflow.setConfiguration(getConfig(props));
    ACTION action = new ACTION();
    action.setWorkflow(workflow);

    coord.setAction(action);

    Path marshalPath = marshal(cluster, coord, coordPath);
    return Arrays.asList(getProperties(marshalPath, coordName));
  }
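   // Overrides the cluster name and validity start/end with values supplied via extension
   // properties, when present.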
  private static void bindClusterProperties(
      final Cluster cluster, final Properties extensionProperties) {
    String clusterName =
        extensionProperties.getProperty(ExtensionProperties.CLUSTER_NAME.getName());
    if (StringUtils.isNotEmpty(clusterName)) {
      cluster.setName(clusterName);
    }
    String clusterStartValidity =
        extensionProperties.getProperty(ExtensionProperties.VALIDITY_START.getName());
    if (StringUtils.isNotEmpty(clusterStartValidity)) {
      cluster.getValidity().setStart(SchemaHelper.parseDateUTC(clusterStartValidity));
    }

    String clusterEndValidity =
        extensionProperties.getProperty(ExtensionProperties.VALIDITY_END.getName());
    if (StringUtils.isNotEmpty(clusterEndValidity)) {
      cluster.getValidity().setEnd(SchemaHelper.parseDateUTC(clusterEndValidity));
    }
  }
 private Date validateTime(String time) throws FalconCLIException {
   if (time != null && !time.isEmpty()) {
     try {
       return SchemaHelper.parseDateUTC(time);
     } catch (Exception e) {
       throw new FalconCLIException("Time " + time + " is not valid", e);
     }
   }
   return null;
 }
  @Override
  public String update(Entity oldEntity, Entity newEntity, String cluster, Boolean skipDryRun)
      throws FalconException {
    org.apache.falcon.entity.v0.cluster.Cluster clusterEntity =
        ConfigurationStore.get().get(EntityType.CLUSTER, cluster);
    boolean entityUpdated =
        UpdateHelper.isEntityUpdated(
            oldEntity,
            newEntity,
            cluster,
            EntityUtil.getLatestStagingPath(clusterEntity, oldEntity));
    StringBuilder result = new StringBuilder();
    if (!entityUpdated) {
       // Ideally should throw an exception, but keeping it backward-compatible.
      LOG.warn(
          "No relevant updates detected in the new entity definition for entity {}!",
          newEntity.getName());
      return result.toString();
    }

    Date oldEndTime = EntityUtil.getEndTime(oldEntity, cluster);
    Date newEndTime = EntityUtil.getEndTime(newEntity, cluster);
    if (newEndTime.before(DateUtil.now()) || newEndTime.before(oldEndTime)) {
      throw new FalconException(
          "New Entity's end time "
              + SchemaHelper.formatDateUTC(newEndTime)
              + " is before current time or before old end time. Entity can't be updated.");
    }

    // The steps required are the same as touch.
    DAGEngineFactory.getDAGEngine(cluster)
        .touch(newEntity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
    // Additionally, update the executor.
    // The update will kick in for new instances created and not for READY/WAITING instances, as
    // with Oozie.
     Collection<InstanceState> instances = new ArrayList<>();
     InstanceState lastInstance = STATE_STORE.getLastExecutionInstance(oldEntity, cluster);
     // Guard against entities that have no execution instance yet.
     if (lastInstance != null) {
       instances.add(lastInstance);
     }
    EXECUTION_SERVICE.getEntityExecutor(oldEntity, cluster).update(newEntity);

    result
        .append(newEntity.toShortString())
        .append("/Effective Time: ")
        .append(getEffectiveTime(newEntity, cluster, instances));
    return result.toString();
  }
  private static String getFileSystemFeedInstanceName(
      String feedInstancePath, Feed feed, Cluster cluster, String nominalTime)
      throws FalconException {
    Storage rawStorage = FeedHelper.createStorage(cluster, feed);
    String feedPathTemplate = rawStorage.getUriTemplate(LocationType.DATA);
    String instance = feedInstancePath;

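     // Strip the static fragments of the storage URI template from the instance path; what
     // remains is the instance's date portion (empty if the template carried no date pattern).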
    String[] elements = FeedDataPath.PATTERN.split(feedPathTemplate);
    for (String element : elements) {
      instance = instance.replaceFirst(element, "");
    }

    return StringUtils.isEmpty(instance)
        ? feed.getName() + "/" + nominalTime
        : feed.getName()
            + "/"
            + SchemaHelper.formatDateUTCToISO8601(instance, FEED_INSTANCE_FORMAT);
  }
  private InstancesResult doJobAction(
      JobAction action,
      Entity entity,
      Date start,
      Date end,
      Properties props,
      List<LifeCycle> lifeCycles,
      boolean isForced)
      throws FalconException {
    Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
    List<String> clusterList = getIncludedClusters(props, FALCON_INSTANCE_ACTION_CLUSTERS);
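     // An empty cluster list means the action applies to every cluster the entity is defined in.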
    APIResult.Status overallStatus = APIResult.Status.SUCCEEDED;
    int instanceCount = 0;

    Collection<InstanceState.STATE> states;
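     // Narrow down which instance states the requested action may operate on.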
    switch (action) {
      case KILL:
      case SUSPEND:
        states = InstanceState.getActiveStates();
        break;
      case RESUME:
        states = new ArrayList<>();
        states.add(InstanceState.STATE.SUSPENDED);
        break;
       case PARAMS:
         // Applicable for running, suspended and finished jobs.
        states = InstanceState.getRunningStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
      case STATUS:
        states = InstanceState.getActiveStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
      case RERUN:
        // Applicable only for terminated States
        states = InstanceState.getTerminalStates();
        break;
      default:
        throw new IllegalArgumentException("Unhandled action " + action);
    }

    List<ExecutionInstance> instancesToActOn = new ArrayList<>();
    for (String cluster : clusters) {
       if (!clusterList.isEmpty() && !clusterList.contains(cluster)) {
        continue;
      }
      LOG.debug("Retrieving instances for cluster : {} for action {}", cluster, action);
      Collection<InstanceState> instances =
          STATE_STORE.getExecutionInstances(
              entity, cluster, states, new DateTime(start), new DateTime(end));
      for (InstanceState state : instances) {
        instancesToActOn.add(state.getInstance());
      }
    }

    // To ensure compatibility with OozieWorkflowEngine.
    // Also because users would like to see the most recent instances first.
    sortInstancesDescBySequence(instancesToActOn);

    List<InstancesResult.Instance> instances = new ArrayList<>();
    for (ExecutionInstance ins : instancesToActOn) {
      instanceCount++;
      String instanceTimeStr = SchemaHelper.formatDateUTC(ins.getInstanceTime().toDate());

      InstancesResult.Instance instance = null;
      try {
        instance = performAction(ins.getCluster(), entity, action, ins, props, isForced);
        instance.instance = instanceTimeStr;
      } catch (FalconException e) {
        LOG.warn("Unable to perform action {} on cluster", action, e);
        instance = new InstancesResult.Instance(ins.getCluster(), instanceTimeStr, null);
        instance.status = InstancesResult.WorkflowStatus.ERROR;
        instance.details = e.getMessage();
        overallStatus = APIResult.Status.PARTIAL;
      }
      instances.add(instance);
    }
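     // If only a single instance was acted upon and it failed, report FAILED rather than PARTIAL.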
    if (instanceCount < 2 && overallStatus == APIResult.Status.PARTIAL) {
      overallStatus = APIResult.Status.FAILED;
    }
    InstancesResult instancesResult = new InstancesResult(overallStatus, action.name());
    instancesResult.setInstances(instances.toArray(new InstancesResult.Instance[instances.size()]));
    return instancesResult;
  }