@Override
 public String touch(Entity entity, String cluster, Boolean skipDryRun) throws FalconException {
   EntityID id = new EntityID(entity);
   // The state store should ideally hold every entity; guard against a missing one anyway.
   if (!STATE_STORE.entityExists(id)) {
     throw new FalconException("Could not find entity " + id + " in state store.");
   }
   Date endTime = EntityUtil.getEndTime(entity, cluster);
   // A completed entity (end time already in the past) cannot be touched.
   if (endTime.before(DateUtil.now())) {
     throw new FalconException(
         "Entity's end time "
             + SchemaHelper.formatDateUTC(endTime)
             + " is before current time. Entity can't be touch-ed as it has completed.");
   }
   // Snapshot the running instances before the touch; they drive the effective-time report.
   Collection<InstanceState> running =
       STATE_STORE.getExecutionInstances(entity, cluster, InstanceState.getRunningStates());
   // The touch is applied irrespective of the state the entity is in.
   DAGEngineFactory.getDAGEngine(cluster)
       .touch(entity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
   return entity.toShortString()
       + "/Effective Time: "
       + getEffectiveTime(entity, cluster, running);
 }
 @Override
 public InstancesResult getJobDetails(String cluster, String jobId) throws FalconException {
   // Look up the single workflow instance for the given job id and wrap it in a result.
   InstancesResult.Instance instance = DAGEngineFactory.getDAGEngine(cluster).info(jobId);
   InstancesResult result =
       new InstancesResult(APIResult.Status.SUCCEEDED, "Instance for workflow id:" + jobId);
   result.setInstances(new InstancesResult.Instance[] {instance});
   return result;
 }
  @Override
  public String update(Entity oldEntity, Entity newEntity, String cluster, Boolean skipDryRun)
      throws FalconException {
    org.apache.falcon.entity.v0.cluster.Cluster clusterEntity =
        ConfigurationStore.get().get(EntityType.CLUSTER, cluster);
    boolean hasRelevantChanges =
        UpdateHelper.isEntityUpdated(
            oldEntity,
            newEntity,
            cluster,
            EntityUtil.getLatestStagingPath(clusterEntity, oldEntity));
    if (!hasRelevantChanges) {
      // Ideally should throw an exception, but, keeping it backward-compatible.
      LOG.warn(
          "No relevant updates detected in the new entity definition for entity {}!",
          newEntity.getName());
      return "";
    }

    Date oldEndTime = EntityUtil.getEndTime(oldEntity, cluster);
    Date newEndTime = EntityUtil.getEndTime(newEntity, cluster);
    // Reject updates whose new end time has already passed or moved earlier than before.
    if (newEndTime.before(DateUtil.now()) || newEndTime.before(oldEndTime)) {
      throw new FalconException(
          "New Entity's end time "
              + SchemaHelper.formatDateUTC(newEndTime)
              + " is before current time or before old end time. Entity can't be updated.");
    }

    // The steps required are the same as touch.
    DAGEngineFactory.getDAGEngine(cluster)
        .touch(newEntity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
    // Additionally, update the executor. As with Oozie, the update kicks in only for
    // newly created instances, not for READY/WAITING ones.
    Collection<InstanceState> lastInstance = new ArrayList<>();
    lastInstance.add(STATE_STORE.getLastExecutionInstance(oldEntity, cluster));
    EXECUTION_SERVICE.getEntityExecutor(oldEntity, cluster).update(newEntity);

    return newEntity.toShortString()
        + "/Effective Time: "
        + getEffectiveTime(newEntity, cluster, lastInstance);
  }
 @Override
 public void dryRun(Entity entity, String clusterName, Boolean skipDryRun) throws FalconException {
   // A dry run is a plain submit against the cluster's DAG engine.
   // NOTE(review): skipDryRun is intentionally unused by this implementation.
   DAGEngineFactory
       .getDAGEngine(clusterName)
       .submit(entity);
 }
 @Override
 public boolean isAlive(Cluster cluster) throws FalconException {
   // Delegate the liveness probe to the DAG engine bound to this cluster.
   boolean engineAlive = DAGEngineFactory.getDAGEngine(cluster).isAlive();
   return engineAlive;
 }
 @Override
 public Properties getWorkflowProperties(String cluster, String jobId) throws FalconException {
   // The workflow configuration is held by the cluster's DAG engine, keyed by job id.
   Properties workflowConf = DAGEngineFactory.getDAGEngine(cluster).getConfiguration(jobId);
   return workflowConf;
 }
 @Override
 public String getWorkflowStatus(String cluster, String jobId) throws FalconException {
   // Fetch instance details from the DAG engine and report only the status name.
   InstancesResult.Instance instanceInfo = DAGEngineFactory.getDAGEngine(cluster).info(jobId);
   return instanceInfo.getStatus().name();
 }
 private InstancesResult.Instance performAction(
     String cluster,
     Entity entity,
     JobAction action,
     ExecutionInstance instance,
     Properties userProps,
     boolean isForced)
     throws FalconException {
   EntityExecutor executor = EXECUTION_SERVICE.getEntityExecutor(entity, cluster);
   LOG.debug("Retrieving information for {} for action {}", instance.getId(), action);
   // Seed the response from the DAG engine when the instance was scheduled externally;
   // otherwise start from an empty instance record.
   InstancesResult.Instance info;
   if (StringUtils.isNotEmpty(instance.getExternalID())) {
     info = DAGEngineFactory.getDAGEngine(cluster).info(instance.getExternalID());
   } else {
     info = new InstancesResult.Instance();
   }
   switch (action) {
     case KILL:
       executor.kill(instance);
       populateInstanceInfo(info, instance);
       break;
     case SUSPEND:
       executor.suspend(instance);
       populateInstanceInfo(info, instance);
       break;
     case RESUME:
       executor.resume(instance);
       populateInstanceInfo(info, instance);
       break;
     case RERUN:
       executor.rerun(instance, userProps, isForced);
       populateInstanceInfo(info, instance);
       break;
     case STATUS:
       // Mask wfParams
       info.wfParams = null;
       if (StringUtils.isNotEmpty(instance.getExternalID())) {
         List<InstancesResult.InstanceAction> actions =
             DAGEngineFactory.getDAGEngine(cluster).getJobDetails(instance.getExternalID());
         info.actions = actions.toArray(new InstancesResult.InstanceAction[actions.size()]);
       } else {
         // If not scheduled externally yet, get details from state
         populateInstanceInfo(info, instance);
       }
       break;
     case PARAMS:
       // Mask details, log
       info.details = null;
       info.logFile = null;
       Properties conf =
           DAGEngineFactory.getDAGEngine(cluster).getConfiguration(instance.getExternalID());
       InstancesResult.KeyValuePair[] pairs = new InstancesResult.KeyValuePair[conf.size()];
       int idx = 0;
       for (String key : conf.stringPropertyNames()) {
         pairs[idx++] = new InstancesResult.KeyValuePair(key, conf.getProperty(key));
       }
       info.wfParams = pairs;
       break;
     default:
       throw new IllegalArgumentException("Unhandled action " + action);
   }
   return info;
 }