@Override
 public String touch(Entity entity, String cluster, Boolean skipDryRun) throws FalconException {
   EntityID id = new EntityID(entity);
    // Ideally the state store should have all entities, but check anyway.
   if (STATE_STORE.entityExists(id)) {
     Date endTime = EntityUtil.getEndTime(entity, cluster);
     if (endTime.before(DateUtil.now())) {
       throw new FalconException(
           "Entity's end time "
               + SchemaHelper.formatDateUTC(endTime)
               + " is before current time. Entity can't be touch-ed as it has completed.");
     }
     Collection<InstanceState> instances =
         STATE_STORE.getExecutionInstances(entity, cluster, InstanceState.getRunningStates());
     // touch should happen irrespective of the state the entity is in.
     DAGEngineFactory.getDAGEngine(cluster)
         .touch(entity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
     StringBuilder builder = new StringBuilder();
     builder
         .append(entity.toShortString())
         .append("/Effective Time: ")
         .append(getEffectiveTime(entity, cluster, instances));
     return builder.toString();
   }
   throw new FalconException("Could not find entity " + id + " in state store.");
 }
 @Override
 public boolean isActive(Entity entity) throws FalconException {
   EntityID id = new EntityID(entity);
    // Ideally the state store should have all entities, but check anyway.
   if (STATE_STORE.entityExists(id)) {
     return STATE_STORE.getEntity(id).getCurrentState() != EntityState.STATE.SUBMITTED;
   }
   return false;
 }
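 A minimal call-site sketch for touch/isActive above. The factory lookup, logger, entity name, and cluster name are assumptions (the exact factory API may differ); only the touch and isActive signatures come from the snippet itself. Passing null for skipDryRun lets touch default it to Boolean.FALSE.
  // Sketch only: wiring and names below are hypothetical, not taken from the snippet.
  AbstractWorkflowEngine engine = WorkflowEngineFactory.getWorkflowEngine(); // assumed factory lookup
  Entity process = ConfigurationStore.get().get(EntityType.PROCESS, "clicksProcess"); // hypothetical process name
  if (engine.isActive(process)) {
    String result = engine.touch(process, "primary-cluster", null); // null -> dry run is not skipped
    LOG.info("Touch result: {}", result); // assumes a logger in scope
  }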
  @Test
  public void testInstanceInsertionAndUpdate() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");
    EntityState entityState = getEntityState(EntityType.PROCESS, "process");
    ExecutionInstance executionInstance =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis(),
            "cluster",
            System.currentTimeMillis());
    InstanceState instanceState = new InstanceState(executionInstance);
    initInstanceState(instanceState);
    stateStore.putExecutionInstance(instanceState);
    InstanceID instanceID = new InstanceID(instanceState.getInstance());
    InstanceState actualInstanceState = stateStore.getExecutionInstance(instanceID);
    Assert.assertEquals(actualInstanceState, instanceState);

    instanceState.setCurrentState(InstanceState.STATE.RUNNING);
    Predicate predicate = new Predicate(Predicate.TYPE.DATA);
    instanceState.getInstance().getAwaitingPredicates().add(predicate);

    stateStore.updateExecutionInstance(instanceState);
    actualInstanceState = stateStore.getExecutionInstance(instanceID);
    Assert.assertEquals(actualInstanceState, instanceState);

    try {
      stateStore.putExecutionInstance(instanceState);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    stateStore.deleteExecutionInstance(instanceID);

    try {
      stateStore.getExecutionInstance(instanceID);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    try {
      stateStore.deleteExecutionInstance(instanceID);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    try {
      stateStore.updateExecutionInstance(instanceState);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }
  }
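 The four expect-failure blocks above repeat the same try/fail/catch shape. A hypothetical helper along these lines (not part of the Falcon test base; assumes Java 8 lambdas and the TestNG Assert already used above) could factor it out:
  // Hypothetical helper: runs an action and asserts it fails with StateStoreException.
  @FunctionalInterface
  private interface StateStoreAction {
    void run() throws Exception;
  }

  private void expectStateStoreException(StateStoreAction action) {
    try {
      action.run();
      Assert.fail("StateStoreException must have been thrown");
    } catch (StateStoreException expected) {
      // expected
    } catch (Exception other) {
      Assert.fail("Expected StateStoreException but got " + other);
    }
  }

  // Usage: expectStateStoreException(() -> stateStore.getExecutionInstance(instanceID));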
  @Test
  public void testGetExecutionSummaryWithRange() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");

    long instance1Time = System.currentTimeMillis() - 180000;
    long instance2Time = System.currentTimeMillis();
    EntityState entityState = getEntityState(EntityType.PROCESS, "clicksProcess");
    ExecutionInstance processExecutionInstance1 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance1Time,
            "cluster1",
            instance1Time);
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance2 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance2Time,
            "cluster1",
            instance2Time);
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.SUCCEEDED);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);

    Map<InstanceState.STATE, Long> summary =
        stateStore.getExecutionInstanceSummary(
            entityState.getEntity(),
            "cluster1",
            new DateTime(instance1Time),
            new DateTime(instance1Time + 60000));
    Assert.assertEquals(summary.size(), 1);
    Assert.assertEquals(summary.get(InstanceState.STATE.RUNNING).longValue(), 1L);

    summary =
        stateStore.getExecutionInstanceSummary(
            entityState.getEntity(),
            "cluster1",
            new DateTime(instance2Time),
            new DateTime(instance2Time + 60000));
    Assert.assertEquals(summary.size(), 1);
    Assert.assertEquals(summary.get(InstanceState.STATE.SUCCEEDED).longValue(), 1L);
  }
 @Override
 public boolean isSuspended(Entity entity) throws FalconException {
   return STATE_STORE
       .getEntity(new EntityID(entity))
       .getCurrentState()
       .equals(EntityState.STATE.SUSPENDED);
 }
  @Override
  public InstancesSummaryResult getSummary(
      Entity entity, Date start, Date end, List<LifeCycle> lifeCycles) throws FalconException {
    Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
    List<InstancesSummaryResult.InstanceSummary> instanceSummaries = new ArrayList<>();

    // Iterate over entity clusters
    for (String cluster : clusters) {
      LOG.debug("Retrieving summary of instances for cluster : {}", cluster);
      Map<InstanceState.STATE, Long> summaries =
          STATE_STORE.getExecutionInstanceSummary(
              entity, cluster, new DateTime(start), new DateTime(end));
      Map<String, Long> summaryMap = new HashMap<>();
      // Iterate over the map and convert STATE to String
      for (Map.Entry<InstanceState.STATE, Long> summary : summaries.entrySet()) {
        summaryMap.put(summary.getKey().name(), summary.getValue());
      }
      instanceSummaries.add(new InstancesSummaryResult.InstanceSummary(cluster, summaryMap));
    }

    InstancesSummaryResult instancesSummaryResult =
        new InstancesSummaryResult(APIResult.Status.SUCCEEDED, JobAction.SUMMARY.name());
    instancesSummaryResult.setInstancesSummary(
        instanceSummaries.toArray(
            new InstancesSummaryResult.InstanceSummary[instanceSummaries.size()]));
    return instancesSummaryResult;
  }
 // Populates the InstancesResult.Instance instance using ExecutionInstance
 private void populateInstanceInfo(
     InstancesResult.Instance instanceInfo, ExecutionInstance instance)
     throws StateStoreException {
   instanceInfo.cluster = instance.getCluster();
   InstanceState.STATE state =
       STATE_STORE.getExecutionInstance(instance.getId()).getCurrentState();
   switch (state) {
     case SUCCEEDED:
       instanceInfo.status = InstancesResult.WorkflowStatus.SUCCEEDED;
       break;
     case FAILED:
       instanceInfo.status = InstancesResult.WorkflowStatus.FAILED;
       break;
     case KILLED:
       instanceInfo.status = InstancesResult.WorkflowStatus.KILLED;
       break;
     case READY:
       instanceInfo.status = InstancesResult.WorkflowStatus.READY;
       break;
     case WAITING:
       instanceInfo.status = InstancesResult.WorkflowStatus.WAITING;
       break;
     case SUSPENDED:
       instanceInfo.status = InstancesResult.WorkflowStatus.SUSPENDED;
       break;
     case RUNNING:
       instanceInfo.status = InstancesResult.WorkflowStatus.RUNNING;
       break;
     default:
       instanceInfo.status = InstancesResult.WorkflowStatus.UNDEFINED;
       break;
   }
   // Mask wfParams by default
   instanceInfo.wfParams = null;
 }
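 Every state handled in the switch above maps to a WorkflowStatus constant of the same name, so the mapping could in principle collapse to a name-based lookup. A sketch, assuming no state ever needs a differently named status:
  // Sketch: map by enum constant name, falling back to UNDEFINED for any state
  // that has no same-named WorkflowStatus constant.
  private static InstancesResult.WorkflowStatus toWorkflowStatus(InstanceState.STATE state) {
    try {
      return InstancesResult.WorkflowStatus.valueOf(state.name());
    } catch (IllegalArgumentException e) {
      return InstancesResult.WorkflowStatus.UNDEFINED;
    }
  }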
  @Test
  public void testInsertRetrieveAndUpdate() throws Exception {
    EntityState entityState = getEntityState(EntityType.PROCESS, "process");
    stateStore.putEntity(entityState);
    EntityID entityID = new EntityID(entityState.getEntity());
    EntityState actualEntityState = stateStore.getEntity(entityID);
    Assert.assertEquals(actualEntityState.getEntity(), entityState.getEntity());
    Assert.assertEquals(actualEntityState.getCurrentState(), entityState.getCurrentState());
    try {
      stateStore.putEntity(entityState);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    entityState.setCurrentState(EntityState.STATE.SCHEDULED);
    stateStore.updateEntity(entityState);
    actualEntityState = stateStore.getEntity(entityID);
    Assert.assertEquals(actualEntityState.getEntity(), entityState.getEntity());
    Assert.assertEquals(actualEntityState.getCurrentState(), entityState.getCurrentState());

    stateStore.deleteEntity(entityID);
    boolean entityExists = stateStore.entityExists(entityID);
    Assert.assertEquals(entityExists, false);

    try {
      stateStore.getEntity(entityID);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    try {
      stateStore.updateEntity(entityState);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }

    try {
      stateStore.deleteEntity(entityID);
      Assert.fail("Exception must have been thrown");
    } catch (StateStoreException e) {
      // no op
    }
  }
 @Override
 public void reRun(String cluster, String jobId, Properties props, boolean isForced)
     throws FalconException {
   InstanceState instanceState = STATE_STORE.getExecutionInstance(jobId);
   ExecutionInstance instance = instanceState.getInstance();
   EntityExecutor executor = EXECUTION_SERVICE.getEntityExecutor(instance.getEntity(), cluster);
   executor.rerun(instance, props, isForced);
 }
Example #10
 @AfterTest
 public void cleanUpTables() throws StateStoreException {
   try {
     stateStore.deleteEntities();
   } catch (Exception e) {
     // ignore
   }
 }
Example #11
  @Test
  public void testGetEntities() throws Exception {
    EntityState entityState1 = getEntityState(EntityType.PROCESS, "process1");
    EntityState entityState2 = getEntityState(EntityType.PROCESS, "process2");
    EntityState entityState3 = getEntityState(EntityType.FEED, "feed1");

    Collection<EntityState> result = stateStore.getAllEntities();
    Assert.assertEquals(result.size(), 0);

    stateStore.putEntity(entityState1);
    stateStore.putEntity(entityState2);
    stateStore.putEntity(entityState3);

    result = stateStore.getAllEntities();
    Assert.assertEquals(result.size(), 3);

    Collection<Entity> entities = stateStore.getEntities(EntityState.STATE.SUBMITTED);
    Assert.assertEquals(entities.size(), 3);
  }
Example #12
  @Test
  public void testCascadingDelete() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");
    EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
    stateStore.putEntity(entityState);
    ExecutionInstance processExecutionInstance1 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis() - 60000,
            "cluster1",
            System.currentTimeMillis() - 60000);
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.READY);

    ExecutionInstance processExecutionInstance2 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis(),
            "cluster1",
            System.currentTimeMillis());
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);

    Collection<InstanceState> instances =
        stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
    Assert.assertEquals(instances.size(), 2);

    stateStore.deleteEntity(new EntityID(entityState.getEntity()));
    deleteEntity(EntityType.PROCESS, "process1");

    instances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
    Assert.assertEquals(instances.size(), 0);
  }
Example #13
  @Test
  public void testGetInstanceFromExternalID() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");

    long instance1Time = System.currentTimeMillis() - 180000;
    long instance2Time = System.currentTimeMillis();
    EntityState entityState = getEntityState(EntityType.PROCESS, "processext");
    ExecutionInstance processExecutionInstance1 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance1Time,
            "cluster1",
            instance1Time);
    processExecutionInstance1.setExternalID("external_id_1");
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance2 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance2Time,
            "cluster1",
            instance2Time);
    processExecutionInstance2.setExternalID("external_id_2");
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);

    InstanceState actualInstanceState = stateStore.getExecutionInstance("external_id_1");
    Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance1);

    actualInstanceState = stateStore.getExecutionInstance("external_id_2");
    Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance2);
  }
  @Override
  public String update(Entity oldEntity, Entity newEntity, String cluster, Boolean skipDryRun)
      throws FalconException {
    org.apache.falcon.entity.v0.cluster.Cluster clusterEntity =
        ConfigurationStore.get().get(EntityType.CLUSTER, cluster);
    boolean entityUpdated =
        UpdateHelper.isEntityUpdated(
            oldEntity,
            newEntity,
            cluster,
            EntityUtil.getLatestStagingPath(clusterEntity, oldEntity));
    StringBuilder result = new StringBuilder();
    if (!entityUpdated) {
      // Ideally this should throw an exception, but keep it backward-compatible.
      LOG.warn(
          "No relevant updates detected in the new entity definition for entity {}!",
          newEntity.getName());
      return result.toString();
    }

    Date oldEndTime = EntityUtil.getEndTime(oldEntity, cluster);
    Date newEndTime = EntityUtil.getEndTime(newEntity, cluster);
    if (newEndTime.before(DateUtil.now()) || newEndTime.before(oldEndTime)) {
      throw new FalconException(
          "New Entity's end time "
              + SchemaHelper.formatDateUTC(newEndTime)
              + " is before current time or before old end time. Entity can't be updated.");
    }

    // The steps required are the same as touch.
    DAGEngineFactory.getDAGEngine(cluster)
        .touch(newEntity, (skipDryRun == null) ? Boolean.FALSE : skipDryRun);
    // Additionally, update the executor.
    // The update will kick in for new instances created and not for READY/WAITING instances, as
    // with Oozie.
    Collection<InstanceState> instances = new ArrayList<>();
    instances.add(STATE_STORE.getLastExecutionInstance(oldEntity, cluster));
    EXECUTION_SERVICE.getEntityExecutor(oldEntity, cluster).update(newEntity);

    result
        .append(newEntity.toShortString())
        .append("/Effective Time: ")
        .append(getEffectiveTime(newEntity, cluster, instances));
    return result.toString();
  }
  @Override
  public InstancesResult getRunningInstances(Entity entity, List<LifeCycle> lifeCycles)
      throws FalconException {
    Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
    List<InstancesResult.Instance> runInstances = new ArrayList<>();

    for (String cluster : clusters) {
      Collection<InstanceState> instances =
          STATE_STORE.getExecutionInstances(entity, cluster, InstanceState.getRunningStates());
      for (InstanceState state : instances) {
        String instanceTimeStr = state.getInstance().getInstanceTime().toString();
        InstancesResult.Instance instance =
            new InstancesResult.Instance(
                cluster, instanceTimeStr, InstancesResult.WorkflowStatus.RUNNING);
        instance.startTime = state.getInstance().getActualStart().toDate();
        runInstances.add(instance);
      }
    }
    InstancesResult result = new InstancesResult(APIResult.Status.SUCCEEDED, "Running Instances");
    result.setInstances(runInstances.toArray(new InstancesResult.Instance[runInstances.size()]));
    return result;
  }
Example #16
  @Test
  public void testBulkInstanceOperations() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");
    EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
    ExecutionInstance processExecutionInstance1 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis() - 60000,
            "cluster1",
            System.currentTimeMillis() - 60000);
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.READY);

    ExecutionInstance processExecutionInstance2 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis(),
            "cluster1",
            System.currentTimeMillis());
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance3 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            System.currentTimeMillis(),
            "cluster2",
            System.currentTimeMillis());
    InstanceState instanceState3 = new InstanceState(processExecutionInstance3);
    instanceState3.setCurrentState(InstanceState.STATE.READY);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);
    stateStore.putExecutionInstance(instanceState3);

    Collection<InstanceState> actualInstances =
        stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
    Assert.assertEquals(actualInstances.size(), 2);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
    Assert.assertEquals(actualInstances.toArray()[1], instanceState2);

    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
    Assert.assertEquals(actualInstances.size(), 1);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState3);

    List<InstanceState.STATE> states = new ArrayList<>();
    states.add(InstanceState.STATE.READY);

    actualInstances = stateStore.getExecutionInstances(entityState.getEntity(), "cluster1", states);
    Assert.assertEquals(actualInstances.size(), 1);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState1);

    EntityClusterID entityClusterID = new EntityClusterID(entityState.getEntity(), "testCluster");
    actualInstances = stateStore.getExecutionInstances(entityClusterID, states);
    Assert.assertEquals(actualInstances.size(), 2);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
    Assert.assertEquals(actualInstances.toArray()[1], instanceState3);

    states.add(InstanceState.STATE.RUNNING);
    actualInstances = stateStore.getExecutionInstances(entityState.getEntity(), "cluster1", states);
    Assert.assertEquals(actualInstances.size(), 2);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState1);
    Assert.assertEquals(actualInstances.toArray()[1], instanceState2);

    InstanceState lastInstanceState =
        stateStore.getLastExecutionInstance(entityState.getEntity(), "cluster1");
    Assert.assertEquals(lastInstanceState, instanceState2);

    InstanceID instanceKey = new InstanceID(instanceState3.getInstance());
    stateStore.deleteExecutionInstance(instanceKey);

    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
    Assert.assertEquals(actualInstances.size(), 0);

    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
    Assert.assertEquals(actualInstances.size(), 2);

    stateStore.putExecutionInstance(instanceState3);

    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
    Assert.assertEquals(actualInstances.size(), 1);

    stateStore.deleteExecutionInstances(entityClusterID.getEntityID());
    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster1");
    Assert.assertEquals(actualInstances.size(), 0);

    actualInstances = stateStore.getAllExecutionInstances(entityState.getEntity(), "cluster2");
    Assert.assertEquals(actualInstances.size(), 0);
  }
  private InstancesResult doJobAction(
      JobAction action,
      Entity entity,
      Date start,
      Date end,
      Properties props,
      List<LifeCycle> lifeCycles,
      boolean isForced)
      throws FalconException {
    Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
    List<String> clusterList = getIncludedClusters(props, FALCON_INSTANCE_ACTION_CLUSTERS);
    APIResult.Status overallStatus = APIResult.Status.SUCCEEDED;
    int instanceCount = 0;

    Collection<InstanceState.STATE> states;
    switch (action) {
      case KILL:
      case SUSPEND:
        states = InstanceState.getActiveStates();
        break;
      case RESUME:
        states = new ArrayList<>();
        states.add(InstanceState.STATE.SUSPENDED);
        break;
      case PARAMS:
        // Applicable only for running and finished jobs.
        states = InstanceState.getRunningStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
      case STATUS:
        states = InstanceState.getActiveStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
      case RERUN:
        // Applicable only for instances in terminal states.
        states = InstanceState.getTerminalStates();
        break;
      default:
        throw new IllegalArgumentException("Unhandled action " + action);
    }

    List<ExecutionInstance> instancesToActOn = new ArrayList<>();
    for (String cluster : clusters) {
      if (clusterList.size() != 0 && !clusterList.contains(cluster)) {
        continue;
      }
      LOG.debug("Retrieving instances for cluster : {} for action {}", cluster, action);
      Collection<InstanceState> instances =
          STATE_STORE.getExecutionInstances(
              entity, cluster, states, new DateTime(start), new DateTime(end));
      for (InstanceState state : instances) {
        instancesToActOn.add(state.getInstance());
      }
    }

    // To ensure compatibility with OozieWorkflowEngine.
    // Also because users would like to see the most recent instances first.
    sortInstancesDescBySequence(instancesToActOn);

    List<InstancesResult.Instance> instances = new ArrayList<>();
    for (ExecutionInstance ins : instancesToActOn) {
      instanceCount++;
      String instanceTimeStr = SchemaHelper.formatDateUTC(ins.getInstanceTime().toDate());

      InstancesResult.Instance instance = null;
      try {
        instance = performAction(ins.getCluster(), entity, action, ins, props, isForced);
        instance.instance = instanceTimeStr;
      } catch (FalconException e) {
        LOG.warn("Unable to perform action {} on cluster", action, e);
        instance = new InstancesResult.Instance(ins.getCluster(), instanceTimeStr, null);
        instance.status = InstancesResult.WorkflowStatus.ERROR;
        instance.details = e.getMessage();
        overallStatus = APIResult.Status.PARTIAL;
      }
      instances.add(instance);
    }
    if (instanceCount < 2 && overallStatus == APIResult.Status.PARTIAL) {
      overallStatus = APIResult.Status.FAILED;
    }
    InstancesResult instancesResult = new InstancesResult(overallStatus, action.name());
    instancesResult.setInstances(instances.toArray(new InstancesResult.Instance[instances.size()]));
    return instancesResult;
  }
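 The action-to-states selection at the top of doJobAction could also live in a small helper, keeping the method focused on retrieving and acting on instances. A sketch using only the InstanceState helpers already shown above (the original switch already mutates the returned collections, so mutability is assumed here as well):
  // Sketch: returns the instance states a given job action applies to,
  // mirroring the switch at the top of doJobAction.
  private Collection<InstanceState.STATE> statesFor(JobAction action) {
    switch (action) {
      case KILL:
      case SUSPEND:
        return InstanceState.getActiveStates();
      case RESUME:
        List<InstanceState.STATE> resumeStates = new ArrayList<>();
        resumeStates.add(InstanceState.STATE.SUSPENDED);
        return resumeStates;
      case PARAMS:
        // Applicable only for running and finished jobs.
        Collection<InstanceState.STATE> paramStates = InstanceState.getRunningStates();
        paramStates.addAll(InstanceState.getTerminalStates());
        paramStates.add(InstanceState.STATE.SUSPENDED);
        return paramStates;
      case STATUS:
        Collection<InstanceState.STATE> statusStates = InstanceState.getActiveStates();
        statusStates.addAll(InstanceState.getTerminalStates());
        statusStates.add(InstanceState.STATE.SUSPENDED);
        return statusStates;
      case RERUN:
        return InstanceState.getTerminalStates();
      default:
        throw new IllegalArgumentException("Unhandled action " + action);
    }
  }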
 @Override
 public boolean isCompleted(Entity entity) throws FalconException {
   return STATE_STORE.isEntityCompleted(new EntityID(entity));
 }
Example #19
  @Test
  public void testGetExecutionInstancesWithRange() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");

    long instance1Time = System.currentTimeMillis() - 180000;
    long instance2Time = System.currentTimeMillis();
    EntityState entityState = getEntityState(EntityType.PROCESS, "process1");
    ExecutionInstance processExecutionInstance1 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance1Time,
            "cluster1",
            instance1Time);
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance2 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance2Time,
            "cluster1",
            instance2Time);
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance3 =
        BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(),
            entityState.getEntity(),
            instance2Time,
            "cluster2",
            instance2Time);
    InstanceState instanceState3 = new InstanceState(processExecutionInstance3);
    instanceState3.setCurrentState(InstanceState.STATE.RUNNING);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);
    stateStore.putExecutionInstance(instanceState3);

    List<InstanceState.STATE> states = new ArrayList<>();
    states.add(InstanceState.STATE.RUNNING);

    Collection<InstanceState> actualInstances =
        stateStore.getExecutionInstances(
            entityState.getEntity(),
            "cluster1",
            states,
            new DateTime(instance1Time),
            new DateTime(instance1Time + 60000));
    Assert.assertEquals(actualInstances.size(), 1);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState1);

    actualInstances =
        stateStore.getExecutionInstances(
            entityState.getEntity(),
            "cluster1",
            states,
            new DateTime(instance2Time),
            new DateTime(instance2Time + 60000));
    Assert.assertEquals(actualInstances.size(), 1);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState2);

    // Ensure we can get instances for a different cluster
    actualInstances =
        stateStore.getExecutionInstances(
            entityState.getEntity(),
            "cluster2",
            states,
            new DateTime(instance2Time),
            new DateTime(instance2Time + 60000));
    Assert.assertEquals(actualInstances.size(), 1);
    Assert.assertEquals(actualInstances.toArray()[0], instanceState3);
  }