// Populates the given InstancesResult.Instance from the ExecutionInstance's
// state as currently persisted in the state store.
private void populateInstanceInfo(InstancesResult.Instance instanceInfo, ExecutionInstance instance)
    throws StateStoreException {
    instanceInfo.cluster = instance.getCluster();
    InstanceState.STATE state = STATE_STORE.getExecutionInstance(instance.getId()).getCurrentState();
    switch (state) {
    case SUCCEEDED:
        instanceInfo.status = InstancesResult.WorkflowStatus.SUCCEEDED;
        break;
    case FAILED:
        instanceInfo.status = InstancesResult.WorkflowStatus.FAILED;
        break;
    case KILLED:
        instanceInfo.status = InstancesResult.WorkflowStatus.KILLED;
        break;
    case READY:
        instanceInfo.status = InstancesResult.WorkflowStatus.READY;
        break;
    case WAITING:
        instanceInfo.status = InstancesResult.WorkflowStatus.WAITING;
        break;
    case SUSPENDED:
        instanceInfo.status = InstancesResult.WorkflowStatus.SUSPENDED;
        break;
    case RUNNING:
        instanceInfo.status = InstancesResult.WorkflowStatus.RUNNING;
        break;
    default:
        instanceInfo.status = InstancesResult.WorkflowStatus.UNDEFINED;
        break;
    }
    // Mask wfParams by default
    instanceInfo.wfParams = null;
}
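// A minimal test-style sketch of the mapping above (hypothetical; assumes an
// ExecutionInstance "instance" whose persisted state in STATE_STORE is
// SUCCEEDED). Assert is from TestNG, as used in the tests below.
InstancesResult.Instance info = new InstancesResult.Instance();
populateInstanceInfo(info, instance);
Assert.assertEquals(info.status, InstancesResult.WorkflowStatus.SUCCEEDED);
Assert.assertNull(info.wfParams); // wfParams is always masked by this method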
// Re-runs a previously executed instance, looked up by its ID in the state store.
@Override
public void reRun(String cluster, String jobId, Properties props, boolean isForced) throws FalconException {
    InstanceState instanceState = STATE_STORE.getExecutionInstance(jobId);
    ExecutionInstance instance = instanceState.getInstance();
    EntityExecutor executor = EXECUTION_SERVICE.getEntityExecutor(instance.getEntity(), cluster);
    executor.rerun(instance, props, isForced);
}
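// Hedged usage sketch: forcing a re-run of a failed instance with one extra
// workflow property. "workflowEngine", "jobId" and the property name are
// hypothetical and not part of the snippet above.
Properties rerunProps = new Properties();
rerunProps.setProperty("retry.attempt", "2"); // hypothetical property
workflowEngine.reRun("cluster1", jobId, rerunProps, true /* isForced */);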
@Test
public void testGetInstanceFromExternalID() throws Exception {
    storeEntity(EntityType.CLUSTER, "testCluster");
    storeEntity(EntityType.FEED, "clicksFeed");
    storeEntity(EntityType.FEED, "clicksSummary");
    long instance1Time = System.currentTimeMillis() - 180000;
    long instance2Time = System.currentTimeMillis();
    EntityState entityState = getEntityState(EntityType.PROCESS, "processext");

    ExecutionInstance processExecutionInstance1 = BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(), entityState.getEntity(),
            instance1Time, "cluster1", instance1Time);
    processExecutionInstance1.setExternalID("external_id_1");
    InstanceState instanceState1 = new InstanceState(processExecutionInstance1);
    instanceState1.setCurrentState(InstanceState.STATE.RUNNING);

    ExecutionInstance processExecutionInstance2 = BeanMapperUtil.getExecutionInstance(
            entityState.getEntity().getEntityType(), entityState.getEntity(),
            instance2Time, "cluster1", instance2Time);
    processExecutionInstance2.setExternalID("external_id_2");
    InstanceState instanceState2 = new InstanceState(processExecutionInstance2);
    instanceState2.setCurrentState(InstanceState.STATE.RUNNING);

    stateStore.putExecutionInstance(instanceState1);
    stateStore.putExecutionInstance(instanceState2);

    InstanceState actualInstanceState = stateStore.getExecutionInstance("external_id_1");
    Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance1);
    actualInstanceState = stateStore.getExecutionInstance("external_id_2");
    Assert.assertEquals(actualInstanceState.getInstance(), processExecutionInstance2);
}
private InstancesResult.Instance performAction(String cluster, Entity entity, JobAction action,
        ExecutionInstance instance, Properties userProps, boolean isForced) throws FalconException {
    EntityExecutor executor = EXECUTION_SERVICE.getEntityExecutor(entity, cluster);
    InstancesResult.Instance instanceInfo = null;
    LOG.debug("Retrieving information for {} for action {}", instance.getId(), action);
    if (StringUtils.isNotEmpty(instance.getExternalID())) {
        instanceInfo = DAGEngineFactory.getDAGEngine(cluster).info(instance.getExternalID());
    } else {
        instanceInfo = new InstancesResult.Instance();
    }
    switch (action) {
    case KILL:
        executor.kill(instance);
        populateInstanceInfo(instanceInfo, instance);
        break;
    case SUSPEND:
        executor.suspend(instance);
        populateInstanceInfo(instanceInfo, instance);
        break;
    case RESUME:
        executor.resume(instance);
        populateInstanceInfo(instanceInfo, instance);
        break;
    case RERUN:
        executor.rerun(instance, userProps, isForced);
        populateInstanceInfo(instanceInfo, instance);
        break;
    case STATUS:
        // Mask wfParams
        instanceInfo.wfParams = null;
        if (StringUtils.isNotEmpty(instance.getExternalID())) {
            List<InstancesResult.InstanceAction> instanceActions =
                    DAGEngineFactory.getDAGEngine(cluster).getJobDetails(instance.getExternalID());
            instanceInfo.actions =
                    instanceActions.toArray(new InstancesResult.InstanceAction[instanceActions.size()]);
        } else {
            // If not scheduled externally yet, get details from the state store
            populateInstanceInfo(instanceInfo, instance);
        }
        break;
    case PARAMS:
        // Mask details and log file
        instanceInfo.details = null;
        instanceInfo.logFile = null;
        Properties props = DAGEngineFactory.getDAGEngine(cluster).getConfiguration(instance.getExternalID());
        InstancesResult.KeyValuePair[] keyValuePairs = new InstancesResult.KeyValuePair[props.size()];
        int i = 0;
        for (String name : props.stringPropertyNames()) {
            keyValuePairs[i++] = new InstancesResult.KeyValuePair(name, props.getProperty(name));
        }
        instanceInfo.wfParams = keyValuePairs;
        break;
    default:
        throw new IllegalArgumentException("Unhandled action " + action);
    }
    return instanceInfo;
}
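// Hedged usage sketch: fetching the status of a single instance. "ins" is
// assumed to be an ExecutionInstance retrieved from the state store; STATUS
// takes no user properties, so an empty Properties object is passed and
// isForced is irrelevant here.
InstancesResult.Instance statusInfo =
        performAction(ins.getCluster(), entity, JobAction.STATUS, ins, new Properties(), false);
LOG.info("Instance {} is in state {}", ins.getId(), statusInfo.status);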
private InstancesResult doJobAction(JobAction action, Entity entity, Date start, Date end,
        Properties props, List<LifeCycle> lifeCycles, boolean isForced) throws FalconException {
    Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
    List<String> clusterList = getIncludedClusters(props, FALCON_INSTANCE_ACTION_CLUSTERS);
    APIResult.Status overallStatus = APIResult.Status.SUCCEEDED;
    int instanceCount = 0;

    // Determine which instance states the action applies to.
    Collection<InstanceState.STATE> states;
    switch (action) {
    case KILL:
    case SUSPEND:
        states = InstanceState.getActiveStates();
        break;
    case RESUME:
        states = new ArrayList<>();
        states.add(InstanceState.STATE.SUSPENDED);
        break;
    case PARAMS:
        // Applicable only for running, suspended and finished jobs.
        states = InstanceState.getRunningStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
    case STATUS:
        states = InstanceState.getActiveStates();
        states.addAll(InstanceState.getTerminalStates());
        states.add(InstanceState.STATE.SUSPENDED);
        break;
    case RERUN:
        // Applicable only for terminated instances.
        states = InstanceState.getTerminalStates();
        break;
    default:
        throw new IllegalArgumentException("Unhandled action " + action);
    }

    List<ExecutionInstance> instancesToActOn = new ArrayList<>();
    for (String cluster : clusters) {
        if (!clusterList.isEmpty() && !clusterList.contains(cluster)) {
            continue;
        }
        LOG.debug("Retrieving instances for cluster : {} for action {}", cluster, action);
        Collection<InstanceState> instances = STATE_STORE.getExecutionInstances(
                entity, cluster, states, new DateTime(start), new DateTime(end));
        for (InstanceState state : instances) {
            instancesToActOn.add(state.getInstance());
        }
    }

    // To ensure compatibility with OozieWorkflowEngine,
    // and because users want to see the most recent instances first.
    sortInstancesDescBySequence(instancesToActOn);

    List<InstancesResult.Instance> instances = new ArrayList<>();
    for (ExecutionInstance ins : instancesToActOn) {
        instanceCount++;
        String instanceTimeStr = SchemaHelper.formatDateUTC(ins.getInstanceTime().toDate());
        InstancesResult.Instance instance = null;
        try {
            instance = performAction(ins.getCluster(), entity, action, ins, props, isForced);
            instance.instance = instanceTimeStr;
        } catch (FalconException e) {
            LOG.warn("Unable to perform action {} on cluster {}", action, ins.getCluster(), e);
            instance = new InstancesResult.Instance(ins.getCluster(), instanceTimeStr, null);
            instance.status = InstancesResult.WorkflowStatus.ERROR;
            instance.details = e.getMessage();
            overallStatus = APIResult.Status.PARTIAL;
        }
        instances.add(instance);
    }

    // If the only instance acted on failed, the request failed outright rather than partially.
    if (instanceCount < 2 && overallStatus == APIResult.Status.PARTIAL) {
        overallStatus = APIResult.Status.FAILED;
    }
    InstancesResult instancesResult = new InstancesResult(overallStatus, action.name());
    instancesResult.setInstances(instances.toArray(new InstancesResult.Instance[instances.size()]));
    return instancesResult;
}
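// Hedged end-to-end sketch: suspend every active instance of "entity" from the
// last hour. The window is illustrative; lifeCycles is passed as null because
// the implementation above never reads it.
Date end = new Date();
Date start = new Date(end.getTime() - 60 * 60 * 1000L); // one-hour window
InstancesResult result = doJobAction(JobAction.SUSPEND, entity, start, end,
        new Properties(), null, false);
LOG.info("Suspend request finished with overall status {}", result.getStatus());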