/** Verifies that input.sh exits with 1 for bad input and 0 for good input. */
public void testInput() throws IOException {
  ExecutionResult badRun = executor.exec("bad input", false, false, scriptDir.getPath("input.sh"));
  assertEquals(1, badRun.getExitValue());

  ExecutionResult goodRun = executor.exec("good input", false, false, scriptDir.getPath("input.sh"));
  assertEquals(0, goodRun.getExitValue());
}
/**
 * Merges a set of per-library jellyfish hashes for one ECQ group into a single hash file.
 *
 * @param args pipeline arguments (job prefix, output dir, resources, organism)
 * @param ecqName name of the ECQ group being merged
 * @param fileSet jellyfish hash files to merge
 * @param outputDir directory in which the merged hash is created
 * @return the execution result paired with the merged output file
 */
protected JobOutput executeJellyfishMerger(
    Args args, String ecqName, Set<File> fileSet, File outputDir)
    throws InterruptedException, ProcessExecutionException, ConanParameterException, IOException {
  String suffix = "jellyfish_" + ecqName + "_all.jf31_0";
  File outputFile = new File(outputDir, suffix);

  // The merge process wants an ordered list rather than a set.
  List<File> inputFiles = new ArrayList<>(fileSet);
  JellyfishMergeV11 mergeProcess = this.makeJellyfishMerge(inputFiles, outputFile, args.getOrganism());

  String jobName = args.getJobPrefix() + "-merge-" + suffix;
  ExecutionResult mergeResult =
      this.conanExecutorService.executeProcess(
          mergeProcess,
          args.getOutputDir(),
          jobName,
          args.getThreadsPerProcess(),
          args.getMemoryPerProcess(),
          args.isRunParallel());
  mergeResult.setName("merge-" + suffix);

  return new JobOutput(mergeResult, outputFile);
}
protected JobOutput executeJellyfishCount(Args args, String ecqName, File outputDir, Library lib) throws ProcessExecutionException, InterruptedException, ConanParameterException, IOException { String suffix = "jellyfish_" + ecqName + "_" + lib.getName() + ".jf31"; // Create the process JellyfishCountV11 jellyfishProcess = this.makeJellyfishCount( this.makeInputStringFromLib(lib), new File(new File(outputDir, ecqName), suffix).getAbsolutePath(), args.getOrganism(), args.getThreadsPerProcess()); File outputFile = jellyfishProcess.getArgs().getOutputFile(); // Create a job name String jobName = args.getJobPrefix() + "-count-" + suffix; // Start jellyfish final ExecutionResult id = this.conanExecutorService.executeProcess( jellyfishProcess, new File(outputDir, ecqName), jobName, args.getThreadsPerProcess(), args.getMemoryPerProcess(), args.isRunParallel()); id.setName("count-" + suffix); return new JobOutput(id, outputFile); }
/** Command-line entry point: runs the generator and prints the banner plus the run log. */
public static void main(String[] args) throws Exception {
  ExecutionResult result = new ExecutionResult();
  result.setOk(true);
  new MukiGenerator().run(args, result);
  System.out.print("*** Muki v" + Version.id() + " Created by Gabriel Casarini ***");
  System.out.println(result.getLog());
}
/**
 * Executes the wrapped block in a fresh statement context.
 *
 * @return the block's return value if it ended with a return, otherwise null
 * @throws EvaluationException if the block ended by raising an exception
 */
public Object execute(EvaluationContext evaluationContext) throws EvaluationException {
  OOEESmtEvaluationContext context = new OOEESmtEvaluationContext(evaluationContext);
  ExecutionResult execResult = block.execute(context);
  int resultCode = execResult.getResultCode();

  if (resultCode == StatementConstants.exceptionEnding) {
    // Surface the block's failure as an evaluation error, preserving the cause.
    throw new EvaluationException(ExceptionConstants.EBOS_000, execResult.getCauseException());
  }
  return resultCode == StatementConstants.returnEnding ? execResult.getReturnResult() : null;
}
/** Running effective-rules for a single concept should list that concept on the error console. */
@Test
public void concept() throws IOException, InterruptedException {
  String rulesDirectory = EffectiveRulesIT.class.getResource("/rules").getFile();
  ExecutionResult executionResult =
      execute(new String[] {"effective-rules", "-r", rulesDirectory, "-concepts", "junit4:TestMethod"});

  assertThat(executionResult.getExitCode(), equalTo(0));
  assertThat(executionResult.getErrorConsole(), hasItem(containsString("junit4:TestMethod")));
}
/** Validates the model and adds error messages to the ExecutionResult */
private void validateModel(ExecutionResult result) {
  result.append("-> Validating the project...");
  new ModelUtility().validate(this.getProject(), result);
  if (!result.isOk()) {
    result.append("-> *** ERROR: The project validation FAILED! See messages above. ***");
    result.append("-> *** You need to fix the issues before code can be generated. ***");
    return;
  }
  result.append("-> Validation is OK");
}
/** The result's string rendering should list the returned columns in query order. */
@Test
public void testColumnAreInTheRightOrder() throws Exception {
  createTenNodes();
  String query =
      "start one=node(1), two=node(2), three=node(3), four=node(4), five=node(5), six=node(6), "
          + "seven=node(7), eight=node(8), nine=node(9), ten=node(10) "
          + "return one, two, three, four, five, six, seven, eight, nine, ten";

  ExecutionResult result = engine.execute(query);

  String rendered = result.toString();
  assertThat(rendered, matchesPattern("one.*two.*three.*four.*five.*six.*seven.*eight.*nine.*ten"));
}
@Test public void exampleWithParameterForNodeObject() throws Exception { // START SNIPPET: exampleWithParameterForNodeObject Map<String, Object> params = new HashMap<String, Object>(); params.put("node", andreasNode); ExecutionResult result = engine.execute("start n=node({node}) return n.name", params); // END SNIPPET: exampleWithParameterForNodeObject assertThat(result.columns(), hasItem("n.name")); Iterator<Object> n_column = result.columnAs("n.name"); assertEquals("Andreas", n_column.next()); }
/** Running effective-rules for a custom group lists its rules but not unrelated concepts. */
@Test
public void customGroup() throws IOException, InterruptedException {
  String rulesDirectory = EffectiveRulesIT.class.getResource("/rules").getFile();
  ExecutionResult executionResult =
      execute(new String[] {"effective-rules", "-r", rulesDirectory, "-groups", CUSTOM_GROUP});

  assertThat(executionResult.getExitCode(), equalTo(0));

  List<String> errorConsole = executionResult.getErrorConsole();
  assertThat(errorConsole, hasItem(containsString(TEST_CONCEPT)));
  assertThat(errorConsole, hasItem(containsString(TEST_CONSTRAINT)));
  assertThat(errorConsole, hasItem(containsString(CUSTOM_TEST_CONCEPT)));
  assertThat(errorConsole, not(hasItem(containsString("junit4:TestMethod"))));
}
@Test public void exampleQuery() throws Exception { // START SNIPPET: JavaQuery ExecutionEngine engine = new ExecutionEngine(db); ExecutionResult result = engine.execute("start n=node(0) where 1=1 return n"); assertThat(result.columns(), hasItem("n")); Iterator<Node> n_column = result.columnAs("n"); assertThat(asIterable(n_column), hasItem(db.getNodeById(0))); assertThat(result.toString(), containsString("Node[0]")); // END SNIPPET: JavaQuery }
@Test public void exampleWithParameterForSkipAndLimit() throws Exception { // START SNIPPET: exampleWithParameterForSkipLimit Map<String, Object> params = new HashMap<String, Object>(); params.put("s", 1); params.put("l", 1); ExecutionResult result = engine.execute("start n=node(0,1,2) return n.name skip {s} limit {l}", params); // END SNIPPET: exampleWithParameterForSkipLimit assertThat(result.columns(), hasItem("n.name")); Iterator<Object> n_column = result.columnAs("n.name"); assertEquals("Andreas", n_column.next()); }
@Test public void exampleWithParameterRegularExpression() throws Exception { // START SNIPPET: exampleWithParameterRegularExpression Map<String, Object> params = new HashMap<String, Object>(); params.put("regex", ".*h.*"); ExecutionResult result = engine.execute("start n=node(0,1,2) where n.name =~ {regex} return n.name", params); // END SNIPPET: exampleWithParameterRegularExpression assertThat(result.columns(), hasItem("n.name")); Iterator<Object> n_column = result.columnAs("n.name"); assertEquals("Michaela", n_column.next()); assertEquals("Johan", n_column.next()); }
/** collect() results should come back to Java as an Iterable of nodes. */
@Test
public void shouldBeAbleToEmitJavaIterables() throws Exception {
  makeFriends(michaelaNode, andreasNode);
  makeFriends(michaelaNode, johanNode);

  ExecutionResult result =
      new ExecutionEngine(db).execute("start n=node(0) match n-->friend return collect(friend)");

  // Single row, single column holding the collected nodes.
  Iterable<Node> friends = (Iterable<Node>) result.columnAs("collect(friend)").next();
  assertThat(friends, instanceOf(Iterable.class));
  assertThat(friends, hasItems(andreasNode, johanNode));
}
/**
 * Validates the project and, if valid, regenerates all Objective-C classes into a
 * freshly-cleared output directory. Progress messages are appended to {@code result}.
 */
public void generateObjC(ExecutionResult result) throws Exception {
  this.validateModel(result);
  if (!result.isOk()) {
    // Validation already reported the problem; nothing to generate.
    return;
  }
  this.getIo().deleteDirectory(this.getOutputDirectory());
  result.append("-> Generating Objective-C classes...");
  try {
    new ObjcGenerator().generateAll(this.getProject(), this.getOutputDirectory());
    result.append("-> *** Code generation OK ***");
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Validates the project and, if valid, regenerates all Swift classes into a freshly-cleared
 * output directory. Progress messages are appended to {@code result}.
 *
 * @param result collector for progress/error messages; left not-ok if validation fails
 * @throws RuntimeException wrapping any generation failure (cause preserved)
 */
public void generateSwift(ExecutionResult result) throws Exception {
  this.validateModel(result);
  if (!result.isOk()) {
    // Validation already reported the problem; nothing to generate.
    return;
  }
  this.getIo().deleteDirectory(this.getOutputDirectory());
  result.append("-> Generating Swift classes...");
  try {
    SwiftGenerator generator = new SwiftGenerator();
    generator.generateAll(this.getProject(), this.getOutputDirectory());
    result.append("-> *** Code generation OK ***");
  } catch (Exception e) {
    // Fixed: removed e.printStackTrace() — it duplicated the error report, since the
    // rethrown RuntimeException already carries the cause. Now consistent with generateObjC.
    throw new RuntimeException(e);
  }
}
@Test public void shouldCloseTransactionsWhenIteratingOverSingleColumn() throws Exception { // Given an execution result that has been started but not exhausted createNode(); createNode(); ExecutionResult executionResult = engine.execute("MATCH (n) RETURN n"); ResourceIterator<Node> resultIterator = executionResult.columnAs("n"); resultIterator.next(); assertThat(activeTransaction(), is(notNullValue())); // When resultIterator.close(); // Then assertThat(activeTransaction(), is(nullValue())); }
/**
 * Parses the raw command line: prints usage for "help"/"?" or a wrong argument count,
 * otherwise dispatches to {@link #run(String, String, String, ExecutionResult)}.
 */
public void run(String[] args, ExecutionResult result) throws Exception {
  if (args != null && args.length == 1) {
    String value = args[0].trim().toLowerCase();
    boolean helpRequested = value.equals("help") || value.equals("?");
    if (helpRequested) {
      result.append("Usage: " + COMMAND_HELP);
      result.setOk(false);
      return;
    }
  }
  if (args == null || args.length != 3) {
    result.append("Usage: " + COMMAND_HELP);
    result.setOk(false);
    return;
  }
  this.run(args[0], args[1], args[2], result);
}
public void assertErrorHasNoStackTraces(ExecutionResult result) { String error = result.getError(); if (result instanceof ExecutionFailure) { // Axe everything after the expected exception int pos = error.indexOf("* Exception is:" + TextUtil.getPlatformLineSeparator()); if (pos >= 0) { error = error.substring(0, pos); } } assertNoStackTraces(error, "Standard error"); }
/**
 * Validates the option, project file and output directory, loads the project, then
 * dispatches to the generator matching the option. Failures are reported via {@code result}.
 */
public void run(String option, String projectFile, String outputDirectory, ExecutionResult result)
    throws Exception {
  // equals-on-constant is null-safe, so no explicit null check is needed for option.
  boolean validOption =
      GENERATE_JAVA.equals(option) || GENERATE_OBJC.equals(option) || GENERATE_SWIFT.equals(option);
  if (!validOption) {
    result.append("-> Invalid option! The command line is:");
    result.append(COMMAND_HELP);
    result.setOk(false);
    return;
  }
  if (projectFile == null || !this.getIo().existsFile(projectFile)) {
    result.append("-> The project file doesn't exists: " + projectFile);
    result.setOk(false);
    return;
  }
  if (outputDirectory == null || !this.getIo().existsFile(outputDirectory)) {
    result.append("-> The output directory doesn't exists: " + outputDirectory);
    result.setOk(false);
    return;
  }

  // Load the project and remember where to generate to.
  Project newProject = new ModelUtility().openProject(projectFile);
  this.setProject(newProject);
  this.setOutputDirectory(outputDirectory);

  if (GENERATE_JAVA.equals(option)) {
    this.generateJava(result);
  } else if (GENERATE_OBJC.equals(option)) {
    this.generateObjC(result);
  } else if (GENERATE_SWIFT.equals(option)) {
    this.generateSwift(result);
  }
}
/** Asserts that the build's standard output contains no stack traces. */
private void assertOutputHasNoStackTraces(ExecutionResult result) {
  String output = result.getOutput();
  assertNoStackTraces(output, "Standard output");
}
/** Asserts that neither standard output nor standard error contains deprecation warnings. */
public void assertOutputHasNoDeprecationWarnings(ExecutionResult result) {
  String output = result.getOutput();
  String error = result.getError();
  assertNoDeprecationWarnings(output, "Standard output");
  assertNoDeprecationWarnings(error, "Standard error");
}
/**
 * Runs the k-mer read-analysis stage: jellyfish counts for every library (raw and each MECQ
 * ECQ group), merges per-group counts when a group has more than one library, then runs
 * "kat gcp" over everything. When a scheduler is available and parallel execution is
 * requested, each phase is submitted in parallel and a scheduled wait is inserted between
 * phases; {@code jobResults} is cleared at each wait point while {@code allJobResults}
 * accumulates every job for the final resource summary.
 *
 * @param executionContext execution environment (in particular, whether a scheduler is used)
 * @return a synthetic ExecutionResult summarising the whole task's resource usage
 * @throws ProcessExecutionException wrapping any parameter/IO failure (exit code -1)
 */
@Override
public ExecutionResult execute(ExecutionContext executionContext)
    throws ProcessExecutionException, InterruptedException {
  try {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    log.info("Starting Kmer Counting on all Reads");

    // Create shortcut to args for convenience
    Args args = this.getArgs();

    // Force run parallel to false if not using a scheduler
    if (!executionContext.usingScheduler() && args.isRunParallel()) {
      log.warn("Forcing linear execution due to lack of job scheduler");
      args.setRunParallel(false);
    }

    // Create the output directory
    args.getOutputDir().mkdirs();

    JobOutputMap jfCountOutputs = new JobOutputMap();
    // jobResults holds the current phase's jobs (cleared at each wait point);
    // allJobResults accumulates every job for the final task summary.
    List<ExecutionResult> jobResults = new ArrayList<>();
    List<ExecutionResult> allJobResults = new ArrayList<>();

    // Create the output directory for the RAW datasets
    File rawOutputDir = new File(args.getOutputDir(), "raw");
    if (!rawOutputDir.exists()) {
      rawOutputDir.mkdirs();
    }

    // Start jellyfish on all RAW datasets
    for (Library lib : args.getAllLibraries()) {
      // Execute jellyfish and add id to list of job ids
      JobOutput jfOut = this.executeJellyfishCount(args, "raw", args.getOutputDir(), lib);
      jobResults.add(jfOut.getResult());
      allJobResults.add(jfOut.getResult());
      jfCountOutputs.updateTracker("raw", jfOut.getOutputFile());
    }

    // Also start jellyfish on all the pre-processed libraries from MECQ
    if (args.getAllMecqs() != null) {
      for (Mecq.EcqArgs ecqArgs : args.getAllMecqs()) {
        // Create the output directory for this ECQ group
        File ecqOutputDir = new File(args.getOutputDir(), ecqArgs.getName());
        if (!ecqOutputDir.exists()) {
          ecqOutputDir.mkdirs();
        }
        for (Library lib : ecqArgs.getOutputLibraries()) {
          // Add jellyfish id to list of job ids
          JobOutput jfOut =
              this.executeJellyfishCount(args, ecqArgs.getName(), args.getOutputDir(), lib);
          jobResults.add(jfOut.getResult());
          allJobResults.add(jfOut.getResult());
          jfCountOutputs.updateTracker(ecqArgs.getName(), jfOut.getOutputFile());
        }
      }
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info("Kmer counting all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          args.getJobPrefix() + "-count-*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kmer-count-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    JobOutputMap mergedOutputs = new JobOutputMap();

    // Now execute merge jobs if required
    for (Map.Entry<String, Set<File>> entry : jfCountOutputs.entrySet()) {
      String ecqName = entry.getKey();
      Set<File> fileSet = entry.getValue();

      // Only merge if there's more than one library
      if (fileSet.size() > 1) {
        JobOutput jfOut =
            this.executeJellyfishMerger(
                args, ecqName, fileSet, new File(args.getOutputDir(), ecqName));
        jobResults.add(jfOut.getResult());
        allJobResults.add(jfOut.getResult());
        mergedOutputs.updateTracker(ecqName, jfOut.getOutputFile());
      }
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info(
          "Creating merged kmer counts for all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          args.getJobPrefix() + "-merge-*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kmer-merge-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    // Combine all jellyfish out maps
    jfCountOutputs.combine(mergedOutputs);

    String katGcpJobPrefix = args.getJobPrefix() + "-kat-gcp";

    // Run KAT GCP on everything
    List<ExecutionResult> katGcpResults =
        this.executeKatGcp(
            jfCountOutputs,
            katGcpJobPrefix,
            args.getThreadsPerProcess(),
            args.getMemoryPerProcess(),
            args.isRunParallel());

    for (ExecutionResult result : katGcpResults) {
      // Strip the job prefix (plus separator) from each job name for reporting.
      result.setName(result.getName().substring(args.getJobPrefix().length() + 1));
      jobResults.add(result);
      allJobResults.add(result);
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info("Running \"kat gcp\" for all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          katGcpJobPrefix + "*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kat-gcp-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    log.info("Kmer counting of all reads finished.");

    stopWatch.stop();

    TaskResult taskResult =
        new DefaultTaskResult(
            "rampart-read_analysis-kmer", true, allJobResults, stopWatch.getTime() / 1000L);

    // Output the resource usage to file
    FileUtils.writeLines(
        new File(args.getOutputDir(), args.getJobPrefix() + ".summary"), taskResult.getOutput());

    return new DefaultExecutionResult(
        taskResult.getTaskName(),
        0,
        new String[] {},
        null,
        -1,
        new ResourceUsage(
            taskResult.getMaxMemUsage(),
            taskResult.getActualTotalRuntime(),
            taskResult.getTotalExternalCputime()));
  } catch (ConanParameterException | IOException e) {
    throw new ProcessExecutionException(-1, e);
  }
}
/**
 * Partially evaluates a run of AVM2 instructions from {@code idx} to {@code endIdx},
 * rewriting the bytecode in place where values are known at "compile time":
 * known-constant local register reads are replaced with push instructions, conditional
 * jumps whose outcome is known are replaced with unconditional jumps or pops, and a
 * NewFunction immediately followed by Pop is deleted. Evaluation stops at the first
 * instruction it cannot safely model (unsupported opcode, insufficient/unknown stack,
 * execution error) or when a jump target from elsewhere is reached.
 *
 * @param staticRegs known-constant values per local register id; extended when
 *     {@code inlineIns} (a SetLocal) is encountered
 * @param body method body being rewritten
 * @param abc owning ABC file (constant pool access)
 * @param code the instruction list wrapper performing the actual replacements
 * @param localData simulated execution state (operand stack, scope stack, registers, jump)
 * @param idx index of the first instruction to process (inclusive)
 * @param endIdx index of the last instruction to process (inclusive)
 * @param result optional out-parameter receiving the last fully-known stack state; may be null
 * @param inlineIns a SetLocal instruction whose value should be captured into
 *     {@code staticRegs} and replaced with a pop; may be null — TODO confirm nullability
 * @return true if any instruction was modified, inserted or removed
 */
private boolean executeInstructions(
    Map<Integer, Object> staticRegs,
    MethodBody body,
    ABC abc,
    AVM2Code code,
    LocalDataArea localData,
    int idx,
    int endIdx,
    ExecutionResult result,
    AVM2Instruction inlineIns)
    throws InterruptedException {
  int instructionsProcessed = 0;
  FixItemCounterStack stack = (FixItemCounterStack) localData.operandStack;
  // Offsets that are jump targets: crossing one mid-run would invalidate the simulation.
  Set<Long> refs = code.getImportantOffsets(body);
  boolean modified = false;
  while (true) {
    // Stop at the end of the requested range.
    if (idx > endIdx) {
      break;
    }
    // Safety valve against endless evaluation loops.
    if (instructionsProcessed > executionLimit) {
      break;
    }
    AVM2Instruction ins = code.code.get(idx);
    // Don't run past an instruction that is a jump target from elsewhere
    // (except the very first instruction of the run).
    if (instructionsProcessed > 0 && refs.contains(ins.getOffset())) {
      break;
    }
    modified = modified | code.inlineJumpExit();
    InstructionDefinition def = ins.definition;

    // Capture the value written by the designated SetLocal and replace it with a pop,
    // so later GetLocals of that register can be constant-folded.
    if (inlineIns == ins) {
      if (def instanceof SetLocalTypeIns) {
        int regId = ((SetLocalTypeIns) def).getRegisterId(ins);
        staticRegs.put(regId, localData.localRegisters.get(regId));
        code.replaceInstruction(
            idx, new AVM2Instruction(0, DeobfuscatePopIns.getInstance(), null), body);
        modified = true;
      }
    }

    // Replace reads of known-constant registers with an equivalent push instruction.
    if (def instanceof GetLocalTypeIns) {
      int regId = ((GetLocalTypeIns) def).getRegisterId(ins);
      if (staticRegs.containsKey(regId)) {
        AVM2Instruction pushins = abc.constants.makePush(staticRegs.get(regId));
        if (pushins == null) {
          // Value has no push representation — cannot continue safely.
          break;
        }
        code.replaceInstruction(idx, pushins, body);
        modified = true;
        ins = pushins;
        def = ins.definition;
      }
    }

    // newfunction immediately followed by pop has no net effect — delete both.
    if (def instanceof NewFunctionIns
        && idx + 1 < code.code.size()
        && code.code.get(idx + 1).definition instanceof PopIns) {
      code.removeInstruction(idx + 1, body);
      code.removeInstruction(idx, body);
      modified = true;
      continue;
    }

    // Whitelist of opcodes this partial evaluator can model.
    boolean ok = false;
    // todo: honfika: order by statistics
    if (def.isNotCompileTimeSupported()
        || def instanceof PushByteIns
        || def instanceof PushShortIns
        || def instanceof PushIntIns
        || def instanceof PushDoubleIns
        || def instanceof PushStringIns
        || def instanceof PushNullIns
        || def instanceof PushUndefinedIns
        || def instanceof PushFalseIns
        || def instanceof PushTrueIns
        || def instanceof DupIns
        || def instanceof SwapIns
        || def instanceof AddIns
        || def instanceof AddIIns
        || def instanceof SubtractIns
        || def instanceof SubtractIIns
        || def instanceof ModuloIns
        || def instanceof MultiplyIns
        || def instanceof MultiplyIIns
        // || def instanceof DivideIns
        || def instanceof BitAndIns
        || def instanceof BitXorIns
        || def instanceof BitOrIns
        || def instanceof LShiftIns
        || def instanceof RShiftIns
        || def instanceof URShiftIns
        || def instanceof EqualsIns
        || def instanceof NotIns
        || def instanceof NegateIns
        // || def instanceof NegateIIns
        // || def instanceof IncrementIns
        // || def instanceof IncrementIIns
        // || def instanceof DecrementIns
        // || def instanceof DecrementIIns
        || def instanceof IfTypeIns
        || def instanceof JumpIns
        || def instanceof EqualsIns // NOTE(review): duplicate of EqualsIns above — harmless
        || def instanceof LessEqualsIns
        || def instanceof GreaterEqualsIns
        || def instanceof GreaterThanIns
        || def instanceof LessThanIns
        || def instanceof StrictEqualsIns
        || def instanceof PopIns
        || def instanceof GetLocalTypeIns
        || def instanceof SetLocalTypeIns
        || def instanceof NewFunctionIns
        || def instanceof CoerceOrConvertTypeIns) {
      ok = true;
    }
    if (!ok) {
      break;
    }

    if (!(def instanceof NewFunctionIns)) {
      // do not throw EmptyStackException, much faster
      int requiredStackSize = def.getStackPopCount(ins, abc);
      if (stack.size() < requiredStackSize) {
        break;
      }
      // Stop if any operand the instruction would pop is not a compile-time constant.
      if (requiredStackSize > 0 && !def.isNotCompileTimeSupported()) {
        boolean notCompileTime = false;
        for (int i = 0; i < requiredStackSize; i++) {
          if (stack.peek(i + 1) == NotCompileTime.INSTANCE) {
            notCompileTime = true;
            break;
          }
        }
        if (notCompileTime) {
          break;
        }
      }
      if (localData.scopeStack.size() < -def.getScopeStackDelta(ins, abc)) {
        break;
      }
      // Simulate the instruction; failure means it can't be evaluated statically.
      boolean supported;
      try {
        localData.jump = null;
        supported = def.execute(localData, abc.constants, ins);
      } catch (AVM2ExecutionException ex) {
        supported = false;
      }
      if (!supported) {
        break;
      }
    }

    boolean ifed = false;
    // Conditional jumps with a statically-known outcome get rewritten.
    if (def instanceof IfTypeIns && !(def instanceof JumpIns)) {
      long address = ins.getTargetAddress();
      int nidx = code.adr2pos(address);
      AVM2Instruction tarIns = code.code.get(nidx);
      // Some IfType instructions need more than 1 operand, we must pop out all of them
      int stackCount = -def.getStackDelta(ins, abc);
      if (localData.jump != null) {
        // Branch taken: replace the condition with an unconditional jump to the target,
        // preceded by pops that discard the condition's operands.
        // System.err.println("replacing " + ins + " on " + idx + " with jump");
        AVM2Instruction jumpIns = new AVM2Instruction(0, AVM2Instructions.Jump, new int[] {0});
        // jumpIns.operands[0] = ins.operands[0] /*- ins.getBytes().length*/ +
        // jumpIns.getBytes().length;
        code.replaceInstruction(idx, jumpIns, body);
        jumpIns.operands[0] =
            (int) (tarIns.getOffset() - jumpIns.getOffset() - jumpIns.getBytesLength());
        for (int s = 0; s < stackCount; s++) {
          code.insertInstruction(
              idx,
              new AVM2Instruction(ins.getOffset(), DeobfuscatePopIns.getInstance(), null),
              true,
              body);
        }
        idx = code.adr2pos(jumpIns.getTargetAddress());
      } else {
        // Branch not taken: replace the condition with pops and fall through.
        // System.err.println("replacing " + ins + " on " + idx + " with pop");
        code.replaceInstruction(
            idx,
            new AVM2Instruction(ins.getOffset(), DeobfuscatePopIns.getInstance(), null),
            body);
        for (int s = 1 /*first is replaced*/; s < stackCount; s++) {
          code.insertInstruction(
              idx,
              new AVM2Instruction(ins.getOffset(), DeobfuscatePopIns.getInstance(), null),
              true,
              body);
        }
        // ins.definition = DeobfuscatePopIns.getInstance();
        idx++;
      }
      modified = true;
      ifed = true;
    } else {
      idx++;
    }
    instructionsProcessed++;

    // Record the last position where the whole simulated stack was known-constant.
    if (result != null && stack.allItemsFixed()) {
      result.idx = idx == code.code.size() ? idx - 1 : idx;
      result.instructionsProcessed = instructionsProcessed;
      result.stack.clear();
      result.stack.addAll(stack);
    }
    if (ifed) {
      break;
    }
    // Follow an unconditional jump produced by the simulated instruction.
    if (localData.jump != null) {
      idx = code.adr2pos(localData.jump);
      if (idx == -1) {
        throw new TranslateException("Jump target not found: " + localData.jump);
      }
    }
  }
  return modified;
}
/** Verifies that the script's exit code is propagated through the executor. */
public void testExitValue() throws IOException {
  ExecutionResult result = executor.exec(scriptDir.getPath("exitValue.sh"));
  assertEquals(4, result.getExitValue());
}
/** Drains the named column of the result into a fresh list. */
private <T> List<T> toList(ExecutionResult result, String column) {
  List<T> values = new ArrayList<T>();
  IteratorUtil.addToCollection(result.<T>columnAs(column), values);
  return values;
}
/** Verifies that the script's stdout is captured. (Name typo kept: tests are run by name.) */
public void testOuptut() throws IOException {
  ExecutionResult result = executor.exec(true, false, scriptDir.getPath("test.sh"));
  assertEquals("test.sh\n", result.getOutput());
}
public void testExecuteWorkflow() throws Exception { { final FrameworkProject frameworkProject = testFramework.getFrameworkProjectMgr().getFrameworkProject(TEST_PROJECT); final INodeSet nodes = frameworkProject.getNodeSet(); assertNotNull(nodes); assertEquals(2, nodes.getNodes().size()); } { // test empty workflow final NodeSet nodeset = new NodeSet(); final WorkflowImpl workflow = new WorkflowImpl(new ArrayList<ExecutionItem>(), 1, false, WorkflowStrategy.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = new testInterpreter(); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance( WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock); interpreterService.registerInstance( WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, // interpreterMock); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(0, interpreterMock.executionItemList.size()); } { // test undefined workflow item final NodeSet nodeset = 
new NodeSet(); final ArrayList<ExecutionItem> commands = new ArrayList<ExecutionItem>(); commands.add(new testWorkflowCmdItem()); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowStrategy.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset.nodeSelectorWithDefaultAll()) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = new testInterpreter(); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance( WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, interpreterMock); interpreterService.registerInstance( WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, interpreterMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, // interpreterMock); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertFalse(result.isSuccess()); assertEquals(0, interpreterMock.executionItemList.size()); assertNotNull("threw exception: " + result.getException(), result.getException()); assertTrue( "threw exception: " + result.getException(), result.getException() instanceof WorkflowStepFailureException); assertEquals( "threw exception: " + result.getException(), "Step 1 of the workflow threw an exception: Failed dispatching to node test1: provider name 
was null for Service: CommandInterpreter", result.getException().getMessage()); } { // test script exec item final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<ExecutionItem> commands = new ArrayList<ExecutionItem>(); final ExecutionItem testWorkflowCmdItem = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } }; commands.add(testWorkflowCmdItem); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowStrategy.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("exec", failMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock); // set resturn result interpreterMock.resultList.add( new InterpreterResult() { public boolean isSuccess() { return true; } }); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { 
result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final ExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue( "wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) executionItem1; assertEquals("a command", scriptItem.getScript()); assertNull(scriptItem.getScriptAsStream()); assertNull(scriptItem.getServerScriptFilePath()); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNull(executionContext.getArgs()); assertNull(executionContext.getDataContext()); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals( "expected " + nodeset + ", but was " + executionContext.getNodeSelector(), nodeset, executionContext.getNodeSelector()); } { // test command exec item final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<ExecutionItem> commands = new ArrayList<ExecutionItem>(); final ExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[] {"a", "command"}; } }; commands.add(testWorkflowCmdItem); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowStrategy.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) 
.user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, failMock); // set resturn result interpreterMock.resultList.add( new InterpreterResult() { public boolean isSuccess() { return true; } }); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, interpreterMock.executionItemList.size()); final ExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue( "wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(2, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("command", execItem.getCommand()[1]); assertEquals(1, interpreterMock.executionContextList.size()); final ExecutionContext executionContext = interpreterMock.executionContextList.get(0); assertEquals(TEST_PROJECT, 
executionContext.getFrameworkProject()); assertNull(executionContext.getArgs()); assertNull(executionContext.getDataContext()); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); } { // test workflow of three successful items final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<ExecutionItem> commands = new ArrayList<ExecutionItem>(); final ExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[] {"a", "2", "command"}; } }; commands.add(testWorkflowCmdItem); final ExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[] {"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final ExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() { @Override public String getServerScriptFilePath() { return "/some/file/path"; } @Override public String[] getArgs() { return new String[] {"-testargs", "2"}; } }; commands.add(testWorkflowCmdItemScript2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowStrategy.STEP_FIRST); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .args(new String[] {"test", "args"}) .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = 
new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, // interpreterMock); interpreterService.registerInstance("exec", interpreterMock); interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // set resturn results interpreterMock.resultList.add(new testResult(true, 0)); interpreterMock.resultList.add(new testResult(true, 1)); interpreterMock.resultList.add(new testResult(true, 2)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertNull("threw exception: " + result.getException(), result.getException()); assertTrue(result.isSuccess()); assertEquals(1, result.getResultSet().size()); assertNotNull(result.getResultSet()); assertNotNull( "missing key " + testnode + ": " + result.getResultSet().keySet(), result.getResultSet().get(testnode)); final List<StatusResult> test1 = result.getResultSet().get(testnode); assertEquals(3, test1.size()); for (final int i : new int[] {0, 1, 2}) { final StatusResult interpreterResult = test1.get(i); assertTrue(interpreterResult instanceof testResult); testResult val = (testResult) interpreterResult; assertTrue(val.isSuccess()); assertEquals(i, val.flag); } assertEquals(3, interpreterMock.executionItemList.size()); final ExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue( "wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, 
execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("2", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final ExecutionItem item2 = interpreterMock.executionItemList.get(1); assertTrue( "wrong class: " + item2.getClass().getName(), item2 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2; assertEquals("a command", scriptItem.getScript()); assertNull(scriptItem.getScriptAsStream()); assertNull(scriptItem.getServerScriptFilePath()); final ExecutionItem item3 = interpreterMock.executionItemList.get(2); assertTrue( "wrong class: " + item3.getClass().getName(), item2 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem2 = (ScriptFileCommandExecutionItem) item3; assertNull(scriptItem2.getScript()); assertNull(scriptItem2.getScriptAsStream()); assertEquals("/some/file/path", scriptItem2.getServerScriptFilePath()); assertNotNull(scriptItem2.getArgs()); assertEquals(2, scriptItem2.getArgs().length); assertEquals("-testargs", scriptItem2.getArgs()[0]); assertEquals("2", scriptItem2.getArgs()[1]); assertEquals(3, interpreterMock.executionContextList.size()); for (final int i : new int[] {0, 1, 2}) { final ExecutionContext executionContext = interpreterMock.executionContextList.get(i); assertEquals("item " + i, TEST_PROJECT, executionContext.getFrameworkProject()); assertNull("item " + i, executionContext.getDataContext()); assertEquals("item " + i, 0, executionContext.getLoglevel()); assertEquals("item " + i, "user1", executionContext.getUser()); assertEquals("item " + i, nodeset, executionContext.getNodeSelector()); assertNotNull("item " + i, executionContext.getArgs()); assertEquals("item " + i, 2, executionContext.getArgs().length); assertEquals("item " + i, "test", executionContext.getArgs()[0]); assertEquals("item " + i, "args", executionContext.getArgs()[1]); } } { // test a workflow with a 
failing item (1), with keepgoing=false final NodesSelector nodeset = SelectorUtils.singleNode(testFramework.getFrameworkNodeName()); final ArrayList<ExecutionItem> commands = new ArrayList<ExecutionItem>(); final ExecutionItem testWorkflowCmdItem = new ExecCommandBase() { @Override public String[] getCommand() { return new String[] {"a", "2", "command"}; } }; commands.add(testWorkflowCmdItem); final ExecutionItem testWorkflowCmdItemScript = new ScriptFileCommandBase() { @Override public String getScript() { return "a command"; } @Override public String[] getArgs() { return new String[] {"-testargs", "1"}; } }; commands.add(testWorkflowCmdItemScript); final ExecutionItem testWorkflowCmdItemScript2 = new ScriptFileCommandBase() { @Override public String getServerScriptFilePath() { return "/some/file/path"; } @Override public String[] getArgs() { return new String[] {"-testargs", "2"}; } }; commands.add(testWorkflowCmdItemScript2); final WorkflowImpl workflow = new WorkflowImpl(commands, 1, false, WorkflowStrategy.STEP_FIRST); workflow.setKeepgoing(false); final WorkflowExecutionItemImpl executionItem = new WorkflowExecutionItemImpl(workflow); final StepFirstWorkflowStrategy strategy = new StepFirstWorkflowStrategy(testFramework); final com.dtolabs.rundeck.core.execution.ExecutionContext context = new ExecutionContextImpl.Builder() .frameworkProject(TEST_PROJECT) .user("user1") .nodeSelector(nodeset) .executionListener(new testListener()) .framework(testFramework) .build(); // setup testInterpreter for all command types final CommandInterpreterService interpreterService = CommandInterpreterService.getInstanceForFramework(testFramework); testInterpreter interpreterMock = new testInterpreter(); testInterpreter failMock = new testInterpreter(); failMock.shouldThrowException = true; // interpreterService.registerInstance(JobExecutionItem.COMMAND_TYPE, // interpreterMock); interpreterService.registerInstance("exec", interpreterMock); 
interpreterService.registerInstance("script", interpreterMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_NODE_FIRST, failMock); interpreterService.registerInstance(WorkflowExecutionItem.COMMAND_TYPE_STEP_FIRST, failMock); // set resturn results, fail on second item interpreterMock.resultList.add(new testResult(true, 0)); interpreterMock.resultList.add(new testResult(false, 1)); interpreterMock.resultList.add(new testResult(true, 2)); final WorkflowExecutionResult result = strategy.executeWorkflow(context, executionItem); assertNotNull(result); if (!result.isSuccess() && null != result.getException()) { result.getException().printStackTrace(System.err); } assertFalse(result.isSuccess()); assertNotNull("threw exception: " + result.getException(), result.getException()); assertTrue( "threw exception: " + result.getException(), result.getException() instanceof WorkflowStepFailureException); WorkflowStepFailureException wfsfe = (WorkflowStepFailureException) result.getException(); assertEquals(2, wfsfe.getWorkflowStep()); assertNotNull(wfsfe.getExecutionResult()); final ExecutionResult executionResult = wfsfe.getExecutionResult(); assertNotNull(executionResult.getResultObject()); assertNotNull(executionResult.getResultObject().getResults()); assertEquals(1, executionResult.getResultObject().getResults().size()); assertNotNull(executionResult.getResultObject().getResults().get(testnode)); final StatusResult testnode1 = executionResult.getResultObject().getResults().get(testnode); assertNotNull(testnode1); assertTrue(testnode1 instanceof testResult); testResult failResult = (testResult) testnode1; assertEquals(1, failResult.flag); assertEquals(1, result.getResultSet().size()); assertNotNull(result.getResultSet()); assertNotNull( "missing key" + testnode + ": " + result.getResultSet().keySet(), result.getResultSet().get(testnode)); final List<StatusResult> test1 = result.getResultSet().get(testnode); assertEquals(2, test1.size()); for (final 
int i : new int[] {0, 1}) { final StatusResult interpreterResult = test1.get(i); assertTrue(interpreterResult instanceof testResult); testResult val = (testResult) interpreterResult; assertEquals(i, val.flag); if (0 == i) { assertTrue(val.isSuccess()); } else { assertFalse(val.isSuccess()); } } assertEquals(2, interpreterMock.executionItemList.size()); final ExecutionItem executionItem1 = interpreterMock.executionItemList.get(0); assertTrue( "wrong class: " + executionItem1.getClass().getName(), executionItem1 instanceof ExecCommandExecutionItem); ExecCommandExecutionItem execItem = (ExecCommandExecutionItem) executionItem1; assertNotNull(execItem.getCommand()); assertEquals(3, execItem.getCommand().length); assertEquals("a", execItem.getCommand()[0]); assertEquals("2", execItem.getCommand()[1]); assertEquals("command", execItem.getCommand()[2]); final ExecutionItem item2 = interpreterMock.executionItemList.get(1); assertTrue( "wrong class: " + item2.getClass().getName(), item2 instanceof ScriptFileCommandExecutionItem); ScriptFileCommandExecutionItem scriptItem = (ScriptFileCommandExecutionItem) item2; assertEquals("a command", scriptItem.getScript()); assertNull(scriptItem.getScriptAsStream()); assertNull(scriptItem.getServerScriptFilePath()); assertNotNull(scriptItem.getArgs()); assertEquals(2, scriptItem.getArgs().length); assertEquals("-testargs", scriptItem.getArgs()[0]); assertEquals("1", scriptItem.getArgs()[1]); assertEquals(2, interpreterMock.executionContextList.size()); for (final int i : new int[] {0, 1}) { final ExecutionContext executionContext = interpreterMock.executionContextList.get(i); assertEquals(TEST_PROJECT, executionContext.getFrameworkProject()); assertNull(executionContext.getDataContext()); assertEquals(0, executionContext.getLoglevel()); assertEquals("user1", executionContext.getUser()); assertEquals(nodeset, executionContext.getNodeSelector()); assertNull(executionContext.getArgs()); } } }
/**
 * Sets the run's result, stored internally as its string representation.
 *
 * <p>Allowed values include:
 *
 * <ul>
 *   <li>PENDING: A pending condition.
 *   <li>PASSED: A passing condition.
 *   <li>WARNED: A warning condition.
 *   <li>FAILED: A failed condition.
 *   <li>SKIPPED: A skipped condition.
 *   <li>ERRORED: An error condition.
 *   <li>STOPPED: A stopped condition.
 * </ul>
 *
 * @param result The run's result (one of the values above).
 * @see ExecutionResult
 */
public void setResult(ExecutionResult result) {
  final String resultName = result.toString();
  this.result = resultName;
}
public void testError() throws IOException {
  // error.sh writes its name to stderr and exits 0; verify stderr capture when enabled.
  final ExecutionResult outcome = executor.exec(false, true, scriptDir.getPath("error.sh"));
  assertEquals(0, outcome.getExitValue());
  assertEquals("error.sh\n", outcome.getError());
}