private static String keyFor(
    @NonNull SCM scm, @NonNull FilePath ws, @Nullable AbstractBuild<?, ?> build) { // JENKINS-12298
  StringBuilder b = new StringBuilder(scm.getType());
  for (FilePath root : scm.getModuleRoots(ws, build)) {
    b.append(root.getRemote().substring(ws.getRemote().length()));
  }
  return b.toString();
}
@Override
public EnvVars getEnvironment(TaskListener log) throws IOException, InterruptedException {
  EnvVars env = super.getEnvironment(log);
  FilePath ws = getWorkspace();
  if (ws != null) // if this is done very early on in the build, workspace may not be decided yet. see HUDSON-3997
    env.put("WORKSPACE", ws.getRemote());

  // servlet container may have set CLASSPATH in its launch script,
  // so don't let that inherit to the new child process.
  // see http://www.nabble.com/Run-Job-with-JDK-1.4.2-tf4468601.html
  env.put("CLASSPATH", "");

  JDK jdk = project.getJDK();
  if (jdk != null) {
    Computer computer = Computer.currentComputer();
    if (computer != null) { // just in case we're not in a build
      jdk = jdk.forNode(computer.getNode(), log);
    }
    jdk.buildEnvVars(env);
  }
  project.getScm().buildEnvVars(this, env);

  if (buildEnvironments != null)
    for (Environment e : buildEnvironments)
      e.buildEnvVars(env);

  for (EnvironmentContributingAction a :
      Util.filter(getActions(), EnvironmentContributingAction.class))
    a.buildEnvVars(this, env);

  EnvVars.resolve(env);

  return env;
}
@Override
public boolean checkout(
    AbstractBuild<?, ?> build,
    Launcher launcher,
    FilePath workspace,
    BuildListener listener,
    File changeLogFile)
    throws IOException, InterruptedException {
  if (workspace.exists()) {
    listener.getLogger().println("Deleting existing workspace " + workspace.getRemote());
    workspace.deleteRecursive();
  }
  listener.getLogger().println("Staging first zip: " + firstZip);
  workspace.unzipFrom(firstZip.openStream());
  listener.getLogger().println("Staging second zip: " + secondZip);
  workspace.unzipFrom(secondZip.openStream());

  // Get list of files changed in secondZip.
  ZipInputStream zip = new ZipInputStream(secondZip.openStream());
  ZipEntry e;
  ExtractChangeLogParser.ExtractChangeLogEntry changeLog =
      new ExtractChangeLogParser.ExtractChangeLogEntry(secondZip.toString());
  try {
    while ((e = zip.getNextEntry()) != null) {
      if (!e.isDirectory()) changeLog.addFile(new ExtractChangeLogParser.FileInZip(e.getName()));
    }
  } finally {
    zip.close();
  }
  saveToChangeLog(changeLogFile, changeLog);
  return true;
}
public String[] buildCommandLine(FilePath script) {
  if (command.startsWith("#!")) {
    // interpreter override
    int end = command.indexOf('\n');
    if (end < 0) {
      end = command.length();
    }
    List<String> args = new ArrayList<String>();
    args.addAll(Arrays.asList(Util.tokenize(command.substring(0, end).trim())));
    args.add(script.getRemote());
    args.set(0, args.get(0).substring(2)); // trim off "#!"
    return args.toArray(new String[args.size()]);
  } else {
    return new String[] {getDescriptor().getShellOrDefault(), "-xe", script.getRemote()};
  }
}
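The shebang-override branch above can be exercised in isolation; the following is a minimal, self-contained sketch (not the Jenkins implementation) that approximates Util.tokenize with a plain whitespace split and uses a made-up script path to show the resulting command line.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ShebangParseSketch {
  // Approximation of the branch above: split the first line on whitespace
  // (the real Util.tokenize also honours quoting), append the script path,
  // and strip the leading "#!" from the interpreter token.
  static String[] buildCommandLine(String command, String scriptPath) {
    if (command.startsWith("#!")) {
      int end = command.indexOf('\n');
      if (end < 0) {
        end = command.length();
      }
      List<String> args =
          new ArrayList<>(Arrays.asList(command.substring(0, end).trim().split("\\s+")));
      args.add(scriptPath);
      args.set(0, args.get(0).substring(2)); // trim off "#!"
      return args.toArray(new String[0]);
    }
    return new String[] {"/bin/sh", "-xe", scriptPath}; // stand-in for the configured default shell
  }

  public static void main(String[] args) {
    // Prints: [/bin/bash, -ex, /tmp/script.sh]
    System.out.println(
        Arrays.toString(buildCommandLine("#!/bin/bash -ex\necho hello", "/tmp/script.sh")));
  }
}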
@Override
public final boolean start(StepContext context) {
  try {
    FilePath ws = context.get(FilePath.class);
    assert ws != null
        : context.getClass() + " failed to provide a FilePath even though one was requested";
    String remote = ws.getRemote();
    String node = null;
    for (Computer c : Jenkins.getInstance().getComputers()) {
      if (c.getChannel() == ws.getChannel()) {
        node = c.getName();
        break;
      }
    }
    if (node == null) {
      throw new IllegalStateException("no known node for " + ws);
    }
    register(
        context,
        task()
            .launch(
                context.get(EnvVars.class),
                ws,
                context.get(Launcher.class),
                context.get(TaskListener.class)),
        node,
        remote);
  } catch (Exception x) {
    context.onFailure(x);
  }
  return false;
  // TODO implement stop, however it is designed (will need to call Controller.stop)
}
/**
 * An attempt to generate at least semi-useful EnvVars for polling calls, based on previous build.
 * Cribbed from various places.
 */
public static EnvVars getPollEnvironment(
    AbstractProject p, FilePath ws, Launcher launcher, TaskListener listener)
    throws IOException, InterruptedException {
  EnvVars env;
  AbstractBuild b = (AbstractBuild) p.getLastBuild();

  if (b != null) {
    Node lastBuiltOn = b.getBuiltOn();

    if (lastBuiltOn != null) {
      env = lastBuiltOn.toComputer().getEnvironment().overrideAll(b.getCharacteristicEnvVars());
    } else {
      env = new EnvVars(System.getenv());
    }

    String rootUrl = Hudson.getInstance().getRootUrl();
    if (rootUrl != null) {
      env.put("HUDSON_URL", rootUrl);
      env.put("BUILD_URL", rootUrl + b.getUrl());
      env.put("JOB_URL", rootUrl + p.getUrl());
    }

    if (!env.containsKey("HUDSON_HOME")) {
      env.put("HUDSON_HOME", Hudson.getInstance().getRootDir().getPath());
    }

    if (ws != null) {
      env.put("WORKSPACE", ws.getRemote());
    }

    p.getScm().buildEnvVars(b, env);

    StreamBuildListener buildListener =
        new StreamBuildListener((OutputStream) listener.getLogger());

    for (NodeProperty nodeProperty : Hudson.getInstance().getGlobalNodeProperties()) {
      Environment environment = nodeProperty.setUp(b, launcher, (BuildListener) buildListener);
      if (environment != null) {
        environment.buildEnvVars(env);
      }
    }

    if (lastBuiltOn != null) {
      for (NodeProperty nodeProperty : lastBuiltOn.getNodeProperties()) {
        Environment environment = nodeProperty.setUp(b, launcher, buildListener);
        if (environment != null) {
          environment.buildEnvVars(env);
        }
      }
    }

    EnvVars.resolve(env);
  } else {
    env = new EnvVars(System.getenv());
  }

  return env;
}
/**
 * Replaces the file extension of a given file and returns a new FilePath with the new extension.
 *
 * @param input given file whose extension should be replaced.
 * @param fext extension to replace (starting with a dot).
 * @param fext_new new extension (starting with a dot).
 * @return new FilePath.
 * @throws ToolsetException if no input file was given.
 */
private FilePath replaceExtension(FilePath input, String fext, String fext_new)
    throws ToolsetException {
  if (input == null) {
    throw new ToolsetException(messages.getString("NO_INPUT_FILE"));
  } else {
    String filename = input.getRemote();
    // Note: this replaces every occurrence of the extension string in the path, not only the suffix.
    filename = filename.replace(fext, fext_new);
    return new FilePath(new File(filename));
  }
}
@Override
public Map<String, List<Integer>> readLineCoverage() {
  String basePath = "";
  FilePath workspace = getBuild().getWorkspace();
  if (workspace != null) {
    basePath = workspace.getRemote();
  }
  File[] reports = getCoberturaReports(getBuild());
  CoberturaXMLParser parser = new CoberturaXMLParser(basePath);
  return parseReports(parser, reports);
}
/*
 * (non-Javadoc)
 *
 * @see jenkins.plugins.shiningpanda.command.Command#getEnvironment(hudson.FilePath, hudson.EnvVars)
 */
@Override
protected EnvVars getEnvironment(FilePath pwd, EnvVars environment) {
  // Check if conversion is required
  if (!convert)
    // If not required, return the environment directly
    return environment;
  // Get a new one
  environment = new EnvVars(environment);
  // Add the working directory to the PATH so that `./` prefixes are not needed on UNIX
  environment.override("PATH+", pwd.getRemote());
  // Return the environment
  return environment;
}
/**
 * Checks if the file path or file is valid.
 *
 * @param fp file path.
 * @param fext file extension.
 * @return true if file path is valid.
 */
private boolean isValid(FilePath fp, String fext) {
  boolean valid = true;
  try {
    valid = fp.getRemote().endsWith(fext) && fp.exists();
  } catch (IOException ex) {
    lg.severe(ex);
    valid = false;
  } catch (InterruptedException ex) {
    lg.severe(ex);
    valid = false;
  }
  return valid;
}
/**
 * Compiles a set of source files into a given object file.
 *
 * @param input set of source files.
 * @param output object file.
 * @return object file if successful. Otherwise null.
 * @throws Exception
 * @throws ToolsetException
 */
public FilePath compile(FilePath[] input, FilePath output) throws Exception, ToolsetException {
  // add every source file
  for (FilePath fp : input) {
    if (isValid(fp, ".wxs")) {
      lg.debug(messages.getString("ADDING_SOURCE_FILE"), fp.getRemote());
      candle.addSourceFile(fp);
    } else {
      lg.log(messages.getString("NO_VALID_SOURCE_FILE"), fp.getRemote());
    }
  }
  // add output file
  candle.setOutputFile(output);
  candle.createCommand();
  lg.debug(messages.getString("EXECUTING_COMMAND"), candle.toString());
  if (candle.execute()) {
    lg.log(messages.getString("COMPILING_SUCCESSFUL"));
  } else {
    lg.log(messages.getString("COMPILING_FAILED"));
    throw new ToolsetException(messages.getString("COMPILING_FAILED"));
  }
  return candle.getOutputFile();
}
/**
 * Links a set of given object files into an MSI package.
 *
 * @param input set of object files.
 * @param output name of MSI package.
 * @return MSI package file.
 * @throws Exception
 * @throws ToolsetException
 */
public FilePath link(FilePath[] input, FilePath output) throws Exception, ToolsetException {
  // add every object file
  for (FilePath fp : input) {
    if (isValid(fp, ".wixobj")) {
      lg.debug(messages.getString("ADDING_OBJECT_FILE"), fp.getRemote());
      light.addSourceFile(fp);
    } else {
      lg.log(messages.getString("NO_VALID_OBJECT_FILE"), fp.getRemote());
    }
  }
  // add output file
  light.setOutputFile(output);
  light.createCommand();
  lg.debug(messages.getString("EXECUTING_COMMAND"), light.toString());
  if (light.execute()) {
    lg.log(messages.getString("LINKING_SUCCESSFUL"));
  } else {
    lg.log(messages.getString("LINKING_FAILED"));
    throw new ToolsetException(messages.getString("LINKING_FAILED"));
  }
  return light.getOutputFile();
}
public Channel launchChannel(
    String[] cmd, OutputStream out, FilePath _workDir, Map<String, String> envVars)
    throws IOException, InterruptedException {
  printCommandLine(cmd, _workDir);

  try {
    Process proc = launcher.launch(Util.join(asList(cmd), " "), _workDir.getRemote());
    return new Channel(
        "channel over named pipe to " + launcher.getHostName(),
        Computer.threadPoolForRemoting,
        proc.getInputStream(),
        new BufferedOutputStream(proc.getOutputStream()));
  } catch (JIException e) {
    throw new IOException(e);
  }
}
/**
 * Get the root path where the build is located; the project may be checked out to a
 * sub-directory of the root workspace location.
 *
 * @param env EnvVars to take the workspace from; if the workspace is not found there, it is
 *     taken from project.getSomeWorkspace()
 * @return The location of the root of the Gradle build.
 * @throws IOException
 * @throws InterruptedException
 */
public FilePath getModuleRoot(Map<String, String> env) throws IOException, InterruptedException {
  FilePath someWorkspace = project.getSomeWorkspace();
  if (someWorkspace == null) {
    throw new IllegalStateException("Couldn't find workspace");
  }
  env.put("WORKSPACE", someWorkspace.getRemote());

  for (Builder builder : project.getBuilders()) {
    if (builder instanceof Gradle) {
      Gradle gradleBuilder = (Gradle) builder;
      String rootBuildScriptDir = gradleBuilder.getRootBuildScriptDir();
      if (rootBuildScriptDir != null && rootBuildScriptDir.trim().length() != 0) {
        String rootBuildScriptNormalized = Util.replaceMacro(rootBuildScriptDir.trim(), env);
        rootBuildScriptNormalized = Util.replaceMacro(rootBuildScriptNormalized, env);
        return new FilePath(someWorkspace, rootBuildScriptNormalized);
      } else {
        return someWorkspace;
      }
    }
  }
  throw new IllegalArgumentException("Couldn't find Gradle builder in the current builders list");
}
@Test
public void globShouldIgnoreDefaultExcludesByRequest() throws Exception {
  FilePath tmp = new FilePath(tmpRule.getRoot());
  try {
    tmp.child(".gitignore").touch(0);
    FilePath git = tmp.child(".git");
    git.mkdirs();
    git.child("HEAD").touch(0);

    DirScanner glob = new DirScanner.Glob("**/*", null, false);
    MatchingFileVisitor gitdir = new MatchingFileVisitor("HEAD");
    MatchingFileVisitor gitignore = new MatchingFileVisitor(".gitignore");

    glob.scan(new File(tmp.getRemote()), gitdir);
    glob.scan(new File(tmp.getRemote()), gitignore);

    assertTrue(gitdir.found);
    assertTrue(gitignore.found);
  } finally {
    tmp.deleteRecursive();
  }
}
private void writeFitnesseResults(PrintStream log, FilePath resultsFilePath, byte[] results) {
  OutputStream resultsStream = null;
  try {
    resultsStream = resultsFilePath.write();
    resultsStream.write(results);
    log.println(
        "Xml results saved as "
            + Charset.defaultCharset().displayName()
            + " to "
            + resultsFilePath.getRemote());
  } catch (IOException e) {
    e.printStackTrace(log);
  } catch (InterruptedException e2) {
    e2.printStackTrace(log);
  } finally {
    try {
      if (resultsStream != null) resultsStream.close();
    } catch (Exception e) {
      // swallow
    }
  }
}
protected Result doRun(BuildListener listener) throws Exception {
  // pick up a list of reporters to run
  reporters = getProject().createReporters();
  MavenModuleSet mms = getProject().getParent();
  if (debug) listener.getLogger().println("Reporters=" + reporters);

  for (BuildWrapper w : mms.getBuildWrappersList()) {
    Environment e = w.setUp(MavenBuild.this, launcher, listener);
    if (e == null) {
      return Result.FAILURE;
    }
    buildEnvironments.add(e);
  }

  EnvVars envVars = getEnvironment(listener); // buildEnvironments should be set up first

  MavenInstallation mvn = getProject().getParent().getMaven();

  mvn = mvn.forEnvironment(envVars).forNode(Computer.currentComputer().getNode(), listener);

  MavenInformation mavenInformation = getModuleRoot().act(new MavenVersionCallable(mvn.getHome()));

  String mavenVersion = mavenInformation.getVersion();

  LOGGER.fine(
      getFullDisplayName()
          + " is building with mavenVersion " + mavenVersion
          + " from file " + mavenInformation.getVersionResourcePath());

  boolean maven3orLater = MavenUtil.maven3orLater(mavenVersion);

  ProcessCache.MavenProcess process =
      MavenBuild.mavenProcessCache.get(
          launcher.getChannel(),
          listener,
          maven3orLater
              ? new Maven3ProcessFactory(
                  getParent().getParent(), launcher, envVars, getMavenOpts(listener, envVars), null)
              : new MavenProcessFactory(
                  getParent().getParent(), launcher, envVars, getMavenOpts(listener, envVars), null));

  ArgumentListBuilder margs = new ArgumentListBuilder("-N", "-B");
  FilePath localRepo = mms.getLocalRepository().locate(MavenBuild.this);
  if (localRepo != null)
    // the workspace must be on this node, so getRemote() is safe.
    margs.add("-Dmaven.repo.local=" + localRepo.getRemote());

  if (mms.getAlternateSettings() != null) {
    if (IOUtils.isAbsolute(mms.getAlternateSettings())) {
      margs.add("-s").add(mms.getAlternateSettings());
    } else {
      FilePath mrSettings = getModuleRoot().child(mms.getAlternateSettings());
      FilePath wsSettings = getWorkspace().child(mms.getAlternateSettings());
      if (!wsSettings.exists() && mrSettings.exists()) wsSettings = mrSettings;
      margs.add("-s").add(wsSettings.getRemote());
    }
  }

  margs.add("-f", getModuleRoot().child("pom.xml").getRemote());
  margs.addTokenized(getProject().getGoals());

  Map<String, String> systemProps = new HashMap<String, String>(envVars);
  // backward compatibility
  systemProps.put("hudson.build.number", String.valueOf(getNumber()));

  if (maven3orLater) {
    // FIXME here for maven 3 builds
    listener.getLogger().println("Building single Maven modules is not implemented for Maven 3, yet!");
    return Result.ABORTED;
  } else {
    boolean normalExit = false;
    try {
      Result r =
          process.call(
              new Builder(listener, new ProxyImpl(), getProject(), margs.toList(), systemProps));
      normalExit = true;
      return r;
    } finally {
      if (normalExit) process.recycle();
      else process.discard();

      // tear down in reverse order
      boolean failed = false;
      for (int i = buildEnvironments.size() - 1; i >= 0; i--) {
        if (!buildEnvironments.get(i).tearDown(MavenBuild.this, listener)) {
          failed = true;
        }
      }
      // WARNING The return in the finally clause will trump any return before
      if (failed) return Result.FAILURE;
    }
  }
}
@Override
protected Result doRun(final BuildListener listener) throws Exception {
  PrintStream logger = listener.getLogger();
  try {
    EnvVars envVars = getEnvironment(listener);

    Config config = IvyConfig.provider.getConfigById(project.getSettings());
    if (config != null) {
      FilePath tmp = getWorkspace().createTextTempFile("ivy", "xml", config.content);
      settings = tmp.getRemote();
      addAction(new CleanTempFilesAction(settings));
    } else {
      String settingsFile = project.getIvySettingsFile();
      if (settingsFile != null) {
        settings = getWorkspace().child(settingsFile).getRemote();
      }
    }

    if (!project.isAggregatorStyleBuild()) {
      // start module builds
      parseIvyDescriptorFiles(listener, logger, envVars);
      Set<IvyModule> triggeredModules = new HashSet<IvyModule>();
      if (!project.isIncrementalBuild() || IvyModuleSetBuild.this.getChangeSet().isEmptySet()) {
        for (IvyModule module : project.sortedActiveModules) {
          // Don't trigger builds if we've already triggered one of their dependencies.
          // It's safe to just get the direct dependencies since the modules are sorted in
          // dependency order.
          List<AbstractProject> ups = module.getUpstreamProjects();
          boolean triggerBuild = true;
          for (AbstractProject upstreamDep : ups) {
            if (triggeredModules.contains(upstreamDep)) {
              triggerBuild = false;
              break;
            }
          }

          if (triggerBuild) {
            logger.println("Triggering " + module.getModuleName());
            module.scheduleBuild(
                new ParameterizedUpstreamCause(
                    ((Run<?, ?>) IvyModuleSetBuild.this),
                    IvyModuleSetBuild.this.getActions(ParametersAction.class)));
          }
          triggeredModules.add(module);
        }
      } else {
        for (IvyModule module : project.sortedActiveModules) {
          // If there are changes for this module, add it.
          // Also add it if we've never seen this module before,
          // or if the previous build of this module failed or was unstable.
          boolean triggerBuild = false;
          if ((module.getLastBuild() == null)
              || (!getChangeSetFor(module).isEmpty())
              || (module.getLastBuild().getResult().isWorseThan(Result.SUCCESS))) {
            triggerBuild = true;
            List<AbstractProject> ups = module.getUpstreamProjects();
            for (AbstractProject upstreamDep : ups) {
              if (triggeredModules.contains(upstreamDep)) {
                triggerBuild = false;
                triggeredModules.add(module);
                break;
              }
            }
          }

          if (triggerBuild) {
            logger.println("Triggering " + module.getModuleName());
            module.scheduleBuild(
                new ParameterizedUpstreamCause(
                    ((Run<?, ?>) IvyModuleSetBuild.this),
                    IvyModuleSetBuild.this.getActions(ParametersAction.class)));
            triggeredModules.add(module);
          }
        }
      }
    } else {
      // do builds here
      try {
        List<BuildWrapper> wrappers = new ArrayList<BuildWrapper>();
        for (BuildWrapper w : project.getBuildWrappersList()) wrappers.add(w);
        ParametersAction parameters = getAction(ParametersAction.class);
        if (parameters != null)
          parameters.createBuildWrappers(IvyModuleSetBuild.this, wrappers);

        for (BuildWrapper w : wrappers) {
          Environment e = w.setUp(IvyModuleSetBuild.this, launcher, listener);
          if (e == null) return Result.FAILURE;
          buildEnvironments.add(e);
          e.buildEnvVars(envVars); // #3502: too late for getEnvironment to do this
        }

        if (!preBuild(listener, project.getPublishers())) return Result.FAILURE;

        Properties additionalProperties = null;
        if (project.isIncrementalBuild()) {
          parseIvyDescriptorFiles(listener, logger, envVars);
          List<String> changedModules = new ArrayList<String>();
          for (IvyModule m : project.sortedActiveModules) {
            // Check if incrementalBuild is selected and that there are changes -
            // we act as if incrementalBuild is not set if there are no changes.
            if (!IvyModuleSetBuild.this.getChangeSet().isEmptySet()) {
              // If there are changes for this module, add it.
              if (!getChangeSetFor(m).isEmpty()) {
                changedModules.add(m.getModuleName().name);
              }
            }
          }

          if (project.isAggregatorStyleBuild()) {
            additionalProperties = new Properties();
            additionalProperties.put(
                project.getChangedModulesProperty() == null
                    ? "hudson.ivy.changedModules"
                    : project.getChangedModulesProperty(),
                StringUtils.join(changedModules, ','));
          }
        }

        IvyBuilderType ivyBuilderType = project.getIvyBuilderType();
        hudson.tasks.Builder builder =
            ivyBuilderType.getBuilder(additionalProperties, null, buildEnvironments);
        logger.println("Building project with " + ivyBuilderType.getDescriptor().getDisplayName());

        if (builder.perform(IvyModuleSetBuild.this, launcher, listener)) return Result.SUCCESS;

        return Result.FAILURE;
      } finally {
        // tear down in reverse order
        boolean failed = false;
        for (int i = buildEnvironments.size() - 1; i >= 0; i--) {
          if (!buildEnvironments.get(i).tearDown(IvyModuleSetBuild.this, listener)) {
            failed = true;
          }
        }
        buildEnvironments = null;
        // WARNING The return in the finally clause will trump any return before
        if (failed) return Result.FAILURE;
      }
    }

    return null;
  } catch (AbortException e) {
    if (e.getMessage() != null) listener.error(e.getMessage());
    return Result.FAILURE;
  } catch (InterruptedIOException e) {
    e.printStackTrace(listener.error("Aborted Ivy execution for InterruptedIOException"));
    return Result.ABORTED;
  } catch (InterruptedException e) {
    e.printStackTrace(listener.error("Aborted Ivy execution for InterruptedException"));
    return Result.ABORTED;
  } catch (IOException e) {
    e.printStackTrace(listener.error(Messages.IvyModuleSetBuild_FailedToParseIvyXml()));
    return Result.FAILURE;
  } catch (RunnerAbortedException e) {
    return Result.FAILURE;
  } catch (RuntimeException e) {
    // bug in the code.
    e.printStackTrace(
        listener.error(
            "Processing failed due to a bug in the code. Please report this to [email protected]"));
    logger.println("project=" + project);
    logger.println("project.getModules()=" + project.getModules());
    throw e;
  }
}
/**
 * Parses the XML for relevant information.
 *
 * @param paths files hopefully containing test-related data in the correct format
 * @return a collection of test results
 */
public TestNGResult parse(FilePath[] paths) {
  if (null == paths) {
    log("File paths not specified. paths var is null. Returning empty test results.");
    return new TestNGResult();
  }

  finalResults = new TestNGResult();

  for (FilePath path : paths) {
    File file = new File(path.getRemote());

    if (!file.isFile()) {
      log("'" + file.getAbsolutePath() + "' points to an invalid test report");
      continue; // move to next file
    } else {
      log("Processing '" + file.getAbsolutePath() + "'");
    }

    BufferedInputStream bufferedInputStream = null;
    try {
      bufferedInputStream = new BufferedInputStream(new FileInputStream(file));
      xmlPullParser = createXmlPullParser(bufferedInputStream);

      // some initial setup
      testList = new ArrayList<TestNGTestResult>();

      while (XmlPullParser.END_DOCUMENT != xmlPullParser.nextToken()) {
        TAGS tag = TAGS.fromString(xmlPullParser.getName());
        int eventType = xmlPullParser.getEventType();

        switch (eventType) {
            // all opening tags
          case XmlPullParser.START_TAG:
            switch (tag) {
              case SUITE:
                startSuite(get("name"));
                break;
              case GROUPS:
                startGroups();
                break;
              case GROUP:
                startGroup(get("name"));
                break;
              case METHOD:
                startGroupMethod(get("class"), get("name"));
                break;
              case TEST:
                startTest(get("name"));
                break;
              case CLASS:
                startClass(get("name"));
                break;
              case TEST_METHOD:
                startTestMethod(
                    get("name"),
                    get("test-instance-name"),
                    get("status"),
                    get("description"),
                    get("duration-ms"),
                    get("started-at"),
                    get("is-config"));
                break;
              case REPORTER_OUTPUT:
                startReporterOutput();
                break;
              case LINE:
                startLine();
                currentCDATAParent = TAGS.LINE;
                break;
              case PARAMS:
                startMethodParameters();
                currentCDATAParent = TAGS.PARAMS;
                break;
              case EXCEPTION:
                startException(get("class"));
                break;
              case MESSAGE:
                currentCDATAParent = TAGS.MESSAGE;
                break;
              case SHORT_STACKTRACE:
                currentCDATAParent = TAGS.SHORT_STACKTRACE;
                break;
              case FULL_STACKTRACE:
                currentCDATAParent = TAGS.FULL_STACKTRACE;
                break;
            }
            break;
            // all closing tags
          case XmlPullParser.END_TAG:
            switch (tag) {
              case SUITE:
                finishSuite();
                break;
              case GROUP:
                finishGroup();
                break;
              case METHOD:
                finishGroupMethod();
                break;
              case TEST:
                finishTest();
                break;
              case CLASS:
                finishClass();
                break;
              case TEST_METHOD:
                finishTestMethod();
                break;
              case REPORTER_OUTPUT:
                endReporterOutput();
                break;
              case LINE:
                endLine();
                currentCDATAParent = TAGS.UNKNOWN;
                break;
              case PARAMS:
                finishMethodParameters();
                currentCDATAParent = TAGS.UNKNOWN;
                break;
              case EXCEPTION:
                finishException();
                break;
              case MESSAGE:
              case SHORT_STACKTRACE:
              case FULL_STACKTRACE:
                currentCDATAParent = TAGS.UNKNOWN;
                break;
            }
            break;
            // all cdata reading
          case XmlPullParser.CDSECT:
            handleCDATA();
            break;
        }
      }
      finalResults.addUniqueTests(testList);
    } catch (XmlPullParserException e) {
      log("Failed to parse XML: " + e.getMessage());
      log(e);
    } catch (FileNotFoundException e) {
      log("Failed to find XML file");
      log(e);
    } catch (IOException e) {
      log(e);
    } finally {
      try {
        if (bufferedInputStream != null) {
          bufferedInputStream.close();
        }
      } catch (IOException e) {
        log(e);
      }
    }
  }

  // tally up the results properly before returning
  finalResults.tally();
  return finalResults;
}
public Repository getRepository() throws IOException {
  return new FileRepository(new File(workspace.getRemote(), Constants.DOT_GIT));
}
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
    throws InterruptedException {
  listener.getLogger().println("[htmlpublisher] Archiving HTML reports...");

  // Grab the contents of the header and footer as arrays
  ArrayList<String> headerLines;
  ArrayList<String> footerLines;
  try {
    headerLines = this.readFile("/htmlpublisher/HtmlPublisher/header.html");
    footerLines = this.readFile("/htmlpublisher/HtmlPublisher/footer.html");
  } catch (FileNotFoundException e1) {
    e1.printStackTrace();
    return false;
  } catch (IOException e1) {
    e1.printStackTrace();
    return false;
  }

  for (int i = 0; i < this.reportTargets.size(); i++) {
    // Create an array of lines we will eventually write out, initially the header.
    ArrayList<String> reportLines = new ArrayList<String>(headerLines);
    HtmlPublisherTarget reportTarget = this.reportTargets.get(i);
    boolean keepAll = reportTarget.getKeepAll();

    FilePath archiveDir =
        build
            .getWorkspace()
            .child(resolveParametersInString(build, listener, reportTarget.getReportDir()));
    FilePath targetDir = reportTarget.getArchiveTarget(build);

    String levelString = keepAll ? "BUILD" : "PROJECT";
    listener
        .getLogger()
        .println(
            "[htmlpublisher] Archiving at " + levelString + " level " + archiveDir + " to " + targetDir);

    // The index name might be a comma separated list of names, so let's figure out all the pages
    // we should index.
    String[] csvReports =
        resolveParametersInString(build, listener, reportTarget.getReportFiles()).split(",");
    ArrayList<String> reports = new ArrayList<String>();
    for (int j = 0; j < csvReports.length; j++) {
      String report = csvReports[j];
      report = report.trim();

      // Ignore blank report names caused by trailing or double commas.
      if (report.equals("")) {
        continue;
      }

      reports.add(report);
      String tabNo = "tab" + (j + 1);
      // Make the report name the filename without the extension.
      int end = report.lastIndexOf(".");
      String reportName;
      if (end > 0) {
        reportName = report.substring(0, end);
      } else {
        reportName = report;
      }
      String tabItem =
          "<li id=\"" + tabNo + "\" class=\"unselected\" onclick=\"updateBody('" + tabNo
              + "');\" value=\"" + report + "\">" + reportName + "</li>";
      reportLines.add(tabItem);
    }

    // Add the JS to change the link as appropriate.
    String hudsonUrl = Hudson.getInstance().getRootUrl();
    AbstractProject job = build.getProject();
    reportLines.add(
        "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").innerHTML=\"Back to "
            + job.getName() + "\";</script>");
    // If the URL isn't configured in Hudson, the best we can do is attempt to go Back.
    if (hudsonUrl == null) {
      reportLines.add(
          "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").onclick = function() { history.go(-1); return false; };</script>");
    } else {
      String jobUrl = hudsonUrl + job.getUrl();
      reportLines.add(
          "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").href=\"" + jobUrl + "\";</script>");
    }

    reportLines.add(
        "<script type=\"text/javascript\">document.getElementById(\"zip_link\").href=\"*zip*/"
            + reportTarget.getSanitizedName() + ".zip\";</script>");

    try {
      if (!archiveDir.exists()) {
        listener.error("Specified HTML directory '" + archiveDir + "' does not exist.");
        build.setResult(Result.FAILURE);
        return true;
      } else if (!keepAll) {
        // We are only keeping one copy at the project level, so remove the old one.
        targetDir.deleteRecursive();
      }

      if (archiveDir.copyRecursiveTo("**/*", targetDir) == 0) {
        listener.error(
            "Directory '" + archiveDir + "' exists but failed copying to '" + targetDir + "'.");
        if (build.getResult().isBetterOrEqualTo(Result.UNSTABLE)) {
          // If the build failed, don't complain that there was no coverage.
          // The build probably didn't even get to the point where it produces coverage.
          listener.error("This is especially strange since your build otherwise succeeded.");
        }
        build.setResult(Result.FAILURE);
        return true;
      }
    } catch (IOException e) {
      Util.displayIOException(e, listener);
      e.printStackTrace(listener.fatalError("HTML Publisher failure"));
      build.setResult(Result.FAILURE);
      return true;
    }

    reportTarget.handleAction(build);

    // Now add the footer.
    reportLines.addAll(footerLines);

    // And write this as the index
    try {
      writeFile(reportLines, new File(targetDir.getRemote(), reportTarget.getWrapperName()));
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  return true;
}
/**
 * Download the PDS members for the datasets specified in the Jenkins configuration.
 *
 * @param listener Build listener
 * @param workspaceFilePath Workspace file path
 * @param filterPattern Source filter pattern
 * @return TRUE if able to retrieve source
 */
public boolean getSource(BuildListener listener, FilePath workspaceFilePath, String filterPattern) {
  boolean rtnValue = true;

  String datasets = convertFilterPattern(filterPattern);
  listener.getLogger().println("Comma delimited datasets: " + datasets); // $NON-NLS-1$

  // Calling the Topaz HCI code that will run as an OSGI instance. The OSGI instance has a
  // command line interface to download PDS members.
  String osFile = Constants.TOPAZ_CLI_BAT;
  if (SystemUtils.IS_OS_LINUX == true) {
    osFile = Constants.TOPAZ_CLI_SH;
  }

  String cliBatchFile = m_pdsConfig.getTopazCLILocation() + File.separator + osFile;
  String topazCliWorkspace =
      workspaceFilePath.getRemote() + File.separator + Constants.TOPAZ_CLI_WORKSPACE;
  listener.getLogger().println("TopazCLI workspace: " + topazCliWorkspace); // $NON-NLS-1$

  ProcessBuilder processBldr =
      new ProcessBuilder(
          cliBatchFile,
          Constants.HOST_PARM,
          m_pdsConfig.getHost(),
          Constants.PORT_PARM,
          m_pdsConfig.getPort(),
          Constants.USERID_PARM,
          m_pdsConfig.getLoginInformation().getUsername(),
          Constants.PASSWORD_PARM,
          m_pdsConfig.getLoginInformation().getPassword().getPlainText(),
          Constants.FILTER_PARM,
          wrapInQuotes(datasets),
          Constants.TARGET_FOLDER_PARM,
          wrapInQuotes(workspaceFilePath.getRemote()),
          Constants.SCM_TYPE_PARM,
          Constants.PDS,
          Constants.FILE_EXT_PARM,
          m_pdsConfig.getFileExtension(),
          Constants.CODE_PAGE_PARM,
          m_pdsConfig.getCodePage(),
          Constants.DATA_PARM,
          wrapInQuotes(topazCliWorkspace));

  try {
    // invoke the bat file that will start the OSGI instance.
    Process process = processBldr.start();

    // Get the input stream connected to the normal output of the process so we can write all
    // the messages to the Jenkins console output.
    InputStream is = process.getInputStream();
    InputStreamReader isr = new InputStreamReader(is);
    BufferedReader br = new BufferedReader(isr);
    String line;
    while ((line = br.readLine()) != null) {
      listener.getLogger().println(line);
    }

    // check the return value of the process
    int exitValue = process.waitFor();
    listener.getLogger().println("Call TopazCLI.bat exited with exit value = " + exitValue); // $NON-NLS-1$
    if (exitValue != 0) {
      rtnValue = false;
    }
  } catch (IOException e) {
    rtnValue = false;
    listener.getLogger().println(e.getMessage());
  } catch (InterruptedException e) {
    rtnValue = false;
    listener.getLogger().println(e.getMessage());
  }

  return rtnValue;
}
/**
 * Copied from MavenUtil, but here we have to ignore the localRepo path and settings, as those
 * paths come from the remote node and may not exist on the master. See
 * http://issues.jenkins-ci.org/browse/JENKINS-8711
 */
private MavenEmbedder createEmbedder(TaskListener listener, AbstractBuild<?, ?> build)
    throws MavenEmbedderException, IOException, InterruptedException {
  MavenInstallation m = null;
  File settingsLoc = null, remoteGlobalSettingsFromConfig = null;
  String profiles = null;
  Properties systemProperties = null;
  String privateRepository = null;
  FilePath remoteSettingsFromConfig = null;

  File tmpSettings = File.createTempFile("jenkins", "temp-settings.xml");
  try {
    AbstractProject project = build.getProject();

    if (project instanceof MavenModuleSet) {
      MavenModuleSet mavenModuleSet = ((MavenModuleSet) project);
      profiles = mavenModuleSet.getProfiles();
      systemProperties = mavenModuleSet.getMavenProperties();

      // olamy: we have to take care of the settings used for the project, ordered through
      // configuration.
      // TODO maybe in goals with -s,--settings the last one wins, but that is not handled during
      // pom parsing, nor is -Dmaven.repo.local
      // if not we must get ~/.m2/settings.xml then $M2_HOME/conf/settings.xml
      // TODO check if the remoteSettings has a localRepository configured and disable it
      String settingsConfigId = mavenModuleSet.getSettingConfigId();
      String altSettingsPath = null;

      if (!StringUtils.isBlank(settingsConfigId)) {
        Config config = SettingsProviderUtils.findConfig(settingsConfigId, MavenSettingsProvider.class);
        if (config == null) {
          listener.getLogger().println(
              " your Apache Maven build is setup to use a config with id "
                  + settingsConfigId + " but cannot find the config");
        } else {
          listener.getLogger().println(
              "redeploy publisher using settings config with name " + config.name);
          String settingsContent = config.content;
          if (config.content != null) {
            remoteSettingsFromConfig =
                SettingsProviderUtils.copyConfigContentToFilePath(config, build.getWorkspace());
            altSettingsPath = remoteSettingsFromConfig.getRemote();
          }
        }
      }

      if (mavenModuleSet.getAlternateSettings() != null) {
        altSettingsPath = mavenModuleSet.getAlternateSettings();
      }

      String globalSettingsConfigId = mavenModuleSet.getGlobalSettingConfigId();
      if (!StringUtils.isBlank(globalSettingsConfigId)) {
        Config config =
            SettingsProviderUtils.findConfig(globalSettingsConfigId, GlobalMavenSettingsProvider.class);
        if (config == null) {
          listener.getLogger().println(
              " your Apache Maven build is setup to use a global settings config with id "
                  + globalSettingsConfigId + " but cannot find the config");
        } else {
          listener.getLogger().println(
              "redeploy publisher using global settings config with name " + config.name);
          if (config.content != null) {
            remoteGlobalSettingsFromConfig = SettingsProviderUtils.copyConfigContentToFile(config);
          }
        }
      }

      Node buildNode = build.getBuiltOn();
      if (buildNode == null) {
        // assume that build was made on master
        buildNode = Jenkins.getInstance();
      }

      if (StringUtils.isBlank(altSettingsPath)) {
        // get userHome from the node where the job has been executed
        String remoteUserHome = build.getWorkspace().act(new GetUserHome());
        altSettingsPath = remoteUserHome + "/.m2/settings.xml";
      }

      // we copy this file to the master into a temporary file
      FilePath filePath = new FilePath(tmpSettings);
      FilePath remoteSettings = build.getWorkspace().child(altSettingsPath);
      if (!remoteSettings.exists()) {
        // JENKINS-9084 we finally use $M2_HOME/conf/settings.xml as maven does
        String mavenHome =
            ((MavenModuleSet) project).getMaven().forNode(buildNode, listener).getHome();
        String settingsPath = mavenHome + "/conf/settings.xml";
        remoteSettings = build.getWorkspace().child(settingsPath);
      }
      listener.getLogger().println(
          "Maven RedeployPublished use remote "
              + (buildNode != null ? buildNode.getNodeName() : "local")
              + " maven settings from : " + remoteSettings.getRemote());
      remoteSettings.copyTo(filePath);
      settingsLoc = tmpSettings;
    }

    MavenEmbedderRequest mavenEmbedderRequest =
        new MavenEmbedderRequest(
            listener,
            m != null ? m.getHomeDir() : null,
            profiles,
            systemProperties,
            privateRepository,
            settingsLoc);

    if (remoteGlobalSettingsFromConfig != null) {
      mavenEmbedderRequest.setGlobalSettings(remoteGlobalSettingsFromConfig);
    }

    mavenEmbedderRequest.setTransferListener(new ConsoleMavenTransferListener(listener.getLogger()));

    return MavenUtil.createEmbedder(mavenEmbedderRequest);
  } finally {
    if (tmpSettings != null) {
      tmpSettings.delete();
    }
    if (remoteSettingsFromConfig != null) {
      remoteSettingsFromConfig.delete();
    }
    FileUtils.deleteQuietly(remoteGlobalSettingsFromConfig);
  }
}
private ArgumentListBuilder buildMavenCmdLine(
    AbstractBuild<?, ?> build, BuildListener listener, EnvVars env)
    throws IOException, InterruptedException {

  FilePath mavenHome = getMavenHomeDir(build, listener, env);

  if (!mavenHome.exists()) {
    listener.error("Couldn't find Maven home: " + mavenHome.getRemote());
    throw new Run.RunnerAbortedException();
  }

  ArgumentListBuilder args = new ArgumentListBuilder();

  FilePath mavenBootDir = new FilePath(mavenHome, "boot");
  FilePath[] classworldsCandidates = mavenBootDir.list("plexus-classworlds*.jar");
  if (classworldsCandidates == null || classworldsCandidates.length == 0) {
    listener.error("Couldn't find classworlds jar under " + mavenBootDir.getRemote());
    throw new Run.RunnerAbortedException();
  }

  FilePath classWorldsJar = classworldsCandidates[0];

  // classpath
  args.add("-classpath");
  // String cpSeparator = launcher.isUnix() ? ":" : ";";

  args.add(classWorldsJar.getRemote());

  // maven home
  args.addKeyValuePair("-D", "maven.home", mavenHome.getRemote(), false);

  String buildInfoPropertiesFile = env.get(BuildInfoConfigProperties.PROP_PROPS_FILE);
  boolean artifactoryIntegration = StringUtils.isNotBlank(buildInfoPropertiesFile);
  listener.getLogger().println(
      "Artifactory integration is " + (artifactoryIntegration ? "enabled" : "disabled"));
  String classworldsConfPath;
  if (artifactoryIntegration) {

    args.addKeyValuePair("-D", BuildInfoConfigProperties.PROP_PROPS_FILE, buildInfoPropertiesFile, false);

    // use the classworlds conf packaged with this plugin and resolve the extractor libs
    File maven3ExtractorJar = Which.jarFile(Maven3BuildInfoLogger.class);
    FilePath actualDependencyDirectory =
        PluginDependencyHelper.getActualDependencyDirectory(build, maven3ExtractorJar);

    if (getMavenOpts() == null || !getMavenOpts().contains("-Dm3plugin.lib")) {
      args.addKeyValuePair("-D", "m3plugin.lib", actualDependencyDirectory.getRemote(), false);
    }

    URL classworldsResource =
        getClass().getClassLoader().getResource("org/jfrog/hudson/maven3/classworlds-freestyle.conf");

    File classworldsConfFile = new File(URLDecoder.decode(classworldsResource.getFile(), "utf-8"));
    if (!classworldsConfFile.exists()) {
      listener.error(
          "Unable to locate classworlds configuration file under "
              + classworldsConfFile.getAbsolutePath());
      throw new Run.RunnerAbortedException();
    }

    // If we are on a remote slave, make a temp copy of the customized classworlds conf
    if (Computer.currentComputer() instanceof SlaveComputer) {
      FilePath remoteClassworlds =
          build.getWorkspace().createTextTempFile("classworlds", "conf", "", false);
      remoteClassworlds.copyFrom(classworldsResource);
      classworldsConfPath = remoteClassworlds.getRemote();
    } else {
      classworldsConfPath = classworldsConfFile.getCanonicalPath();
    }
  } else {
    classworldsConfPath = new FilePath(mavenHome, "bin/m2.conf").getRemote();
  }

  args.addKeyValuePair("-D", "classworlds.conf", classworldsConfPath, false);

  // maven opts
  if (StringUtils.isNotBlank(getMavenOpts())) {
    String mavenOpts = Util.replaceMacro(getMavenOpts(), build.getBuildVariableResolver());
    args.add(mavenOpts);
  }

  // classworlds launcher main class
  args.add(CLASSWORLDS_LAUNCHER);

  // pom file to build
  String rootPom = getRootPom();
  if (StringUtils.isNotBlank(rootPom)) {
    args.add("-f", rootPom);
  }

  // maven goals
  args.addTokenized(getGoals());

  return args;
}
private int archiveArtifacts(Collection<ArtifactDTO> artifacts) {
  int count = 0;
  FilePath workspace = build.getWorkspace();

  // Check if workspace exists.
  if (null == workspace) {
    muxlog.error("Missing workspace to archive artifacts from.");
    // TODO: consider this a failure?
    // build.setResult(Result.FAILURE);
    return count;
  }

  try {
    FilePath archiveDirectory = new FilePath(build.getArtifactsDir());
    archiveDirectory.mkdirs();
    log.debug("Archiving to {}", archiveDirectory);

    VirtualChannel channel = workspace.getChannel();
    for (ArtifactDTO artifact : artifacts) {
      if (null != artifact.getCreatedProject() && maybeIncludePom(artifact)) {
        MavenCoordinatesDTO gav = artifact.getCoordinates();

        // File information for the artifact.
        File file = ArtifactDTOHelper.getFile(artifact);
        // TODO: filter out nulls when generating the artifact list; i.e. at ArtifactRecorder
        if (file != null) {
          // Executing on the master; do not try to canonicalize the path since it could have
          // been defined from a remote file system that is not the same as the masters.
          // E.g. master being linux, remote being windows.
          // TODO: consider making a FilePath constructor using channel and file and extract
          // the path so that users don't have to think about differences between canonical,
          // absolute, and path.
          FilePath source = new FilePath(channel, file.getPath());

          // Store in archive using a similar format as a Maven repository.
          FilePath target =
              archiveDirectory
                  .child(gav.getGroupId())
                  .child(gav.getArtifactId())
                  .child(gav.getVersion())
                  // Resolve source filename using the same node that created the file. This
                  // should ensure that the name portion is properly extracted since the file
                  // system is the same.
                  .child(source.getName());

          if (log.isTraceEnabled()) {
            // Separate calls to get more info when there are failures.
            log.trace("Created path to archive: {} on channel {}", target, target.getChannel());
            log.trace(
                "Copying FROM {} on channel {} TO {} on channel {}",
                $(source, source.getChannel(), target, target.getChannel()));
          }

          source.copyTo(target);
          // TODO: catch IOException and continue with rest of artifacts?

          artifact.withArchivedFile(target.getRemote());
          count++;
        } else {
          muxlog.error("Failed to archive Maven 3 artifact {} -" + " unresolved.", gav);
        }
      }
    }
  }
  // No need to stop the build due to these exceptions.
  catch (IOException e) {
    Util.displayIOException(e, listener);
    muxlog.error("Failed to archive Maven 3 artifacts", e);
  } catch (InterruptedException e) {
    muxlog.error("Failed to archive Maven 3 artifacts", e);
  }
  return count;
}
static String getAbsolutePathToFileThatMayBeRelativeToWorkspace(
    FilePath workingDirectory, String fileName) {
  if (new File(fileName).exists()) return fileName;
  return new File(workingDirectory.getRemote(), fileName).getAbsolutePath();
}
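A minimal standalone sketch of the lookup order used by the helper above: an existing path is returned as given, otherwise the name is resolved against the workspace directory (the class name and paths below are illustrative only).

import java.io.File;

public class WorkspaceRelativePathSketch {
  static String resolve(File workingDirectory, String fileName) {
    // An already-existing (typically absolute) path wins; otherwise fall back
    // to interpreting the name relative to the workspace directory.
    if (new File(fileName).exists()) {
      return fileName;
    }
    return new File(workingDirectory, fileName).getAbsolutePath();
  }

  public static void main(String[] args) {
    // Prints an absolute path under the (hypothetical) workspace directory.
    System.out.println(resolve(new File("/var/jenkins/workspace/job"), "results.xml"));
  }
}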
public ProcStarter init(FilePath path) {
  return run("init", path.getRemote());
}
/**
 * Normally, a workspace is assigned by {@link Runner}, but this lets you set the workspace in
 * case {@link AbstractBuild} is created without a build.
 */
protected void setWorkspace(FilePath ws) {
  this.workspace = ws.getRemote();
}
@Override
public boolean checkout(
    final AbstractBuild build,
    Launcher launcher,
    final FilePath workspace,
    final BuildListener listener,
    File changelogFile)
    throws IOException, InterruptedException {

  listener.getLogger().println(
      "Checkout:" + workspace.getName() + " / " + workspace.getRemote() + " - " + workspace.getChannel());

  final String projectName = build.getProject().getName();
  final int buildNumber = build.getNumber();
  final String gitExe = getDescriptor().getGitExe();
  final String buildnumber = "hudson-" + projectName + "-" + buildNumber;

  final BuildData buildData = getBuildData(build.getPreviousBuild(), true);

  if (buildData != null && buildData.lastBuild != null) {
    listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision);
  }

  EnvVars tmp = new EnvVars();
  try {
    tmp = build.getEnvironment(listener);
  } catch (InterruptedException e) {
    listener.error("Interrupted exception getting environment .. using empty environment");
  }
  final EnvVars environment = tmp;

  final String singleBranch = getSingleBranch(build);

  final Revision revToBuild =
      workspace.act(
          new FileCallable<Revision>() {
            private static final long serialVersionUID = 1L;

            public Revision invoke(File localWorkspace, VirtualChannel channel) throws IOException {
              FilePath ws = new FilePath(localWorkspace);
              listener.getLogger().println(
                  "Checkout:" + ws.getName() + " / " + ws.getRemote() + " - " + ws.getChannel());

              IGitAPI git = new GitAPI(gitExe, ws, listener, environment);

              if (git.hasGitRepo()) {
                // It's an update
                listener.getLogger().println("Fetching changes from the remote Git repository");
                for (RemoteConfig remoteRepository : getRepositories()) {
                  fetchFrom(git, localWorkspace, listener, remoteRepository);
                }
              } else {
                listener.getLogger().println("Cloning the remote Git repository");

                // Go through the repositories, trying to clone from one
                boolean successfullyCloned = false;
                for (RemoteConfig rc : remoteRepositories) {
                  try {
                    git.clone(rc);
                    successfullyCloned = true;
                    break;
                  } catch (GitException ex) {
                    listener.error("Error cloning remote repo '%s' : %s", rc.getName(), ex.getMessage());
                    if (ex.getCause() != null) {
                      listener.error("Cause: %s", ex.getCause().getMessage());
                    }
                    // Failed. Try the next one
                    listener.getLogger().println("Trying next repository");
                  }
                }

                if (!successfullyCloned) {
                  listener.error("Could not clone from a repository");
                  throw new GitException("Could not clone");
                }

                // Also do a fetch
                for (RemoteConfig remoteRepository : getRepositories()) {
                  fetchFrom(git, localWorkspace, listener, remoteRepository);
                }

                if (git.hasGitModules()) {
                  git.submoduleInit();
                  git.submoduleUpdate();
                }
              }

              IBuildChooser buildChooser =
                  new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData);

              Collection<Revision> candidates = buildChooser.getCandidateRevisions(false, singleBranch);
              if (candidates.size() == 0) return null;
              return candidates.iterator().next();
            }
          });

  if (revToBuild == null) {
    // getBuildCandidates should make the last item the last build, so a re-build
    // will build the last built thing.
    listener.error("Nothing to do");
    return false;
  }
  listener.getLogger().println("Commencing build of " + revToBuild);

  Object[] returnData; // Changelog, BuildData

  if (mergeOptions.doMerge()) {
    if (!revToBuild.containsBranchName(mergeOptions.getMergeTarget())) {
      returnData =
          workspace.act(
              new FileCallable<Object[]>() {
                private static final long serialVersionUID = 1L;

                public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException {
                  EnvVars environment;
                  try {
                    environment = build.getEnvironment(listener);
                  } catch (Exception e) {
                    listener.error("Exception reading environment - using empty environment");
                    environment = new EnvVars();
                  }
                  IGitAPI git =
                      new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment);

                  IBuildChooser buildChooser =
                      new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData);

                  // Do we need to merge this revision onto MergeTarget?
                  // Only merge if there's a branch to merge that isn't us..
                  listener.getLogger().println(
                      "Merging " + revToBuild + " onto " + mergeOptions.getMergeTarget());

                  // checkout origin/blah
                  ObjectId target = git.revParse(mergeOptions.getMergeTarget());
                  git.checkout(target.name());

                  try {
                    git.merge(revToBuild.getSha1().name());
                  } catch (Exception ex) {
                    listener.getLogger().println(
                        "Branch not suitable for integration as it does not merge cleanly");

                    // We still need to tag something to prevent repetitive builds from
                    // happening - tag the candidate branch.
                    git.checkout(revToBuild.getSha1().name());

                    git.tag(buildnumber, "Hudson Build #" + buildNumber);

                    buildChooser.revisionBuilt(revToBuild, buildNumber, Result.FAILURE);

                    return new Object[] {null, buildChooser.getData()};
                  }

                  if (git.hasGitModules()) {
                    git.submoduleUpdate();
                  }

                  // Tag the successful merge
                  git.tag(buildnumber, "Hudson Build #" + buildNumber);

                  StringBuilder changeLog = new StringBuilder();

                  if (revToBuild.getBranches().size() > 0)
                    listener.getLogger().println("Warning : There are multiple branch changesets here");

                  try {
                    for (Branch b : revToBuild.getBranches()) {
                      Build lastRevWas =
                          buildData == null ? null : buildData.getLastBuildOfBranch(b.getName());
                      if (lastRevWas != null) {
                        changeLog.append(
                            putChangelogDiffsIntoFile(
                                git, b.name, lastRevWas.getSHA1().name(), revToBuild.getSha1().name()));
                      }
                    }
                  } catch (GitException ge) {
                    changeLog.append("Unable to retrieve changeset");
                  }

                  Build buildData = buildChooser.revisionBuilt(revToBuild, buildNumber, null);
                  GitUtils gu = new GitUtils(listener, git);
                  buildData.mergeRevision = gu.getRevisionForSHA1(target);

                  // Fetch the diffs into the changelog file
                  return new Object[] {changeLog, buildChooser.getData()};
                }
              });

      BuildData returningBuildData = (BuildData) returnData[1];
      build.addAction(returningBuildData);
      return changeLogResult((String) returnData[0], changelogFile);
    }
  }

  // No merge

  returnData =
      workspace.act(
          new FileCallable<Object[]>() {
            private static final long serialVersionUID = 1L;

            public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException {
              IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment);
              IBuildChooser buildChooser =
                  new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData);

              // Straight compile-the-branch
              listener.getLogger().println("Checking out " + revToBuild);
              git.checkout(revToBuild.getSha1().name());

              // if( compileSubmoduleCompares )
              if (doGenerateSubmoduleConfigurations) {
                SubmoduleCombinator combinator =
                    new SubmoduleCombinator(git, listener, localWorkspace, submoduleCfg);
                combinator.createSubmoduleCombinations();
              }

              if (git.hasGitModules()) {
                git.submoduleInit();

                // Git submodule update will only 'fetch' from where it regards as 'origin'.
                // However, it is possible that we are building from a RemoteRepository with
                // changes that are not in 'origin' AND it may be a new module that we've only
                // just discovered. So - try updating from all RRs, then use the submodule
                // update to do the checkout
                for (RemoteConfig remoteRepository : getRepositories()) {
                  fetchFrom(git, localWorkspace, listener, remoteRepository);
                }

                // Update to the correct checkout
                git.submoduleUpdate();
              }

              // Tag the successful merge
              git.tag(buildnumber, "Hudson Build #" + buildNumber);

              StringBuilder changeLog = new StringBuilder();

              int histories = 0;

              try {
                for (Branch b : revToBuild.getBranches()) {
                  Build lastRevWas =
                      buildData == null ? null : buildData.getLastBuildOfBranch(b.getName());

                  if (lastRevWas != null) {
                    listener.getLogger().println("Recording changes in branch " + b.getName());
                    changeLog.append(
                        putChangelogDiffsIntoFile(
                            git, b.name, lastRevWas.getSHA1().name(), revToBuild.getSha1().name()));
                    histories++;
                  } else {
                    listener.getLogger().println("No change to record in branch " + b.getName());
                  }
                }
              } catch (GitException ge) {
                changeLog.append("Unable to retrieve changeset");
              }

              if (histories > 1)
                listener.getLogger().println("Warning : There are multiple branch changesets here");

              buildChooser.revisionBuilt(revToBuild, buildNumber, null);

              if (getClean()) {
                listener.getLogger().println("Cleaning workspace");
                git.clean();
              }

              // Fetch the diffs into the changelog file
              return new Object[] {changeLog.toString(), buildChooser.getData()};
            }
          });

  build.addAction((Action) returnData[1]);

  return changeLogResult((String) returnData[0], changelogFile);
}
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
    throws IOException, InterruptedException {

  listener.getLogger().println("[CucumberReportPublisher] Compiling Cucumber Html Reports ...");

  // source directory (possibly on slave)
  FilePath workspaceJsonReportDirectory;
  if (jsonReportDirectory.isEmpty()) {
    workspaceJsonReportDirectory = build.getWorkspace();
  } else {
    workspaceJsonReportDirectory = new FilePath(build.getWorkspace(), jsonReportDirectory);
  }

  // target directory (always on master)
  File targetBuildDirectory = new File(build.getRootDir(), "cucumber-html-reports");
  if (!targetBuildDirectory.exists()) {
    targetBuildDirectory.mkdirs();
  }

  String buildNumber = Integer.toString(build.getNumber());
  String buildProject = build.getProject().getName();

  if (Computer.currentComputer() instanceof SlaveComputer) {
    listener.getLogger().println(
        "[CucumberReportPublisher] Copying all json files from slave: "
            + workspaceJsonReportDirectory.getRemote()
            + " to master reports directory: " + targetBuildDirectory);
  } else {
    listener.getLogger().println(
        "[CucumberReportPublisher] Copying all json files from: "
            + workspaceJsonReportDirectory.getRemote()
            + " to reports directory: " + targetBuildDirectory);
  }
  workspaceJsonReportDirectory.copyRecursiveTo(
      DEFAULT_FILE_INCLUDE_PATTERN, new FilePath(targetBuildDirectory));

  // generate the reports from the targetBuildDirectory
  Result result = Result.NOT_BUILT;
  String[] jsonReportFiles =
      findJsonFiles(targetBuildDirectory, fileIncludePattern, fileExcludePattern);
  if (jsonReportFiles.length > 0) {
    listener.getLogger().println(
        String.format("[CucumberReportPublisher] Found %d json files.", jsonReportFiles.length));
    int jsonIndex = 0;
    for (String jsonReportFile : jsonReportFiles) {
      listener.getLogger().println(
          "[CucumberReportPublisher] " + jsonIndex + ". Found a json file: " + jsonReportFile);
      jsonIndex++;
    }
    listener.getLogger().println("[CucumberReportPublisher] Generating HTML reports");

    try {
      ReportBuilder reportBuilder =
          new ReportBuilder(
              fullPathToJsonFiles(jsonReportFiles, targetBuildDirectory),
              targetBuildDirectory,
              pluginUrlPath,
              buildNumber,
              buildProject,
              skippedFails,
              pendingFails,
              undefinedFails,
              missingFails,
              !noFlashCharts,
              true,
              false,
              parallelTesting);
      reportBuilder.generateReports();

      boolean buildSuccess = reportBuilder.getBuildStatus();
      if (buildSuccess) {
        result = Result.SUCCESS;
      } else {
        result = ignoreFailedTests ? Result.UNSTABLE : Result.FAILURE;
      }
    } catch (Exception e) {
      e.printStackTrace();
      result = Result.FAILURE;
      listener.getLogger().println(
          "[CucumberReportPublisher] there was an error generating the reports: " + e);
      for (StackTraceElement error : e.getStackTrace()) {
        listener.getLogger().println(error);
      }
    }
  } else {
    result = Result.SUCCESS;
    listener.getLogger().println(
        "[CucumberReportPublisher] there were no json results found in: " + targetBuildDirectory);
  }

  build.addAction(new CucumberReportBuildAction(build));
  build.setResult(result);

  return true;
}