private String getJobDefinedMavenInstallation(BuildListener listener, VirtualChannel channel) {
  Maven.MavenInstallation mvn = getMavenInstallation();
  if (mvn == null) {
    listener.error(
        "Maven version is not configured for this project. Can't determine which Maven to run");
    throw new Run.RunnerAbortedException();
  }
  String mvnHome = mvn.getHome();
  if (mvnHome == null) {
    listener.error("Maven '%s' doesn't have its home set", mvn.getName());
    throw new Run.RunnerAbortedException();
  }
  return mvnHome;
}
@Override
public Environment setUp(final AbstractBuild build, Launcher launcher, BuildListener listener)
    throws IOException, InterruptedException {
  final String artifactoryServerName = getArtifactoryName();
  if (StringUtils.isBlank(artifactoryServerName)) {
    return super.setUp(build, launcher, listener);
  }
  final ArtifactoryServer artifactoryServer = getArtifactoryServer();
  if (artifactoryServer == null) {
    listener
        .getLogger()
        .format(
            "[JFROG] No Artifactory server configured for %s. Please check your configuration.",
            artifactoryServerName)
        .println();
    build.setResult(Result.FAILURE);
    throw new IllegalArgumentException(
        "No Artifactory server configured for " + artifactoryServerName);
  }
  Credentials preferredDeployer;
  ArtifactoryServer server = getArtifactoryServer();
  if (isOverridingDefaultDeployer()) {
    preferredDeployer = getOverridingDeployerCredentials();
  } else {
    preferredDeployer = server.getResolvingCredentials();
  }
  hudson.ProxyConfiguration proxy = Hudson.getInstance().proxy;
  ProxyConfiguration proxyConfiguration = null;
  if (proxy != null && proxy.getName() != null) {
    proxyConfiguration = new ProxyConfiguration();
    proxyConfiguration.host = proxy.name;
    proxyConfiguration.port = proxy.port;
    proxyConfiguration.username = proxy.getUserName();
    proxyConfiguration.password = proxy.getPassword();
  }
  ArtifactoryDependenciesClient dependenciesClient =
      server.createArtifactoryDependenciesClient(
          preferredDeployer.getUsername(),
          preferredDeployer.getPassword(),
          proxyConfiguration,
          listener);
  try {
    GenericArtifactsResolver artifactsResolver =
        new GenericArtifactsResolver(build, listener, dependenciesClient, getResolvePattern());
    publishedDependencies = artifactsResolver.retrievePublishedDependencies();
    buildDependencies = artifactsResolver.retrieveBuildDependencies();
    return createEnvironmentOnSuccessfulSetup();
  } catch (Exception e) {
    e.printStackTrace(listener.error(e.getMessage()));
  } finally {
    dependenciesClient.shutdown();
  }
  return null;
}
/**
 * Logic is more-or-less copied from {@link
 * DefaultMatrixExecutionStrategyImpl#notifyStartBuild(java.util.List)}
 *
 * <p>Triggers the startBuild event on all aggregators. This should be called before any run is
 * started.
 *
 * @param aggregators The aggregators to be notified.
 * @param listener Listener from the parent build that can be logged to.
 * @return True if all aggregators return true. If any aggregator returns false, false is
 *     returned immediately and no further aggregators are called.
 * @throws IOException
 * @throws InterruptedException
 */
private boolean notifyStartBuild(List<MatrixAggregator> aggregators, BuildListener listener)
    throws IOException, InterruptedException {
  for (MatrixAggregator aggregator : aggregators) {
    if (!aggregator.startBuild()) {
      listener.error("Aggregator terminated build: " + aggregator.toString());
      return false;
    }
  }
  return true;
}
/**
 * Logic is copied from {@link
 * DefaultMatrixExecutionStrategyImpl#notifyEndBuild(hudson.matrix.MatrixRun, java.util.List)}
 */
private void notifyEndRun(
    MatrixRun run, List<MatrixAggregator> aggregators, BuildListener listener)
    throws InterruptedException, IOException {
  if (run == null) {
    return; // can happen if the configuration run gets cancelled before it gets started.
  }
  for (MatrixAggregator aggregator : aggregators) {
    if (!aggregator.endRun(run)) {
      listener.error("Aggregator terminated build: " + aggregator.toString());
      throw new AbortException();
    }
  }
}
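// Hypothetical sketch, not taken from the plugin source: one way the two notify helpers above
// and the scheduleConfigurationBuild(...)/waitForMatrixRuns(...) methods shown later in this
// section might be combined in MatrixExecutionStrategy#run(). The upstream cause, the empty
// pattern list and the retry bookkeeping are simplified placeholders for illustration only.
@Override
public Result run(MatrixBuild.MatrixBuildExecution execution)
    throws InterruptedException, IOException {
  List<MatrixAggregator> aggregators =
      new ArrayList<MatrixAggregator>(execution.getAggregators());
  if (!notifyStartBuild(aggregators, execution.getListener())) {
    // An aggregator vetoed the build before any configuration was scheduled.
    return Result.FAILURE;
  }
  LinkedList<MatrixConfiguration> scheduled = new LinkedList<MatrixConfiguration>();
  Map<MatrixConfiguration, Integer> retries = new HashMap<MatrixConfiguration, Integer>();
  for (MatrixConfiguration configuration : execution.getActiveConfigurations()) {
    scheduleConfigurationBuild(
        execution, configuration, new Cause.UpstreamCause((Run) execution.getBuild()));
    retries.put(configuration, 0);
    scheduled.add(configuration);
  }
  // waitForMatrixRuns(...) invokes notifyEndRun(...) for every finished MatrixRun.
  return waitForMatrixRuns(execution, Collections.<Pattern>emptyList(), retries, scheduled);
}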
private boolean buildModule(String url, SVNLogClient svnlc, SVNXMLLogHandler logHandler)
    throws IOException2 {
  PrintStream logger = listener.getLogger();
  Long prevRev = previousRevisions.get(url);
  if (prevRev == null) {
    logger.println("no revision recorded for " + url + " in the previous build");
    return false;
  }
  Long thisRev = thisRevisions.get(url);
  if (thisRev == null) {
    listener.error(
        "No revision found for URL: "
            + url
            + " in "
            + SubversionSCM.getRevisionFile(build)
            + ". Revision file contains: "
            + thisRevisions.keySet());
    return false;
  }
  if (thisRev.equals(prevRev)) {
    logger.println("no change for " + url + " since the previous build");
    return false;
  }
  try {
    if (debug) {
      listener
          .getLogger()
          .printf(
              "Computing changelog of %1$s from %2$s to %3$s\n",
              SVNURL.parseURIEncoded(url), prevRev + 1, thisRev);
    }
    svnlc.doLog(
        SVNURL.parseURIEncoded(url),
        null,
        SVNRevision.UNDEFINED,
        SVNRevision.create(prevRev + 1),
        SVNRevision.create(thisRev),
        false, // Don't stop on copy.
        true, // Report paths.
        0, // Retrieve log entries for an unlimited number of revisions.
        debug ? new DebugSVNLogHandler(logHandler) : logHandler);
    if (debug) {
      listener.getLogger().println("done");
    }
  } catch (SVNException e) {
    throw new IOException2("revision check failed on " + url, e);
  }
  return true;
}
/**
 * Schedules the given configuration.
 *
 * <p>Copied from {@link
 * DefaultMatrixExecutionStrategyImpl#scheduleConfigurationBuild(hudson.matrix.MatrixBuild.MatrixBuildExecution,
 * hudson.matrix.MatrixConfiguration)}
 *
 * @param execution Contains information about the general build, including the listener used to
 *     log queue blockage.
 * @param configuration The configuration to schedule.
 * @param upstreamCause The cause of the build. Will either be an {@link
 *     hudson.model.Cause.UpstreamCause} or a {@link
 *     com.attask.jenkins.healingmatrixproject.SelfHealingCause}.
 */
private void scheduleConfigurationBuild(
    MatrixBuild.MatrixBuildExecution execution,
    MatrixConfiguration configuration,
    Cause.UpstreamCause upstreamCause)
    throws InterruptedException {
  MatrixBuild build = (MatrixBuild) execution.getBuild();
  execution
      .getListener()
      .getLogger()
      .println(Messages.MatrixBuild_Triggering(ModelHyperlinkNote.encodeTo(configuration)));
  // Filter the parent actions for those that can be passed to the individual jobs.
  List<MatrixChildAction> childActions = Util.filter(build.getActions(), MatrixChildAction.class);
  BuildListener listener = execution.getListener();
  while (!configuration.scheduleBuild(childActions, upstreamCause)) {
    String msg =
        "Unable to schedule build " + configuration.getFullDisplayName() + ". Retrying.";
    listener.error(msg);
    Thread.sleep(500);
  }
}
public void warn(Throwable throwable, String message, Object... args) {
  listener.error(String.format(message, args) + ": " + throwable.getCause());
}
public void warn(String message, Object... args) {
  listener.error(String.format(message, args));
}
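// Hypothetical call site for the two warn(...) overloads above, assuming the surrounding class
// keeps the BuildListener in the 'listener' field those methods use. The deployArtifacts()
// helper is made up for illustration only.
try {
  deployArtifacts();
} catch (IOException e) {
  warn(e, "Failed to deploy artifacts for build #%d", build.getNumber());
}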
@Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, final BuildListener listener) throws InterruptedException { // during matrix build, the push back would happen at the very end only once for the whole // matrix, // not for individual configuration build. if (build instanceof MatrixRun) { return true; } SCM scm = build.getProject().getScm(); if (!(scm instanceof GitSCM)) { return false; } final GitSCM gitSCM = (GitSCM) scm; if (gitSCM.getUseShallowClone()) { listener.getLogger().println("GitPublisher disabled while using shallow clone."); return true; } final String projectName = build.getProject().getName(); final FilePath workspacePath = build.getWorkspace(); final int buildNumber = build.getNumber(); final Result buildResult = build.getResult(); // If pushOnlyIfSuccess is selected and the build is not a success, don't push. if (pushOnlyIfSuccess && buildResult.isWorseThan(Result.SUCCESS)) { listener .getLogger() .println( "Build did not succeed and the project is configured to only push after a successful build, so no pushing will occur."); return true; } else { final String gitExe = gitSCM.getGitExe(build.getBuiltOn(), listener); EnvVars tempEnvironment; try { tempEnvironment = build.getEnvironment(listener); } catch (IOException e) { e.printStackTrace(listener.error("Failed to build up environment")); tempEnvironment = new EnvVars(); } String confName = gitSCM.getGitConfigNameToUse(); if ((confName != null) && (!confName.equals(""))) { tempEnvironment.put("GIT_COMMITTER_NAME", confName); tempEnvironment.put("GIT_AUTHOR_NAME", confName); } String confEmail = gitSCM.getGitConfigEmailToUse(); if ((confEmail != null) && (!confEmail.equals(""))) { tempEnvironment.put("GIT_COMMITTER_EMAIL", confEmail); tempEnvironment.put("GIT_AUTHOR_EMAIL", confEmail); } final EnvVars environment = tempEnvironment; final FilePath workingDirectory = gitSCM.workingDirectory(workspacePath, environment); boolean pushResult = true; // If we're pushing the merge back... if (pushMerge) { boolean mergeResult; try { mergeResult = workingDirectory.act( new FileCallable<Boolean>() { private static final long serialVersionUID = 1L; public Boolean invoke(File workspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(workspace), listener, environment); // We delete the old tag generated by the SCM plugin String buildnumber = "jenkins-" + projectName + "-" + buildNumber; git.deleteTag(buildnumber); // And add the success / fail state into the tag. 
buildnumber += "-" + buildResult.toString(); git.tag(buildnumber, "Jenkins Build #" + buildNumber); PreBuildMergeOptions mergeOptions = gitSCM.getMergeOptions(); if (mergeOptions.doMerge() && buildResult.isBetterOrEqualTo(Result.SUCCESS)) { RemoteConfig remote = mergeOptions.getMergeRemote(); listener .getLogger() .println( "Pushing HEAD to branch " + mergeOptions.getMergeTarget() + " of " + remote.getName() + " repository"); git.push(remote, "HEAD:" + mergeOptions.getMergeTarget()); } else { // listener.getLogger().println("Pushing result " + buildnumber + " to // origin repository"); // git.push(null); } return true; } }); } catch (Throwable e) { e.printStackTrace(listener.error("Failed to push merge to origin repository")); build.setResult(Result.FAILURE); mergeResult = false; } if (!mergeResult) { pushResult = false; } } if (isPushTags()) { boolean allTagsResult = true; for (final TagToPush t : tagsToPush) { boolean tagResult = true; if (t.getTagName() == null) { listener.getLogger().println("No tag to push defined"); tagResult = false; } if (t.getTargetRepoName() == null) { listener.getLogger().println("No target repo to push to defined"); tagResult = false; } if (tagResult) { final String tagName = environment.expand(t.getTagName()); final String targetRepo = environment.expand(t.getTargetRepoName()); try { tagResult = workingDirectory.act( new FileCallable<Boolean>() { private static final long serialVersionUID = 1L; public Boolean invoke(File workspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(workspace), listener, environment); RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo); if (remote == null) { listener .getLogger() .println("No repository found for target repo name " + targetRepo); return false; } if (t.isCreateTag()) { if (git.tagExists(tagName)) { listener .getLogger() .println( "Tag " + tagName + " already exists and Create Tag is specified, so failing."); return false; } git.tag(tagName, "Jenkins Git plugin tagging with " + tagName); } else if (!git.tagExists(tagName)) { listener .getLogger() .println( "Tag " + tagName + " does not exist and Create Tag is not specified, so failing."); return false; } listener .getLogger() .println("Pushing tag " + tagName + " to repo " + targetRepo); git.push(remote, tagName); return true; } }); } catch (Throwable e) { e.printStackTrace( listener.error("Failed to push tag " + tagName + " to " + targetRepo)); build.setResult(Result.FAILURE); tagResult = false; } } if (!tagResult) { allTagsResult = false; } } if (!allTagsResult) { pushResult = false; } } if (isPushBranches()) { boolean allBranchesResult = true; for (final BranchToPush b : branchesToPush) { boolean branchResult = true; if (b.getBranchName() == null) { listener.getLogger().println("No branch to push defined"); return false; } if (b.getTargetRepoName() == null) { listener.getLogger().println("No branch repo to push to defined"); return false; } final String branchName = environment.expand(b.getBranchName()); final String targetRepo = environment.expand(b.getTargetRepoName()); if (branchResult) { try { branchResult = workingDirectory.act( new FileCallable<Boolean>() { private static final long serialVersionUID = 1L; public Boolean invoke(File workspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(workspace), listener, environment); RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo); if (remote == null) { listener .getLogger() .println("No repository found for 
target repo name " + targetRepo); return false; } listener .getLogger() .println( "Pushing HEAD to branch " + branchName + " at repo " + targetRepo); git.push(remote, "HEAD:" + branchName); return true; } }); } catch (Throwable e) { e.printStackTrace( listener.error("Failed to push branch " + branchName + " to " + targetRepo)); build.setResult(Result.FAILURE); branchResult = false; } } if (!branchResult) { allBranchesResult = false; } } if (!allBranchesResult) { pushResult = false; } } if (isPushNotes()) { boolean allNotesResult = true; for (final NoteToPush b : notesToPush) { boolean noteResult = true; if (b.getnoteMsg() == null) { listener.getLogger().println("No note to push defined"); return false; } b.setEmptyTargetRepoToOrigin(); final String noteMsg = environment.expand(b.getnoteMsg()); final String noteNamespace = environment.expand(b.getnoteNamespace()); final String targetRepo = environment.expand(b.getTargetRepoName()); final boolean noteReplace = b.getnoteReplace(); if (noteResult) { try { noteResult = workingDirectory.act( new FileCallable<Boolean>() { private static final long serialVersionUID = 1L; public Boolean invoke(File workspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(workspace), listener, environment); RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo); if (remote == null) { listener .getLogger() .println("No repository found for target repo name " + targetRepo); return false; } listener .getLogger() .println( "Adding note \"" + noteMsg + "\" to namespace \"" + noteNamespace + "\""); if (noteReplace) git.addNote(noteMsg, noteNamespace); else git.appendNote(noteMsg, noteNamespace); git.push(remote, "refs/notes/*"); return true; } }); } catch (Throwable e) { e.printStackTrace( listener.error( "Failed to add note \"" + noteMsg + "\" to \"" + noteNamespace + "\"")); build.setResult(Result.FAILURE); noteResult = false; } } if (!noteResult) { allNotesResult = false; } } if (!allNotesResult) { pushResult = false; } } return pushResult; } }
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
    throws InterruptedException, IOException {
  if (build.getResult().isWorseThan(getTreshold())) {
    return true; // build failed. Don't publish
  }
  List<MavenAbstractArtifactRecord> mavenAbstractArtifactRecords = getActions(build, listener);
  if (mavenAbstractArtifactRecords == null || mavenAbstractArtifactRecords.isEmpty()) {
    listener.getLogger().println("[ERROR] No artifacts are recorded. Is this a Maven project?");
    build.setResult(Result.FAILURE);
    return true;
  }
  if (build instanceof MavenModuleSetBuild
      && ((MavenModuleSetBuild) build).getParent().isArchivingDisabled()) {
    listener
        .getLogger()
        .println(
            "[ERROR] You cannot use the \"Deploy artifacts to Maven repository\" feature if you "
                + "disabled automatic artifact archiving");
    build.setResult(Result.FAILURE);
    return true;
  }
  long startupTime = Calendar.getInstance().getTimeInMillis();
  try {
    MavenEmbedder embedder = createEmbedder(listener, build);
    ArtifactRepositoryLayout layout =
        (ArtifactRepositoryLayout) embedder.lookup(ArtifactRepositoryLayout.ROLE, "default");
    ArtifactRepositoryFactory factory =
        (ArtifactRepositoryFactory) embedder.lookup(ArtifactRepositoryFactory.ROLE);
    ArtifactRepository artifactRepository = null;
    if (url != null) {
      // By default we try to get the repository definition from the job configuration
      artifactRepository = getDeploymentRepository(factory, layout, id, url);
    }
    for (MavenAbstractArtifactRecord mavenAbstractArtifactRecord : mavenAbstractArtifactRecords) {
      if (artifactRepository == null
          && mavenAbstractArtifactRecord instanceof MavenArtifactRecord) {
        // If no repository definition is set on the job level we try to take it from the POM
        MavenArtifactRecord mavenArtifactRecord =
            (MavenArtifactRecord) mavenAbstractArtifactRecord;
        artifactRepository =
            getDeploymentRepository(
                factory,
                layout,
                mavenArtifactRecord.repositoryId,
                mavenArtifactRecord.repositoryUrl);
      }
      if (artifactRepository == null) {
        listener
            .getLogger()
            .println(
                "[ERROR] No Repository settings defined in the job configuration or distributionManagement of the module.");
        build.setResult(Result.FAILURE);
        return true;
      }
      mavenAbstractArtifactRecord.deploy(embedder, artifactRepository, listener);
    }
    listener
        .getLogger()
        .println(
            "[INFO] Deployment done in "
                + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
    return true;
  } catch (MavenEmbedderException e) {
    e.printStackTrace(listener.error(e.getMessage()));
  } catch (ComponentLookupException e) {
    e.printStackTrace(listener.error(e.getMessage()));
  } catch (ArtifactDeploymentException e) {
    e.printStackTrace(listener.error(e.getMessage()));
  }
  // failed
  build.setResult(Result.FAILURE);
  listener
      .getLogger()
      .println(
          "[INFO] Deployment failed after "
              + Util.getTimeSpanString(Calendar.getInstance().getTimeInMillis() - startupTime));
  return true;
}
/**
 * Waits for the given configurations to finish, retrying any that qualify to be rerun.
 *
 * @param execution Provided by the plugin.
 * @param patterns List of regular expression patterns used to scan the log to determine if a
 *     build should be rerun.
 * @param retries Mutable map that tracks the number of times a specific configuration has been
 *     retried.
 * @param configurations The configurations that have already been scheduled to run and that
 *     should be waited on to finish.
 * @return The worst result of all the runs. If a build was rerun, only the result of the rerun
 *     is considered.
 * @throws InterruptedException
 * @throws IOException
 */
private Result waitForMatrixRuns(
    MatrixBuild.MatrixBuildExecution execution,
    List<Pattern> patterns,
    Map<MatrixConfiguration, Integer> retries,
    LinkedList<MatrixConfiguration> configurations)
    throws InterruptedException, IOException {
  BuildListener listener = execution.getListener();
  PrintStream logger = listener.getLogger();
  // Keep track of why builds are blocked so we can print unique messages when they change.
  Map<String, String> whyBlockedMap = new HashMap<String, String>();
  Result finalResult = Result.SUCCESS;
  int iteration = 0;
  boolean continueRetrying = true;
  while (!configurations.isEmpty()) {
    ++iteration;
    MatrixConfiguration configuration = configurations.removeFirst();
    if (isBuilding(execution, configuration, whyBlockedMap)) {
      if (iteration >= configurations.size()) {
        // Every time we loop through all the configurations, sleep for a bit.
        // This is to prevent polling too often while everything is still building.
        iteration = 0;
        Thread.sleep(1000);
      }
      configurations.add(configuration);
      continue;
    }
    Run parentBuild = execution.getBuild();
    MatrixRun matrixRun = configuration.getBuildByNumber(parentBuild.getNumber());
    Result runResult = matrixRun.getResult();
    if (continueRetrying
        && runResult.isWorseOrEqualTo(getWorseThanOrEqualTo())
        && runResult.isBetterOrEqualTo(getBetterThanOrEqualTo())) {
      if (matchesPattern(matrixRun, patterns)) {
        int retriedCount = retries.get(configuration);
        if (retriedCount < getMaxRetries()) {
          ++retriedCount;
          retries.put(configuration, retriedCount);
          // rerun
          String logMessage =
              String.format(
                  "%s was %s. Matched pattern to rerun. Rerunning (%d).",
                  matrixRun, runResult, retriedCount);
          listener.error(logMessage);
          HealedAction action = parentBuild.getAction(HealedAction.class);
          if (action == null) {
            //noinspection SynchronizationOnLocalVariableOrMethodParameter
            synchronized (parentBuild.getActions()) {
              action = parentBuild.getAction(HealedAction.class);
              if (action == null) {
                action = new HealedAction(matrixRun.getCharset());
                parentBuild.addAction(action);
              }
            }
          }
          action.addAutoHealedJob(matrixRun);
          MatrixConfiguration parent = matrixRun.getParent();
          if (parent != null) {
            // I'm paranoid about NPEs
            parent.removeRun(matrixRun);
            matrixRun.delete();
          } else {
            LOGGER.severe(
                "couldn't remove old run, parent was null. This is a Jenkins core bug.");
          }
          scheduleConfigurationBuild(
              execution, configuration, new SelfHealingCause(parentBuild, retriedCount));
          configurations.add(configuration);
          continue;
        } else {
          String logMessage =
              String.format(
                  "%s was %s. Matched pattern to rerun, but the max number of retries (%d) has been met.",
                  matrixRun, runResult, getMaxRetries());
          listener.error(logMessage);
          if (getStopRetryingAfterOneFails()) {
            listener.error("Not retrying any more builds.");
            continueRetrying = false;
          }
        }
      } else {
        String logMessage =
            String.format(
                "%s was %s. It did not match the pattern to rerun. Accepting result.",
                matrixRun, runResult);
        logger.println(logMessage);
      }
    }
    notifyEndRun(matrixRun, execution.getAggregators(), execution.getListener());
    finalResult = finalResult.combine(runResult);
  }
  return finalResult;
}
@Override protected Result doRun(final BuildListener listener) throws Exception { PrintStream logger = listener.getLogger(); try { EnvVars envVars = getEnvironment(listener); Config config = IvyConfig.provider.getConfigById(project.getSettings()); if (config != null) { FilePath tmp = getWorkspace().createTextTempFile("ivy", "xml", config.content); settings = tmp.getRemote(); addAction(new CleanTempFilesAction(settings)); } else { String settingsFile = project.getIvySettingsFile(); if (settingsFile != null) { settings = getWorkspace().child(settingsFile).getRemote(); } } if (!project.isAggregatorStyleBuild()) { // start module builds parseIvyDescriptorFiles(listener, logger, envVars); Set<IvyModule> triggeredModules = new HashSet<IvyModule>(); if (!project.isIncrementalBuild() || IvyModuleSetBuild.this.getChangeSet().isEmptySet()) { for (IvyModule module : project.sortedActiveModules) { // Don't trigger builds if we've already triggered // one // of their dependencies. // It's safe to just get the direct dependencies // since // the modules are sorted in dependency order. List<AbstractProject> ups = module.getUpstreamProjects(); boolean triggerBuild = true; for (AbstractProject upstreamDep : ups) { if (triggeredModules.contains(upstreamDep)) { triggerBuild = false; break; } } if (triggerBuild) { logger.println("Triggering " + module.getModuleName()); module.scheduleBuild( new ParameterizedUpstreamCause( ((Run<?, ?>) IvyModuleSetBuild.this), IvyModuleSetBuild.this.getActions(ParametersAction.class))); } triggeredModules.add(module); } } else { for (IvyModule module : project.sortedActiveModules) { // If there are changes for this module, add it. // Also add it if we've never seen this module // before, // or if the previous build of this module // failed or was unstable. boolean triggerBuild = false; if ((module.getLastBuild() == null) || (!getChangeSetFor(module).isEmpty()) || (module.getLastBuild().getResult().isWorseThan(Result.SUCCESS))) { triggerBuild = true; List<AbstractProject> ups = module.getUpstreamProjects(); for (AbstractProject upstreamDep : ups) { if (triggeredModules.contains(upstreamDep)) { triggerBuild = false; triggeredModules.add(module); break; } } } if (triggerBuild) { logger.println("Triggering " + module.getModuleName()); module.scheduleBuild( new ParameterizedUpstreamCause( ((Run<?, ?>) IvyModuleSetBuild.this), IvyModuleSetBuild.this.getActions(ParametersAction.class))); triggeredModules.add(module); } } } } else { // do builds here try { List<BuildWrapper> wrappers = new ArrayList<BuildWrapper>(); for (BuildWrapper w : project.getBuildWrappersList()) wrappers.add(w); ParametersAction parameters = getAction(ParametersAction.class); if (parameters != null) parameters.createBuildWrappers(IvyModuleSetBuild.this, wrappers); for (BuildWrapper w : wrappers) { Environment e = w.setUp(IvyModuleSetBuild.this, launcher, listener); if (e == null) return Result.FAILURE; buildEnvironments.add(e); e.buildEnvVars(envVars); // #3502: too late for // getEnvironment to do // this } if (!preBuild(listener, project.getPublishers())) return Result.FAILURE; Properties additionalProperties = null; if (project.isIncrementalBuild()) { parseIvyDescriptorFiles(listener, logger, envVars); List<String> changedModules = new ArrayList<String>(); for (IvyModule m : project.sortedActiveModules) { // Check if incrementalBuild is selected and that // there are changes - // we act as if incrementalBuild is not set if there // are no changes. 
if (!IvyModuleSetBuild.this.getChangeSet().isEmptySet()) { // If there are changes for this module, add it. if (!getChangeSetFor(m).isEmpty()) { changedModules.add(m.getModuleName().name); } } } if (project.isAggregatorStyleBuild()) { additionalProperties = new Properties(); additionalProperties.put( project.getChangedModulesProperty() == null ? "hudson.ivy.changedModules" : project.getChangedModulesProperty(), StringUtils.join(changedModules, ',')); } } IvyBuilderType ivyBuilderType = project.getIvyBuilderType(); hudson.tasks.Builder builder = ivyBuilderType.getBuilder(additionalProperties, null, buildEnvironments); logger.println( "Building project with " + ivyBuilderType.getDescriptor().getDisplayName()); if (builder.perform(IvyModuleSetBuild.this, launcher, listener)) return Result.SUCCESS; return Result.FAILURE; } finally { // tear down in reverse order boolean failed = false; for (int i = buildEnvironments.size() - 1; i >= 0; i--) { if (!buildEnvironments.get(i).tearDown(IvyModuleSetBuild.this, listener)) { failed = true; } } buildEnvironments = null; // WARNING The return in the finally clause will trump // any return before if (failed) return Result.FAILURE; } } return null; } catch (AbortException e) { if (e.getMessage() != null) listener.error(e.getMessage()); return Result.FAILURE; } catch (InterruptedIOException e) { e.printStackTrace(listener.error("Aborted Ivy execution for InterruptedIOException")); return Result.ABORTED; } catch (InterruptedException e) { e.printStackTrace(listener.error("Aborted Ivy execution for InterruptedException")); return Result.ABORTED; } catch (IOException e) { e.printStackTrace(listener.error(Messages.IvyModuleSetBuild_FailedToParseIvyXml())); return Result.FAILURE; } catch (RunnerAbortedException e) { return Result.FAILURE; } catch (RuntimeException e) { // bug in the code. e.printStackTrace( listener.error( "Processing failed due to a bug in the code. Please report this to [email protected]")); logger.println("project=" + project); logger.println("project.getModules()=" + project.getModules()); throw e; } }
@Override
public boolean perform(
    final AbstractBuild<?, ?> build, Launcher launcher, final BuildListener listener)
    throws InterruptedException {
  final SCM scm = build.getProject().getScm();
  if (!(scm instanceof GitSCM)) {
    return false;
  }
  final String projectName = build.getProject().getName();
  final int buildNumber = build.getNumber();
  final Result buildResult = build.getResult();
  boolean canPerform;
  try {
    canPerform =
        build
            .getWorkspace()
            .act(
                new FileCallable<Boolean>() {
                  private static final long serialVersionUID = 1L;

                  public Boolean invoke(File workspace, VirtualChannel channel)
                      throws IOException {
                    GitSCM gitSCM = (GitSCM) scm;
                    EnvVars environment;
                    try {
                      environment = build.getEnvironment(listener);
                    } catch (IOException e) {
                      listener.error("IOException publishing in git plugin");
                      environment = new EnvVars();
                    } catch (InterruptedException e) {
                      listener.error("InterruptedException publishing in git plugin");
                      environment = new EnvVars();
                    }
                    IGitAPI git =
                        new GitAPI(
                            gitSCM.getDescriptor().getGitExe(),
                            build.getWorkspace(),
                            listener,
                            environment);
                    // We delete the old tag generated by the SCM plugin
                    String buildnumber = "hudson-" + projectName + "-" + buildNumber;
                    git.deleteTag(buildnumber);
                    // And add the success / fail state into the tag.
                    buildnumber += "-" + buildResult.toString();
                    git.tag(buildnumber, "Hudson Build #" + buildNumber);
                    if (gitSCM.getMergeOptions().doMerge()
                        && buildResult.isBetterOrEqualTo(Result.SUCCESS)) {
                      listener
                          .getLogger()
                          .println(
                              "Pushing result "
                                  + buildnumber
                                  + " to "
                                  + gitSCM.getMergeOptions().getMergeTarget()
                                  + " branch of origin repository");
                      RemoteConfig remote = gitSCM.getRepositories().get(0);
                      git.push(remote, "HEAD:" + gitSCM.getMergeOptions().getMergeTarget());
                    } else {
                      // listener.getLogger().println("Pushing result " + buildnumber
                      //     + " to origin repository");
                      // git.push(null);
                    }
                    return true;
                  }
                });
  } catch (Throwable e) {
    listener.error("Failed to push tags to origin repository: " + e.getMessage());
    build.setResult(Result.FAILURE);
    return false;
  }
  return canPerform;
}
@Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) { try { PrintStream logger = listener.getLogger(); ValidationDescriptor validationDesc = (ValidationDescriptor) Jenkins.getInstance().getDescriptorOrDie(Validator.class); String serverURL = validationDesc.getServiceURL(); URI uri = URI.create(serverURL); logger.format( "Using parameters%n scheme=%s%n host=%s%n port=%s%n prefix=%s%n", uri.getScheme(), uri.getHost(), uri.getPort(), uri.getPath()); Injector injector = Guice.createInjector( new StackHammerModule( uri.getScheme(), uri.getHost(), uri.getPort(), uri.getPath(), getApiKey())); DeploymentResult data = new DeploymentResult(); build.addAction(data); StackHammerFactory factory = injector.getInstance(StackHammerFactory.class); RepositoryService repoService = factory.createRepositoryService(); String[] splitName = getStack().split("/"); String owner = splitName[0]; String name = splitName[1]; logger.format( "Verifying that a local clone of repository %s/%s[%s] exists at Stack Hammer Service%n", owner, name, branch); ResultWithDiagnostic<Repository> cloneResult = repoService.cloneRepository(Provider.GITHUB, owner, name, branch); data.setCloneDiagnostic(cloneResult); if (cloneResult.getSeverity() == Diagnostic.ERROR) { listener.error(cloneResult.toString()); return false; } cloneResult.log(logger); StackService stackService = factory.createStackService(); Repository repo = cloneResult.getResult(); long pollInterval = validationDesc.getPollInterval().longValue(); long startTime = System.currentTimeMillis(); long lastPollTime = startTime; long failTime = Long.MAX_VALUE; Integer maxTimeObj = validationDesc.getMaxTime(); if (maxTimeObj != null && maxTimeObj.longValue() > 0) failTime = startTime + maxTimeObj.longValue() * 1000; Boolean dryRunObj = getDryRun(); boolean dryRun = dryRunObj == null ? false : dryRunObj.booleanValue(); String jobIdentifier = stackService.deployStack(repo, repo.getOwner() + "/" + repo.getName(), dryRun); logger.format("Sending order to deploy %s/%s to Stack Hammer Service%n", owner, name); for (; ; ) { long now = System.currentTimeMillis(); if (now > failTime) { logger.format("Job didn't finish in time.%n"); return false; } long sleepTime = (lastPollTime + pollInterval * 1000) - now; if (sleepTime > 0) Thread.sleep(sleepTime); lastPollTime = System.currentTimeMillis(); PollResult pollResult = stackService.pollJob(jobIdentifier); switch (pollResult.getJobState()) { case SCHEDULED: case STARTING: continue; case SLEEPING: case RUNNING: emitLogEntries(pollResult.getLogEntries(), data, logger); continue; case CANCELLED: listener.error("Job was cancelled"); return false; default: emitLogEntries(pollResult.getLogEntries(), data, logger); break; } break; } ResultWithDiagnostic<List<CatalogGraph>> deploymentResult = stackService.getDeploymentResult(jobIdentifier); data.setResult(deploymentResult); if (deploymentResult.getSeverity() == Diagnostic.ERROR) { listener.error(deploymentResult.toString()); return false; } deploymentResult.log(logger); } catch (Exception e) { e.printStackTrace(listener.error("Exception during deployment of %s", getStack())); return false; } return true; }
@Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException { listener.getLogger().println("[htmlpublisher] Archiving HTML reports..."); // Grab the contents of the header and footer as arrays ArrayList<String> headerLines; ArrayList<String> footerLines; try { headerLines = this.readFile("/htmlpublisher/HtmlPublisher/header.html"); footerLines = this.readFile("/htmlpublisher/HtmlPublisher/footer.html"); } catch (FileNotFoundException e1) { e1.printStackTrace(); return false; } catch (IOException e1) { e1.printStackTrace(); return false; } for (int i = 0; i < this.reportTargets.size(); i++) { // Create an array of lines we will eventually write out, initially the header. ArrayList<String> reportLines = new ArrayList<String>(headerLines); HtmlPublisherTarget reportTarget = this.reportTargets.get(i); boolean keepAll = reportTarget.getKeepAll(); FilePath archiveDir = build .getWorkspace() .child(resolveParametersInString(build, listener, reportTarget.getReportDir())); FilePath targetDir = reportTarget.getArchiveTarget(build); String levelString = keepAll ? "BUILD" : "PROJECT"; listener .getLogger() .println( "[htmlpublisher] Archiving at " + levelString + " level " + archiveDir + " to " + targetDir); // The index name might be a comma separated list of names, so let's figure out all the pages // we should index. String[] csvReports = resolveParametersInString(build, listener, reportTarget.getReportFiles()).split(","); ArrayList<String> reports = new ArrayList<String>(); for (int j = 0; j < csvReports.length; j++) { String report = csvReports[j]; report = report.trim(); // Ignore blank report names caused by trailing or double commas. if (report.equals("")) { continue; } reports.add(report); String tabNo = "tab" + (j + 1); // Make the report name the filename without the extension. int end = report.lastIndexOf("."); String reportName; if (end > 0) { reportName = report.substring(0, end); } else { reportName = report; } String tabItem = "<li id=\"" + tabNo + "\" class=\"unselected\" onclick=\"updateBody('" + tabNo + "');\" value=\"" + report + "\">" + reportName + "</li>"; reportLines.add(tabItem); } // Add the JS to change the link as appropriate. String hudsonUrl = Hudson.getInstance().getRootUrl(); AbstractProject job = build.getProject(); reportLines.add( "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").innerHTML=\"Back to " + job.getName() + "\";</script>"); // If the URL isn't configured in Hudson, the best we can do is attempt to go Back. if (hudsonUrl == null) { reportLines.add( "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").onclick = function() { history.go(-1); return false; };</script>"); } else { String jobUrl = hudsonUrl + job.getUrl(); reportLines.add( "<script type=\"text/javascript\">document.getElementById(\"hudson_link\").href=\"" + jobUrl + "\";</script>"); } reportLines.add( "<script type=\"text/javascript\">document.getElementById(\"zip_link\").href=\"*zip*/" + reportTarget.getSanitizedName() + ".zip\";</script>"); try { if (!archiveDir.exists()) { listener.error("Specified HTML directory '" + archiveDir + "' does not exist."); build.setResult(Result.FAILURE); return true; } else if (!keepAll) { // We are only keeping one copy at the project level, so remove the old one. 
targetDir.deleteRecursive(); } if (archiveDir.copyRecursiveTo("**/*", targetDir) == 0) { listener.error( "Directory '" + archiveDir + "' exists but failed copying to '" + targetDir + "'."); if (build.getResult().isBetterOrEqualTo(Result.UNSTABLE)) { // If the build failed, don't complain that there was no coverage. // The build probably didn't even get to the point where it produces coverage. listener.error("This is especially strange since your build otherwise succeeded."); } build.setResult(Result.FAILURE); return true; } } catch (IOException e) { Util.displayIOException(e, listener); e.printStackTrace(listener.fatalError("HTML Publisher failure")); build.setResult(Result.FAILURE); return true; } reportTarget.handleAction(build); // Now add the footer. reportLines.addAll(footerLines); // And write this as the index try { writeFile(reportLines, new File(targetDir.getRemote(), reportTarget.getWrapperName())); } catch (IOException e) { e.printStackTrace(); } } return true; }
@Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException { Result criteriaResult = CloneWorkspaceUtil.getResultForCriteria(criteria); String realIncludeGlob; // Default to **/* if no glob is specified. if (workspaceGlob.length() == 0) { realIncludeGlob = "**/*"; } else { try { realIncludeGlob = build.getEnvironment(listener).expand(workspaceGlob); } catch (IOException e) { // We couldn't get an environment for some reason, so we'll just use the original. realIncludeGlob = workspaceGlob; } } String realExcludeGlob = null; // Default to empty if no glob is specified. if (Util.fixNull(workspaceExcludeGlob).length() != 0) { try { realExcludeGlob = build.getEnvironment(listener).expand(workspaceExcludeGlob); } catch (IOException e) { // We couldn't get an environment for some reason, so we'll just use the original. realExcludeGlob = workspaceExcludeGlob; } } if (build.getResult().isBetterOrEqualTo(criteriaResult)) { listener.getLogger().println(Messages.CloneWorkspacePublisher_ArchivingWorkspace()); FilePath ws = build.getWorkspace(); if (ws == null) { // #3330: slave down? return true; } try { String includeMsg = ws.validateAntFileMask(realIncludeGlob); String excludeMsg = null; if (realExcludeGlob != null) { ws.validateAntFileMask(realExcludeGlob); } // This means we found something. if ((includeMsg == null) && (excludeMsg == null)) { DirScanner globScanner = new DirScanner.Glob(realIncludeGlob, realExcludeGlob); build.addAction(snapshot(build, ws, globScanner, listener, archiveMethod)); // Find the next most recent build meeting this criteria with an archived snapshot. AbstractBuild<?, ?> previousArchivedBuild = CloneWorkspaceUtil.getMostRecentBuildForCriteriaWithSnapshot( build.getPreviousBuild(), criteria); if (previousArchivedBuild != null) { listener .getLogger() .println( Messages.CloneWorkspacePublisher_DeletingOld( previousArchivedBuild.getDisplayName())); try { File oldWss = new File( previousArchivedBuild.getRootDir(), CloneWorkspaceUtil.getFileNameForMethod(archiveMethod)); Util.deleteFile(oldWss); } catch (IOException e) { e.printStackTrace(listener.error(e.getMessage())); } } return true; } else { listener .getLogger() .println(Messages.CloneWorkspacePublisher_NoMatchFound(realIncludeGlob, includeMsg)); return true; } } catch (IOException e) { Util.displayIOException(e, listener); e.printStackTrace( listener.error(Messages.CloneWorkspacePublisher_FailedToArchive(realIncludeGlob))); return true; } catch (InterruptedException e) { e.printStackTrace( listener.error(Messages.CloneWorkspacePublisher_FailedToArchive(realIncludeGlob))); return true; } } else { listener.getLogger().println(Messages.CloneWorkspacePublisher_CriteriaNotMet(criteriaResult)); return true; } }
/** {@inheritDoc} */
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
    throws InterruptedException, IOException {
  JIRAClient client = null;
  // Get the previous JIRA Build Action, if any.
  Run<?, ?> prevBuild = build.getPreviousBuild();
  JIRABuildResultReportAction buildAction = null;
  if (prevBuild != null) {
    buildAction = prevBuild.getAction(JIRABuildResultReportAction.class);
  }
  try {
    client = getSite().createClient();
    if (build.getResult().isWorseThan(Result.SUCCESS)) {
      String issueKey = null;
      if (buildAction != null
          && StringUtils.isNotBlank(buildAction.raisedIssueKey)
          && !buildAction.resolved) {
        // The previous build also failed and has an issue linked to it,
        // so link that issue to this build as well.
        issueKey = client.updateIssue(build, buildAction.raisedIssueKey);
      } else {
        issueKey =
            client.createIssue(build, projectKey, issueType, issuePriority, assignToBuildBreaker);
      }
      build.addAction(new JIRABuildResultReportAction(build, issueKey, false));
      listener.getLogger().println("JBRR: Raised build failure issue: " + issueKey);
    } else if (autoClose && build.getResult().isBetterOrEqualTo(Result.SUCCESS)) {
      // Auto close the previously raised issues, if any
      if (buildAction != null) {
        RemoteIssue issue = client.getIssue(buildAction);
        if (issue == null || issue.getKey() == null) {
          listener
              .getLogger()
              .println(
                  "WARN: Failed to automatically close issue: Unable to locate issue "
                      + buildAction.raisedIssueKey
                      + " in JIRA site "
                      + getSite().name);
        } else if (client.canCloseIssue(issue)) {
          if (client.closeIssue(issue, build)) {
            build.addAction(new JIRABuildResultReportAction(build, issue.getKey(), true));
            listener
                .getLogger()
                .println(
                    "INFO: Closed issue "
                        + issue.getKey()
                        + " using action: "
                        + getSite().getCloseActionName());
          } else {
            listener
                .getLogger()
                .println("WARN: Failed to automatically close issue: " + issue.getKey());
          }
        }
      }
    }
  } catch (AxisFault e) {
    listener.error("JBRR: " + e.getFaultString());
  } catch (ServiceException e) {
    listener.error("JBRR: " + e.getMessage());
  } catch (MalformedURLException e) {
    listener.error("JBRR: Invalid JIRA URL configured");
  } finally {
    if (client != null) {
      client.logout();
    }
  }
  return true;
}
private ArgumentListBuilder buildMavenCmdLine(
    AbstractBuild<?, ?> build, BuildListener listener, EnvVars env)
    throws IOException, InterruptedException {
  FilePath mavenHome = getMavenHomeDir(build, listener, env);
  if (!mavenHome.exists()) {
    listener.error("Couldn't find Maven home: " + mavenHome.getRemote());
    throw new Run.RunnerAbortedException();
  }
  ArgumentListBuilder args = new ArgumentListBuilder();
  FilePath mavenBootDir = new FilePath(mavenHome, "boot");
  FilePath[] classworldsCandidates = mavenBootDir.list("plexus-classworlds*.jar");
  if (classworldsCandidates == null || classworldsCandidates.length == 0) {
    listener.error("Couldn't find classworlds jar under " + mavenBootDir.getRemote());
    throw new Run.RunnerAbortedException();
  }
  FilePath classWorldsJar = classworldsCandidates[0];
  // classpath
  args.add("-classpath");
  // String cpSeparator = launcher.isUnix() ? ":" : ";";
  args.add(classWorldsJar.getRemote());
  // maven home
  args.addKeyValuePair("-D", "maven.home", mavenHome.getRemote(), false);
  String buildInfoPropertiesFile = env.get(BuildInfoConfigProperties.PROP_PROPS_FILE);
  boolean artifactoryIntegration = StringUtils.isNotBlank(buildInfoPropertiesFile);
  listener
      .getLogger()
      .println(
          "Artifactory integration is " + (artifactoryIntegration ? "enabled" : "disabled"));
  String classworldsConfPath;
  if (artifactoryIntegration) {
    args.addKeyValuePair(
        "-D", BuildInfoConfigProperties.PROP_PROPS_FILE, buildInfoPropertiesFile, false);
    // use the classworlds conf packaged with this plugin and resolve the extractor libs
    File maven3ExtractorJar = Which.jarFile(Maven3BuildInfoLogger.class);
    FilePath actualDependencyDirectory =
        PluginDependencyHelper.getActualDependencyDirectory(build, maven3ExtractorJar);
    if (getMavenOpts() == null || !getMavenOpts().contains("-Dm3plugin.lib")) {
      args.addKeyValuePair("-D", "m3plugin.lib", actualDependencyDirectory.getRemote(), false);
    }
    URL classworldsResource =
        getClass()
            .getClassLoader()
            .getResource("org/jfrog/hudson/maven3/classworlds-freestyle.conf");
    File classworldsConfFile =
        new File(URLDecoder.decode(classworldsResource.getFile(), "utf-8"));
    if (!classworldsConfFile.exists()) {
      listener.error(
          "Unable to locate classworlds configuration file under "
              + classworldsConfFile.getAbsolutePath());
      throw new Run.RunnerAbortedException();
    }
    // If we are on a remote slave, make a temp copy of the customized classworlds conf
    if (Computer.currentComputer() instanceof SlaveComputer) {
      FilePath remoteClassworlds =
          build.getWorkspace().createTextTempFile("classworlds", "conf", "", false);
      remoteClassworlds.copyFrom(classworldsResource);
      classworldsConfPath = remoteClassworlds.getRemote();
    } else {
      classworldsConfPath = classworldsConfFile.getCanonicalPath();
    }
  } else {
    classworldsConfPath = new FilePath(mavenHome, "bin/m2.conf").getRemote();
  }
  args.addKeyValuePair("-D", "classworlds.conf", classworldsConfPath, false);
  // maven opts
  if (StringUtils.isNotBlank(getMavenOpts())) {
    String mavenOpts = Util.replaceMacro(getMavenOpts(), build.getBuildVariableResolver());
    args.add(mavenOpts);
  }
  // classworlds launcher main class
  args.add(CLASSWORLDS_LAUNCHER);
  // pom file to build
  String rootPom = getRootPom();
  if (StringUtils.isNotBlank(rootPom)) {
    args.add("-f", rootPom);
  }
  // maven goals
  args.addTokenized(getGoals());
  return args;
}
private boolean recordException(Exception e) {
  listener.error(e.getMessage());
  e.printStackTrace(listener.getLogger());
  return false;
}
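// Hypothetical call site for recordException(...) above: callers can return its value to both
// log the failure and signal a non-fatal error to the caller. The doCheckout(...) helper is
// made up for illustration only.
try {
  return doCheckout(workspace, listener);
} catch (GitException e) {
  return recordException(e);
}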
@Override public boolean checkout( final AbstractBuild build, Launcher launcher, final FilePath workspace, final BuildListener listener, File changelogFile) throws IOException, InterruptedException { listener .getLogger() .println( "Checkout:" + workspace.getName() + " / " + workspace.getRemote() + " - " + workspace.getChannel()); final String projectName = build.getProject().getName(); final int buildNumber = build.getNumber(); final String gitExe = getDescriptor().getGitExe(); final String buildnumber = "hudson-" + projectName + "-" + buildNumber; final BuildData buildData = getBuildData(build.getPreviousBuild(), true); if (buildData != null && buildData.lastBuild != null) { listener.getLogger().println("Last Built Revision: " + buildData.lastBuild.revision); } EnvVars tmp = new EnvVars(); try { tmp = build.getEnvironment(listener); } catch (InterruptedException e) { listener.error("Interrupted exception getting environment .. using empty environment"); } final EnvVars environment = tmp; final String singleBranch = getSingleBranch(build); final Revision revToBuild = workspace.act( new FileCallable<Revision>() { private static final long serialVersionUID = 1L; public Revision invoke(File localWorkspace, VirtualChannel channel) throws IOException { FilePath ws = new FilePath(localWorkspace); listener .getLogger() .println( "Checkout:" + ws.getName() + " / " + ws.getRemote() + " - " + ws.getChannel()); IGitAPI git = new GitAPI(gitExe, ws, listener, environment); if (git.hasGitRepo()) { // It's an update listener.getLogger().println("Fetching changes from the remote Git repository"); for (RemoteConfig remoteRepository : getRepositories()) { fetchFrom(git, localWorkspace, listener, remoteRepository); } } else { listener.getLogger().println("Cloning the remote Git repository"); // Go through the repositories, trying to clone from one // boolean successfullyCloned = false; for (RemoteConfig rc : remoteRepositories) { try { git.clone(rc); successfullyCloned = true; break; } catch (GitException ex) { listener.error( "Error cloning remote repo '%s' : %s", rc.getName(), ex.getMessage()); if (ex.getCause() != null) { listener.error("Cause: %s", ex.getCause().getMessage()); } // Failed. Try the next one listener.getLogger().println("Trying next repository"); } } if (!successfullyCloned) { listener.error("Could not clone from a repository"); throw new GitException("Could not clone"); } // Also do a fetch for (RemoteConfig remoteRepository : getRepositories()) { fetchFrom(git, localWorkspace, listener, remoteRepository); } if (git.hasGitModules()) { git.submoduleInit(); git.submoduleUpdate(); } } IBuildChooser buildChooser = new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData); Collection<Revision> candidates = buildChooser.getCandidateRevisions(false, singleBranch); if (candidates.size() == 0) return null; return candidates.iterator().next(); } }); if (revToBuild == null) { // getBuildCandidates should make the last item the last build, so a re-build // will build the last built thing. 
listener.error("Nothing to do"); return false; } listener.getLogger().println("Commencing build of " + revToBuild); Object[] returnData; // Changelog, BuildData if (mergeOptions.doMerge()) { if (!revToBuild.containsBranchName(mergeOptions.getMergeTarget())) { returnData = workspace.act( new FileCallable<Object[]>() { private static final long serialVersionUID = 1L; public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException { EnvVars environment; try { environment = build.getEnvironment(listener); } catch (Exception e) { listener.error("Exception reading environment - using empty environment"); environment = new EnvVars(); } IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment); IBuildChooser buildChooser = new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData); // Do we need to merge this revision onto MergeTarget // Only merge if there's a branch to merge that isn't // us.. listener .getLogger() .println( "Merging " + revToBuild + " onto " + mergeOptions.getMergeTarget()); // checkout origin/blah ObjectId target = git.revParse(mergeOptions.getMergeTarget()); git.checkout(target.name()); try { git.merge(revToBuild.getSha1().name()); } catch (Exception ex) { listener .getLogger() .println( "Branch not suitable for integration as it does not merge cleanly"); // We still need to tag something to prevent // repetitive builds from happening - tag the // candidate // branch. git.checkout(revToBuild.getSha1().name()); git.tag(buildnumber, "Hudson Build #" + buildNumber); buildChooser.revisionBuilt(revToBuild, buildNumber, Result.FAILURE); return new Object[] {null, buildChooser.getData()}; } if (git.hasGitModules()) { git.submoduleUpdate(); } // Tag the successful merge git.tag(buildnumber, "Hudson Build #" + buildNumber); StringBuilder changeLog = new StringBuilder(); if (revToBuild.getBranches().size() > 0) listener .getLogger() .println("Warning : There are multiple branch changesets here"); try { for (Branch b : revToBuild.getBranches()) { Build lastRevWas = buildData == null ? 
null : buildData.getLastBuildOfBranch(b.getName()); if (lastRevWas != null) { changeLog.append( putChangelogDiffsIntoFile( git, b.name, lastRevWas.getSHA1().name(), revToBuild.getSha1().name())); } } } catch (GitException ge) { changeLog.append("Unable to retrieve changeset"); } Build buildData = buildChooser.revisionBuilt(revToBuild, buildNumber, null); GitUtils gu = new GitUtils(listener, git); buildData.mergeRevision = gu.getRevisionForSHA1(target); // Fetch the diffs into the changelog file return new Object[] {changeLog, buildChooser.getData()}; } }); BuildData returningBuildData = (BuildData) returnData[1]; build.addAction(returningBuildData); return changeLogResult((String) returnData[0], changelogFile); } } // No merge returnData = workspace.act( new FileCallable<Object[]>() { private static final long serialVersionUID = 1L; public Object[] invoke(File localWorkspace, VirtualChannel channel) throws IOException { IGitAPI git = new GitAPI(gitExe, new FilePath(localWorkspace), listener, environment); IBuildChooser buildChooser = new BuildChooser(GitSCM.this, git, new GitUtils(listener, git), buildData); // Straight compile-the-branch listener.getLogger().println("Checking out " + revToBuild); git.checkout(revToBuild.getSha1().name()); // if( compileSubmoduleCompares ) if (doGenerateSubmoduleConfigurations) { SubmoduleCombinator combinator = new SubmoduleCombinator(git, listener, localWorkspace, submoduleCfg); combinator.createSubmoduleCombinations(); } if (git.hasGitModules()) { git.submoduleInit(); // Git submodule update will only 'fetch' from where it // regards as 'origin'. However, // it is possible that we are building from a // RemoteRepository with changes // that are not in 'origin' AND it may be a new module that // we've only just discovered. // So - try updating from all RRs, then use the submodule // Update to do the checkout for (RemoteConfig remoteRepository : getRepositories()) { fetchFrom(git, localWorkspace, listener, remoteRepository); } // Update to the correct checkout git.submoduleUpdate(); } // Tag the successful merge git.tag(buildnumber, "Hudson Build #" + buildNumber); StringBuilder changeLog = new StringBuilder(); int histories = 0; try { for (Branch b : revToBuild.getBranches()) { Build lastRevWas = buildData == null ? null : buildData.getLastBuildOfBranch(b.getName()); if (lastRevWas != null) { listener.getLogger().println("Recording changes in branch " + b.getName()); changeLog.append( putChangelogDiffsIntoFile( git, b.name, lastRevWas.getSHA1().name(), revToBuild.getSha1().name())); histories++; } else { listener.getLogger().println("No change to record in branch " + b.getName()); } } } catch (GitException ge) { changeLog.append("Unable to retrieve changeset"); } if (histories > 1) listener .getLogger() .println("Warning : There are multiple branch changesets here"); buildChooser.revisionBuilt(revToBuild, buildNumber, null); if (getClean()) { listener.getLogger().println("Cleaning workspace"); git.clean(); } // Fetch the diffs into the changelog file return new Object[] {changeLog.toString(), buildChooser.getData()}; } }); build.addAction((Action) returnData[1]); return changeLogResult((String) returnData[0], changelogFile); }
private boolean sendMail( EmailType mailType, AbstractBuild<?, ?> build, BuildListener listener, EmailTrigger trigger, Map<String, EmailTrigger> triggered) { try { MimeMessage msg = createMail(mailType, build, listener, trigger); debug(listener.getLogger(), "Successfully created MimeMessage"); Address[] allRecipients = msg.getAllRecipients(); int retries = 0; if (allRecipients != null) { StringBuilder buf = new StringBuilder("Sending email to:"); for (Address a : allRecipients) { buf.append(' ').append(a); } listener.getLogger().println(buf); if (executePresendScript(build, listener, msg, trigger, triggered)) { while (true) { try { Transport.send(msg); break; } catch (SendFailedException e) { if (e.getNextException() != null && ((e.getNextException() instanceof SocketException) || (e.getNextException() instanceof ConnectException))) { listener .getLogger() .println("Socket error sending email, retrying once more in 10 seconds..."); Thread.sleep(10000); } else { Address[] addresses = e.getValidSentAddresses(); if (addresses != null && addresses.length > 0) { buf = new StringBuilder("Successfully sent to the following addresses:"); for (Address a : addresses) { buf.append(' ').append(a); } listener.getLogger().println(buf); } addresses = e.getValidUnsentAddresses(); if (addresses != null && addresses.length > 0) { buf = new StringBuilder("Error sending to the following VALID addresses:"); for (Address a : addresses) { buf.append(' ').append(a); } listener.getLogger().println(buf); } addresses = e.getInvalidAddresses(); if (addresses != null && addresses.length > 0) { buf = new StringBuilder("Error sending to the following INVALID addresses:"); for (Address a : addresses) { buf.append(' ').append(a); } listener.getLogger().println(buf); } debug(listener.getLogger(), "SendFailedException message: " + e.getMessage()); break; } } retries++; if (retries > 1) { listener.getLogger().println("Failed after second try sending email"); break; } } if (build.getAction(MailMessageIdAction.class) == null) { build.addAction(new MailMessageIdAction(msg.getMessageID())); } } else { listener.getLogger().println("Email sending was cancelled" + " by user script."); } return true; } else { listener .getLogger() .println("An attempt to send an e-mail" + " to empty list of recipients, ignored."); } } catch (Exception e) { LOGGER.log(Level.WARNING, "Could not send email.", e); e.printStackTrace( listener.error("Could not send email as a part of the post-build publishers.")); } debug( listener.getLogger(), "Some error occured trying to send the email...check the Jenkins log"); return false; }