public Object[] getChildren(Object parentElement) { if (parentElement instanceof MavenProject) { /* * Walk the hierarchy list until we find the parentElement and * return the previous element, which is the child. */ MavenProject parent = (MavenProject) parentElement; if (getProjectHierarchy().size() == 1) { // No parent exists, only one element in the tree return new Object[0]; } if (getProjectHierarchy().getFirst().equals(parent)) { // We are the final child return new Object[0]; } ListIterator<MavenProject> iter = getProjectHierarchy().listIterator(); while (iter.hasNext()) { MavenProject next = iter.next(); if (next.equals(parent)) { iter.previous(); MavenProject previous = iter.previous(); return new Object[] {previous}; } } } return new Object[0]; }
/* (non-Javadoc)
 * @see org.eclipse.ui.dialogs.SelectionStatusDialog#computeResult()
 */
protected void computeResult() {
  // Resolve workspace facades for both the selected target POM and the
  // bottom-most (current) POM in the hierarchy; the facades give us the
  // actual IFile handles to edit.
  MavenProject targetPOM = getTargetPOM();
  IMavenProjectFacade targetFacade =
      MavenPlugin.getMavenProjectRegistry()
          .getMavenProject(
              targetPOM.getGroupId(), targetPOM.getArtifactId(), targetPOM.getVersion());
  MavenProject currentPOM = projectHierarchy.getFirst();
  IMavenProjectFacade currentFacade =
      MavenPlugin.getMavenProjectRegistry()
          .getMavenProject(
              currentPOM.getGroupId(), currentPOM.getArtifactId(), currentPOM.getVersion());
  // Either POM may not be imported into the workspace; silently bail out in
  // that case (presumably validated earlier in the dialog — TODO confirm).
  if (targetFacade == null || currentFacade == null) {
    return;
  }
  // When target and current are the same POM, both edits must go into the
  // same file in a single compound operation.
  final boolean same = targetPOM.equals(currentPOM);
  final LinkedList<Dependency> modelDeps = getDependenciesList();
  /*
   * 1) Remove version values from the dependencies from the current POM
   * 2) Add dependencies to dependencyManagement of targetPOM
   */
  // First we remove the version from the original dependency
  final IFile current = currentFacade.getPom();
  final IFile target = targetFacade.getPom();
  // Run the DOM edits asynchronously as a background (non-UI) workspace Job.
  Job perform = new Job("Updating POM file(s)") {
    @Override
    protected IStatus run(IProgressMonitor monitor) {
      try {
        if (same) {
          performOnDOMDocument(
              new OperationTuple(
                  current,
                  new CompoundOperation(
                      createManageOperation(modelDeps), createRemoveVersionOperation(modelDeps))));
        } else {
          performOnDOMDocument(
              new OperationTuple(target, createManageOperation(modelDeps)),
              new OperationTuple(current, createRemoveVersionOperation(modelDeps)));
        }
      } catch (Exception e) {
        LOG.error("Error updating managed dependencies", e);
        return new Status(
            IStatus.ERROR,
            MavenEditorPlugin.PLUGIN_ID,
            "Error updating managed dependencies",
            e);
      }
      return Status.OK_STATUS;
    }
  };
  // Non-user, system job: no progress UI is shown for this edit.
  perform.setUser(false);
  perform.setSystem(true);
  perform.schedule();
}
/**
 * Runs the Maven build for the given CLI request and reports any failures.
 *
 * @return 0 on success (or when reactor failure behavior is "fail never"), 1 otherwise
 */
private int execute(CliRequest cliRequest) {
  MavenExecutionResult result = maven.execute(cliRequest.request);
  if (!result.hasExceptions()) {
    return 0;
  }
  // Summarize every exception; remember the first failing project so we can
  // suggest a resume point (-rf) below.
  ExceptionHandler handler = new DefaultExceptionHandler();
  Map<String, String> references = new LinkedHashMap<String, String>();
  MavenProject failedProject = null;
  for (Throwable exception : result.getExceptions()) {
    ExceptionSummary summary = handler.handleException(exception);
    logSummary(summary, references, "", cliRequest.showErrors);
    if (failedProject == null && exception instanceof LifecycleExecutionException) {
      failedProject = ((LifecycleExecutionException) exception).getProject();
    }
  }
  logger.error("");
  if (!cliRequest.showErrors) {
    logger.error("To see the full stack trace of the errors, re-run Maven with the -e switch.");
  }
  if (!logger.isDebugEnabled()) {
    logger.error("Re-run Maven using the -X switch to enable full debug logging.");
  }
  if (!references.isEmpty()) {
    logger.error("");
    logger.error(
        "For more information about the errors and possible solutions"
            + ", please read the following articles:");
    for (Map.Entry<String, String> entry : references.entrySet()) {
      logger.error(entry.getValue() + " " + entry.getKey());
    }
  }
  // Suggest -rf only when the failure was not already in the first reactor project.
  if (failedProject != null
      && !failedProject.equals(result.getTopologicallySortedProjects().get(0))) {
    logger.error("");
    logger.error("After correcting the problems, you can resume the build with the command");
    logger.error(" mvn <goals> -rf :" + failedProject.getArtifactId());
  }
  if (MavenExecutionRequest.REACTOR_FAIL_NEVER.equals(
      cliRequest.request.getReactorFailureBehavior())) {
    logger.info("Build failures were ignored.");
    return 0;
  }
  return 1;
}
/**
 * Aggregates at the parent level once the last collected module has been reached.
 *
 * @param prj the current project; ignored when null, parentless, or aggregation is disabled
 * @throws Exception propagated from {@code aggregate(parent)}
 */
protected void tryAggregateUpper(MavenProject prj) throws Exception {
  // Nothing to do without a project, a parent, or the ability to aggregate.
  if (prj == null || !prj.hasParent() || !canAggregate()) {
    return;
  }
  MavenProject parent = prj.getParent();
  List<MavenProject> modules = parent.getCollectedProjects();
  int moduleCount = modules.size();
  // Aggregate only when several modules were collected and this project is the last one.
  if (moduleCount > 1 && prj.equals(modules.get(moduleCount - 1))) {
    aggregate(parent);
  }
}
private AgentProjectInfo processProject(MavenProject project) { long startTime = System.currentTimeMillis(); log.info(WssUtils.logMsg(LOG_COMPONENT, "processing Maven project " + project.getId())); AgentProjectInfo projectInfo = new AgentProjectInfo(); // project token if (project.equals(mavenParser.getMavenProject())) { projectInfo.setProjectToken(projectToken); } else { projectInfo.setProjectToken(moduleTokens.get(project.getArtifactId())); } // project coordinates projectInfo.setCoordinates(extractCoordinates(project)); Parent parent = project.getModel().getParent(); // parent coordinates if (parent != null) { projectInfo.setParentCoordinates(extractParentCoordinates(parent)); } // dependencies Map<Dependency, Artifact> lut = createLookupTable(project); for (Dependency dependency : mavenParser.getDependencies(project)) { DependencyInfo dependencyInfo = getDependencyInfo(dependency); Artifact artifact = lut.get(dependency); if (artifact != null) { File artifactFile = artifact.getFile(); if (artifactFile != null && artifactFile.exists()) { try { dependencyInfo.setSha1(ChecksumUtils.calculateSHA1(artifactFile)); } catch (IOException e) { log.warn(WssUtils.logMsg(LOG_COMPONENT, ERROR_SHA1 + " for " + artifact.getId())); } } } projectInfo.getDependencies().add(dependencyInfo); } log.info( WssUtils.logMsg( LOG_COMPONENT, "Total Maven project processing time is " + (System.currentTimeMillis() - startTime) + " [msec]")); return projectInfo; }
/**
 * Runs the Eclipse test plugin's tests inside an Equinox installation.
 *
 * @throws MojoExecutionException on invalid testSuite/testClass configuration
 * @throws MojoFailureException when at least one test failed
 */
public void execute() throws MojoExecutionException, MojoFailureException {
  if (skip || skipExec) {
    getLog().info("Skipping tests");
    return;
  }
  // Only eclipse-test-plugin packaging is supported; anything else is skipped with a warning.
  if (!"eclipse-test-plugin".equals(project.getPackaging())) {
    getLog().warn("Unsupported packaging type " + project.getPackaging());
    return;
  }
  // testSuite and testClass form an all-or-nothing pair.
  if (testSuite != null || testClass != null) {
    if (testSuite == null || testClass == null) {
      throw new MojoExecutionException(
          "Both testSuite and testClass must be provided or both should be null");
    }
    MavenProject suite = getTestSuite(testSuite);
    if (suite == null) {
      throw new MojoExecutionException(
          "Cannot find test suite project with Bundle-SymbolicName " + testSuite);
    }
    // When a suite is configured, only the suite project itself runs the tests.
    if (!suite.equals(project)) {
      getLog()
          .info(
              "Not executing tests, testSuite=" + testSuite + " and project is not the testSuite");
      return;
    }
  }
  EquinoxInstallation testRuntime =
      createEclipseInstallation(false, DefaultReactorProject.adapt(session));
  // NOTE(review): testBundle is always null here — presumably runTest treats
  // null as "run everything"; confirm against runTest's contract.
  String testBundle = null;
  if (runTest(testRuntime, testBundle)) {
    getLog().info("All tests passed!");
  } else {
    throw new MojoFailureException(
        "There are test failures.\n\nPlease refer to "
            + reportsDirectory
            + " for the individual test results.");
  }
}
/**
 * Executes the given Maven request and logs a failure summary when the build fails.
 *
 * @param request the fully-populated execution request; must not be null
 * @return 0 on success (or when reactor failure behavior is "fail never"), 1 otherwise
 * @throws Exception propagated from container lookup/release or execution
 */
private int doExecute(final MavenExecutionRequest request) throws Exception {
  assert request != null;
  assert config != null;
  if (config.isDebug() || config.isShowVersion()) {
    CLIReportingUtils.showVersion(config.getStreams().out);
  }
  //
  // TODO: i18n all of this
  //
  if (request.isShowErrors()) {
    logger.info("Error stack-traces are turned on.");
  }
  // Announce the effective global checksum policy, if any.
  if (MavenExecutionRequest.CHECKSUM_POLICY_WARN.equals(request.getGlobalChecksumPolicy())) {
    logger.info("Disabling strict checksum verification on all artifact downloads.");
  } else if (MavenExecutionRequest.CHECKSUM_POLICY_FAIL.equals(
      request.getGlobalChecksumPolicy())) {
    logger.info("Enabling strict checksum verification on all artifact downloads.");
  }
  if (log.isDebugEnabled()) {
    log.debug("Executing request: {}", Yarn.render(request, Yarn.Style.MULTI));
  }
  // Look up Maven from the container for this execution and always release it.
  MavenExecutionResult result;
  Maven maven = container.lookup(Maven.class);
  try {
    result = maven.execute(request);
  } finally {
    container.release(maven);
  }
  if (!result.hasExceptions()) {
    return 0;
  }
  // else process exceptions
  // Summarize each exception; remember the first failing project so a
  // resume point (-rf) can be suggested below.
  ExceptionHandler handler = new DefaultExceptionHandler();
  Map<String, String> references = new LinkedHashMap<String, String>();
  MavenProject project = null;
  for (Throwable exception : result.getExceptions()) {
    ExceptionSummary summary = handler.handleException(exception);
    logSummary(summary, references, "", request.isShowErrors());
    if (project == null && exception instanceof LifecycleExecutionException) {
      project = ((LifecycleExecutionException) exception).getProject();
    }
  }
  logger.error("");
  if (!request.isShowErrors()) {
    logger.error("To see the full stack-trace of the errors, re-run Maven with the -e switch.");
  }
  if (!logger.isDebugEnabled()) {
    logger.error("Re-run Maven using the -X switch to enable full debug logging.");
  }
  if (!references.isEmpty()) {
    logger.error("");
    logger.error(
        "For more information about the errors and possible solutions, please read the following articles:");
    for (Map.Entry<String, String> entry : references.entrySet()) {
      logger.error(entry.getValue() + " " + entry.getKey());
    }
  }
  // Suggest -rf only when the failure was not in the first reactor project.
  if (project != null && !project.equals(result.getTopologicallySortedProjects().get(0))) {
    logger.error("");
    logger.error("After correcting the problems, you can resume the build with the command");
    logger.error(" mvn <goals> -rf :" + project.getArtifactId());
  }
  if (MavenExecutionRequest.REACTOR_FAIL_NEVER.equals(request.getReactorFailureBehavior())) {
    logger.info("Build failures were ignored.");
    return 0;
  } else {
    return 1;
  }
}