@SuppressWarnings("unchecked") private void prepareArtifacts() { try { // TODO: do only if artifacts are not resolved Set<Artifact> artifacts = new LinkedHashSet<Artifact>(); Map<String, Artifact> managedVersions = createManagedVersionMap(); List<ResolutionListener> listeners = new ArrayList<ResolutionListener>(); ArtifactResolutionResult resolveResult = artifactCollector.collect( getAllArtifacts(), project.getArtifact(), managedVersions, session.getLocalRepository(), project.getRemoteArtifactRepositories(), artifactMetadataSource, null, listeners); for (ResolutionNode node : (Set<ResolutionNode>) resolveResult.getArtifactResolutionNodes()) { if (!isReactorProject(node.getArtifact())) { artifactResolver.resolve( node.getArtifact(), node.getRemoteRepositories(), session.getLocalRepository()); artifacts.add(node.getArtifact()); } else { addProjectReferenceArtifact(node.getArtifact()); } } this.artifacts = artifacts; } catch (Exception e) { getLog().debug("[WarSync] " + e.getMessage(), e); getLog().error("[WarSync] " + e.getMessage()); } }
/**
 * Get the set of artifacts that are provided by Synapse at runtime.
 *
 * @return the artifacts provided by the Synapse runtime
 * @throws MojoExecutionException if the runtime dependencies cannot be determined or resolved
 */
private Set<Artifact> getSynapseRuntimeArtifacts() throws MojoExecutionException {
  Log log = getLog();
  log.debug("Looking for synapse-core artifact in XAR project dependencies ...");
  Artifact synapseCore = null;
  for (Iterator<?> it = project.getDependencyArtifacts().iterator(); it.hasNext(); ) {
    Artifact artifact = (Artifact) it.next();
    if (artifact.getGroupId().equals("org.apache.synapse")
        && artifact.getArtifactId().equals("synapse-core")) {
      synapseCore = artifact;
      break;
    }
  }
  if (synapseCore == null) {
    throw new MojoExecutionException("Could not locate dependency on synapse-core");
  }

  log.debug("Loading project data for " + synapseCore + " ...");
  MavenProject synapseCoreProject;
  try {
    synapseCoreProject =
        projectBuilder.buildFromRepository(
            synapseCore, remoteArtifactRepositories, localRepository);
  } catch (ProjectBuildingException e) {
    throw new MojoExecutionException(
        "Unable to retrieve project information for " + synapseCore, e);
  }

  Set<Artifact> synapseRuntimeDeps;
  try {
    synapseRuntimeDeps =
        synapseCoreProject.createArtifacts(
            artifactFactory, Artifact.SCOPE_RUNTIME, new TypeArtifactFilter("jar"));
  } catch (InvalidDependencyVersionException e) {
    throw new MojoExecutionException(
        "Unable to get project dependencies for " + synapseCore, e);
  }

  log.debug("Direct runtime dependencies for " + synapseCore + " :");
  logArtifacts(synapseRuntimeDeps);

  log.debug("Resolving transitive dependencies for " + synapseCore + " ...");
  try {
    synapseRuntimeDeps =
        artifactCollector
            .collect(
                synapseRuntimeDeps,
                synapseCoreProject.getArtifact(),
                synapseCoreProject.getManagedVersionMap(),
                localRepository,
                remoteArtifactRepositories,
                artifactMetadataSource,
                null,
                Collections.singletonList(new DebugResolutionListener(logger)))
            .getArtifacts();
  } catch (ArtifactResolutionException e) {
    throw new MojoExecutionException(
        "Unable to resolve transitive dependencies for " + synapseCore, e);
  }

  log.debug("All runtime dependencies for " + synapseCore + " :");
  logArtifacts(synapseRuntimeDeps);

  return synapseRuntimeDeps;
}
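/*
 * Illustrative only (not in the original source): a typical consumer of getSynapseRuntimeArtifacts()
 * filters the artifacts already provided by the runtime out of the project's own dependency set
 * before packaging. The helper name and signature below are assumptions, keyed on the dependency
 * conflict id (groupId:artifactId:type[:classifier]).
 */
private Set<Artifact> excludeProvidedByRuntime(
    Set<Artifact> projectArtifacts, Set<Artifact> runtimeArtifacts) {
  Set<String> providedIds = new LinkedHashSet<String>();
  for (Artifact runtime : runtimeArtifacts) {
    providedIds.add(runtime.getDependencyConflictId());
  }
  Set<Artifact> filtered = new LinkedHashSet<Artifact>();
  for (Artifact artifact : projectArtifacts) {
    if (!providedIds.contains(artifact.getDependencyConflictId())) {
      filtered.add(artifact);
    }
  }
  return filtered;
}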
/**
 * Resolve project dependencies. Manual resolution is needed in order to avoid resolution of
 * multi-project artifacts (when projects are linked to each other, an installed jar is not
 * needed) and to avoid a failure when a jar is missing.
 *
 * @throws MojoExecutionException if dependencies can't be resolved
 * @return resolved IDE dependencies, with attached jars for non-reactor dependencies
 */
protected IdeDependency[] doDependencyResolution() throws MojoExecutionException {
  MavenProject project = getProject();
  ArtifactRepository localRepo = getLocalRepository();

  List dependencies = getProject().getDependencies();

  // Collect the list of resolved IdeDependencies.
  List dependencyList = new ArrayList();

  if (dependencies != null) {
    Map managedVersions =
        createManagedVersionMap(
            getArtifactFactory(), project.getId(), project.getDependencyManagement());

    ArtifactResolutionResult artifactResolutionResult = null;

    try {
      List listeners = new ArrayList();

      if (logger.isDebugEnabled()) {
        listeners.add(new DebugResolutionListener(logger));
      }

      listeners.add(new WarningResolutionListener(logger));

      artifactResolutionResult =
          artifactCollector.collect(
              getProjectArtifacts(),
              project.getArtifact(),
              managedVersions,
              localRepo,
              project.getRemoteArtifactRepositories(),
              getArtifactMetadataSource(),
              null,
              listeners);
    } catch (ArtifactResolutionException e) {
      getLog().debug(e.getMessage(), e);
      getLog()
          .error(
              Messages.getString(
                  "artifactresolution", // $NON-NLS-1$
                  new Object[] {
                    e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage()
                  }));

      // if we are here, artifactResolutionResult is null; create a project without dependencies
      // but don't fail
      // (this could be a reactor project, and we don't want to fail everything)
      return new IdeDependency[0];
    }

    // keep track of added reactor projects in order to avoid duplicates
    Set emittedReactorProjectId = new HashSet();

    for (Iterator i = artifactResolutionResult.getArtifactResolutionNodes().iterator();
        i.hasNext(); ) {
      ResolutionNode node = (ResolutionNode) i.next();
      Artifact art = node.getArtifact();
      boolean isReactorProject = getUseProjectReferences() && isAvailableAsAReactorProject(art);

      // don't resolve jars for reactor projects
      if (!isReactorProject) {
        try {
          artifactResolver.resolve(art, node.getRemoteRepositories(), localRepository);
        } catch (ArtifactNotFoundException e) {
          getLog().debug(e.getMessage(), e);
          getLog()
              .warn(
                  Messages.getString(
                      "artifactdownload", // $NON-NLS-1$
                      new Object[] {
                        e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage()
                      }));
        } catch (ArtifactResolutionException e) {
          getLog().debug(e.getMessage(), e);
          getLog()
              .warn(
                  Messages.getString(
                      "artifactresolution", // $NON-NLS-1$
                      new Object[] {
                        e.getGroupId(), e.getArtifactId(), e.getVersion(), e.getMessage()
                      }));
        }
      }

      if (!isReactorProject
          || emittedReactorProjectId.add(art.getGroupId() + '-' + art.getArtifactId())) {
        IdeDependency dep =
            new IdeDependency(
                art.getGroupId(),
                art.getArtifactId(),
                art.getVersion(),
                isReactorProject,
                Artifact.SCOPE_TEST.equals(art.getScope()),
                Artifact.SCOPE_SYSTEM.equals(art.getScope()),
                Artifact.SCOPE_PROVIDED.equals(art.getScope()),
                art.getArtifactHandler().isAddedToClasspath(),
                art.getFile(),
                art.getType());

        dependencyList.add(dep);
      }
    }

    // @todo a final report with the list of missingArtifacts?
  }

  IdeDependency[] deps =
      (IdeDependency[]) dependencyList.toArray(new IdeDependency[dependencyList.size()]);

  return deps;
}
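/*
 * The createManagedVersionMap(..) helper referenced above is not shown in this snippet. A typical
 * implementation in Maven 2.x-era plugins (an assumption, not necessarily this project's exact code)
 * turns the <dependencyManagement> section into artifacts keyed by Dependency.getManagementKey():
 */
private Map createManagedVersionMap(
    ArtifactFactory artifactFactory, String projectId, DependencyManagement dependencyManagement)
    throws MojoExecutionException {
  Map map = new HashMap();
  if (dependencyManagement != null && dependencyManagement.getDependencies() != null) {
    for (Iterator i = dependencyManagement.getDependencies().iterator(); i.hasNext(); ) {
      Dependency d = (Dependency) i.next();
      try {
        VersionRange versionRange = VersionRange.createFromVersionSpec(d.getVersion());
        Artifact artifact =
            artifactFactory.createDependencyArtifact(
                d.getGroupId(),
                d.getArtifactId(),
                versionRange,
                d.getType(),
                d.getClassifier(),
                d.getScope(),
                d.isOptional());
        map.put(d.getManagementKey(), artifact);
      } catch (InvalidVersionSpecificationException e) {
        throw new MojoExecutionException(
            "Unable to parse version '"
                + d.getVersion()
                + "' for dependency '"
                + d.getManagementKey()
                + "' in project "
                + projectId,
            e);
      }
    }
  }
  return map;
}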
public ArtifactResolutionResult resolve(ArtifactResolutionRequest request) {
  Artifact rootArtifact = request.getArtifact();
  Set<Artifact> artifacts = request.getArtifactDependencies();
  Map<String, Artifact> managedVersions = request.getManagedVersionMap();
  List<ResolutionListener> listeners = request.getListeners();
  ArtifactFilter collectionFilter = request.getCollectionFilter();
  ArtifactFilter resolutionFilter = request.getResolutionFilter();
  RepositorySystemSession session = getSession(request.getLocalRepository());

  // TODO: hack because metadata isn't generated in m2e correctly and I want to run the Maven I
  // have in the workspace
  if (source == null) {
    try {
      source = container.lookup(ArtifactMetadataSource.class);
    } catch (ComponentLookupException e) {
      // won't happen
    }
  }

  if (listeners == null) {
    listeners = new ArrayList<ResolutionListener>();

    if (logger.isDebugEnabled()) {
      listeners.add(new DebugResolutionListener(logger));
    }

    listeners.add(new WarningResolutionListener(logger));
  }

  ArtifactResolutionResult result = new ArtifactResolutionResult();

  // The root artifact may or may not be resolved, so we need to check before we attempt to
  // resolve. This is often an artifact like a POM that is taken from disk and we already have
  // hold of the file reference. But this may be a Maven plugin that we need to resolve from a
  // remote repository, as well as its dependencies.
  if (request.isResolveRoot() /* && rootArtifact.getFile() == null */) {
    try {
      resolve(rootArtifact, request.getRemoteRepositories(), session);
    } catch (ArtifactResolutionException e) {
      result.addErrorArtifactException(e);
      return result;
    } catch (ArtifactNotFoundException e) {
      result.addMissingArtifact(request.getArtifact());
      return result;
    }
  }

  ArtifactResolutionRequest collectionRequest = request;

  if (request.isResolveTransitively()) {
    MetadataResolutionRequest metadataRequest = new DefaultMetadataResolutionRequest(request);

    metadataRequest.setArtifact(rootArtifact);
    metadataRequest.setResolveManagedVersions(managedVersions == null);

    try {
      ResolutionGroup resolutionGroup = source.retrieve(metadataRequest);

      if (managedVersions == null) {
        managedVersions = resolutionGroup.getManagedVersions();
      }

      Set<Artifact> directArtifacts = resolutionGroup.getArtifacts();

      if (artifacts == null || artifacts.isEmpty()) {
        artifacts = directArtifacts;
      } else {
        List<Artifact> allArtifacts = new ArrayList<Artifact>();
        allArtifacts.addAll(artifacts);
        allArtifacts.addAll(directArtifacts);

        Map<String, Artifact> mergedArtifacts = new LinkedHashMap<String, Artifact>();
        for (Artifact artifact : allArtifacts) {
          String conflictId = artifact.getDependencyConflictId();
          if (!mergedArtifacts.containsKey(conflictId)) {
            mergedArtifacts.put(conflictId, artifact);
          }
        }

        artifacts = new LinkedHashSet<Artifact>(mergedArtifacts.values());
      }

      collectionRequest = new ArtifactResolutionRequest(request);
      collectionRequest.setServers(request.getServers());
      collectionRequest.setMirrors(request.getMirrors());
      collectionRequest.setProxies(request.getProxies());
      collectionRequest.setRemoteRepositories(resolutionGroup.getResolutionRepositories());
    } catch (ArtifactMetadataRetrievalException e) {
      ArtifactResolutionException are =
          new ArtifactResolutionException(
              "Unable to get dependency information for "
                  + rootArtifact.getId()
                  + ": "
                  + e.getMessage(),
              rootArtifact,
              metadataRequest.getRemoteRepositories(),
              e);
      result.addMetadataResolutionException(are);
      return result;
    }
  }

  if (artifacts == null || artifacts.isEmpty()) {
    if (request.isResolveRoot()) {
      result.addArtifact(rootArtifact);
    }
    return result;
  }

  // After the collection we will have the artifact objects in the result, but they will not be
  // resolved yet.
  result =
      artifactCollector.collect(
          artifacts,
          rootArtifact,
          managedVersions,
          collectionRequest,
          source,
          collectionFilter,
          listeners,
          null);

  // We have metadata retrieval problems, or there are cycles that have been detected,
  // so we give this back to the calling code and let them deal with this information
  // appropriately.
  if (result.hasMetadataResolutionExceptions()
      || result.hasVersionRangeViolations()
      || result.hasCircularDependencyExceptions()) {
    return result;
  }

  if (result.getArtifactResolutionNodes() != null) {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

    CountDownLatch latch = new CountDownLatch(result.getArtifactResolutionNodes().size());

    for (ResolutionNode node : result.getArtifactResolutionNodes()) {
      Artifact artifact = node.getArtifact();

      if (resolutionFilter == null || resolutionFilter.include(artifact)) {
        executor.execute(
            new ResolveTask(
                classLoader, latch, artifact, session, node.getRemoteRepositories(), result));
      } else {
        latch.countDown();
      }
    }
    try {
      latch.await();
    } catch (InterruptedException e) {
      result.addErrorArtifactException(
          new ArtifactResolutionException("Resolution interrupted", rootArtifact, e));
    }
  }

  // We want to send the root artifact back in the result, but we need to do this after the other
  // dependencies have been resolved.
  if (request.isResolveRoot()) {
    // Add the root artifact (as the first artifact, to retain the logical order of the class path!)
    Set<Artifact> allArtifacts = new LinkedHashSet<Artifact>();
    allArtifacts.add(rootArtifact);
    allArtifacts.addAll(result.getArtifacts());
    result.setArtifacts(allArtifacts);
  }

  return result;
}
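/*
 * Hedged usage sketch (not part of the original source): how a caller might drive the
 * resolve(ArtifactResolutionRequest) entry point above. The "resolver", "localRepository" and
 * "remoteRepositories" fields are assumptions, e.g. components injected into a Mojo.
 */
private Set<Artifact> resolveWithDependencies(Artifact root) throws MojoExecutionException {
  ArtifactResolutionRequest request = new ArtifactResolutionRequest();
  request.setArtifact(root);
  request.setResolveRoot(true);         // also return the root artifact itself
  request.setResolveTransitively(true); // collect and resolve the full dependency graph
  request.setLocalRepository(localRepository);
  request.setRemoteRepositories(remoteRepositories);

  ArtifactResolutionResult result = resolver.resolve(request);

  // resolve(..) reports failures on the result object instead of throwing
  if (result.hasExceptions()) {
    throw new MojoExecutionException(
        "Could not resolve " + root.getId(), result.getExceptions().get(0));
  }
  return result.getArtifacts();
}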