private List<URL> generateExecutionClasspath( Set<Artifact> resolvedArtifacts, String... excludeGroups) throws MojoExecutionException { /* * Convert each resolved artifact into a URL/classpath element. */ final ArrayList<URL> classpath = new ArrayList<URL>(); final List<String> excludes = Arrays.asList(excludeGroups); try { for (Artifact resolvedArtifact : resolvedArtifacts) { if (excludes.contains(resolvedArtifact.getGroupId())) continue; final File file = resolvedArtifact.getFile(); // System.out.println("artifact " + resolvedArtifact.toString()); if (file != null) { if (artifactIdsToInsertAtStartOfClasspath.contains(resolvedArtifact.getArtifactId())) { getLog().info("adding at the start" + file.getAbsolutePath()); // a patch? grails is full of them, insert it at the start classpath.add(0, file.toURI().toURL()); } else { // insert it at the end classpath.add(file.toURI().toURL()); } } } } catch (MalformedURLException murle) { throw new MojoExecutionException("Unable to find files", murle); } return classpath; }
/**
 * Lazily computes the set of project artifacts selected by the {@code includes}/{@code
 * exclusions} configuration. When {@code includes} is set, only artifacts whose key matches an
 * inclusion are returned; otherwise all artifacts not matching an exclusion (or an imported
 * artifact key) are returned. The result is cached in the {@code inlcudedArtifacts} field.
 *
 * @return the artifacts to include (never {@code null})
 * @throws MojoExecutionException if both {@code includes} and {@code exclusions} are configured
 */
public List<Artifact> getIncludedArtifacts() throws MojoExecutionException {
  if (inlcudedArtifacts == null) {
    inlcudedArtifacts = new ArrayList<Artifact>();
    Set<Artifact> artifacts = project.getArtifacts();
    if (includes != null) {
      if (exclusions != null) {
        // Fixed typo in the original message ("inlcudes").
        throw new MojoExecutionException("Both includes and exclusions are specified");
      }
      Set<String> inclusionKeys = getArtifactKeys(includes);
      for (Artifact a : artifacts) {
        if (inclusionKeys.contains(
            getArtifactKey(a.getGroupId(), a.getArtifactId(), a.getClassifier()))) {
          inlcudedArtifacts.add(a);
        }
      }
    } else {
      Set<String> exclusionKeys = new HashSet<String>(getArtifactKeys(exclusions));
      exclusionKeys.addAll(getImportedArtifactKeys());
      for (Artifact a : artifacts) {
        if (!exclusionKeys.contains(
            getArtifactKey(a.getGroupId(), a.getArtifactId(), a.getClassifier()))) {
          inlcudedArtifacts.add(a);
        }
      }
    }
  }
  return inlcudedArtifacts;
}
private boolean isKnownJsf2BasedArtifact(Artifact artifact) { return (artifact.getGroupId().startsWith("org.jboss.seam.") // $NON-NLS-1$ && artifact.getArtifactId().startsWith("seam-faces") // $NON-NLS-1$ && artifact.getVersion().startsWith("3.")) || ("org.apache.deltaspike.modules".equals(artifact.getGroupId()) // $NON-NLS-1$ && artifact.getArtifactId().startsWith("deltaspike-jsf-module")); // $NON-NLS-1$ }
/**
 * Adds the project's POM (compile-scope) dependencies to the target platform used for
 * dependency resolution.
 *
 * <p>Dependencies that resolve to other projects of the current reactor are filtered out, so
 * that only truly external artifacts are fed to the {@code PomDependencyProcessor}.
 *
 * @param project the project whose POM dependencies are processed
 * @param resolutionContext the target platform builder receiving the dependencies
 * @param reactorProjects all projects in the current reactor
 * @param session the current Maven session
 */
private void addPomDependenciesToTargetPlatform(
    MavenProject project,
    TargetPlatformBuilder resolutionContext,
    List<ReactorProject> reactorProjects,
    MavenSession session) {
  // Collect the groupId:artifactId:version keys of every reactor project so that their
  // artifacts can be recognized (and skipped) below.
  Set<String> projectIds = new HashSet<String>();
  for (ReactorProject p : reactorProjects) {
    String key = ArtifactUtils.key(p.getGroupId(), p.getArtifactId(), p.getVersion());
    projectIds.add(key);
  }
  ArrayList<String> scopes = new ArrayList<String>();
  scopes.add(Artifact.SCOPE_COMPILE);
  Collection<Artifact> artifacts;
  try {
    artifacts = projectDependenciesResolver.resolve(project, scopes, session);
  } catch (MultipleArtifactsNotFoundException e) {
    // Tolerate "missing" artifacts that are actually reactor projects: they have not been
    // built/installed yet but will be provided by the reactor itself.
    Collection<Artifact> missing = new HashSet<Artifact>(e.getMissingArtifacts());
    for (Iterator<Artifact> it = missing.iterator(); it.hasNext(); ) {
      Artifact a = it.next();
      String key = ArtifactUtils.key(a.getGroupId(), a.getArtifactId(), a.getBaseVersion());
      if (projectIds.contains(key)) {
        it.remove();
      }
    }
    // Anything still missing is a genuine resolution failure.
    if (!missing.isEmpty()) {
      throw new RuntimeException("Could not resolve project dependencies", e);
    }
    artifacts = e.getResolvedArtifacts();
    artifacts.removeAll(e.getMissingArtifacts());
  } catch (AbstractArtifactResolutionException e) {
    throw new RuntimeException("Could not resolve project dependencies", e);
  }
  // Keep only artifacts that do NOT come from the reactor; reactor projects may otherwise
  // resolve to stale snapshots from the local repository.
  List<Artifact> externalArtifacts = new ArrayList<Artifact>(artifacts.size());
  for (Artifact artifact : artifacts) {
    String key =
        ArtifactUtils.key(
            artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion());
    if (projectIds.contains(key)) {
      // resolved to an older snapshot from the repo, we only want the current project in the
      // reactor
      continue;
    }
    externalArtifacts.add(artifact);
  }
  List<Artifact> explicitArtifacts =
      MavenDependencyInjector.filterInjectedDependencies(
          externalArtifacts); // needed when the resolution is done again for the test runtime
  PomDependencyProcessor pomDependencyProcessor =
      new PomDependencyProcessor(
          session,
          repositorySystem,
          equinox.getService(LocalRepositoryP2Indices.class),
          getLogger());
  pomDependencyProcessor.addPomDependenciesToResolutionContext(
      project, explicitArtifacts, resolutionContext);
}
/**
 * Checks whether the overlay configuration matches the given artifact.
 *
 * <p>Group id and artifact id match when both sides are {@code null} or equal. A {@code null}
 * overlay classifier acts as a wildcard and matches any artifact classifier.
 *
 * @param o the overlay configuration
 * @param a the artifact to check against
 * @return {@code true} when the overlay applies to the artifact
 */
protected boolean overlayMatchesArtifact(OverlayConfig o, Artifact a) {
  final boolean groupMatches =
      (o.getGroupId() == null)
          ? (a.getGroupId() == null)
          : o.getGroupId().equals(a.getGroupId());
  final boolean artifactMatches =
      (o.getArtifactId() == null)
          ? (a.getArtifactId() == null)
          : o.getArtifactId().equals(a.getArtifactId());
  final boolean classifierMatches =
      o.getClassifier() == null || o.getClassifier().equals(a.getClassifier());
  return groupMatches && artifactMatches && classifierMatches;
}
/**
 * Packs the installed node_modules tree into an archive and installs it, together with a
 * generated POM, into the local Maven repository.
 *
 * <p>Steps: write the normalized package.json into node_modules, run {@code npm install},
 * archive node_modules (excluding {@code .bin} directories), generate a minimal POM, then move
 * the archive from the work directory into its final repository location.
 *
 * @param artifact the artifact under which the archive is installed
 * @param normalizedPackageJson the normalized package.json content to write before installing
 * @throws MojoExecutionException if the npm invocation fails
 * @throws IOException on file system errors
 * @throws CommandLineException if the npm command line cannot be run
 * @throws InterruptedException if the forked npm process is interrupted
 */
private void pack(Artifact artifact, String normalizedPackageJson)
    throws MojoExecutionException, IOException, CommandLineException, InterruptedException {
  node_modules.mkdirs();
  final File normalizedPackageJsonFile = new File(node_modules, packageJson.getName());
  getLog()
      .info(
          String.format("Saving normalized package.json file to %s", normalizedPackageJsonFile));
  FileUtils.fileWrite(normalizedPackageJsonFile, normalizedPackageJson);
  npm("npm_install", "install");
  // Final archive location inside the local repository, plus a temp file in the work
  // directory so the move into the repository is a single rename at the end.
  final File archiveFile =
      new File(localRepository.getBasedir(), localRepository.getLayout().pathOf(artifact));
  final File archiveFileTmp = new File(workdir, archiveFile.getName());
  final Archiver archiver = createArchiver();
  archiver.setDestFile(archiveFileTmp);
  // NOTE: .bin dirs will be recreated by npm rebuild; that makes the archive platform independent
  archiver.addDirectory(node_modules, null, new String[] {"**/.bin/**"});
  final long startTime = System.currentTimeMillis();
  archiver.createArchive();
  getLog().info(String.format("Packing took %d millis", System.currentTimeMillis() - startTime));
  // Generate a minimal companion POM so the archive is resolvable as a regular artifact.
  final Artifact pomArtifact =
      factory.createBuildArtifact(
          artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), "pom");
  final File pomFile =
      new File(localRepository.getBasedir(), localRepository.getLayout().pathOf(pomArtifact));
  pomFile.getParentFile().mkdirs();
  getLog().info(String.format("Generating pom in %s", pomFile));
  FileUtils.fileWrite(
      pomFile,
      String.format(
          "<?xml version=\"1.0\" encoding=\"UTF-8\"?>%n"
              + "<project xmlns=\"http://maven.apache.org/POM/4.0.0\">%n"
              + " <modelVersion>4.0.0</modelVersion>%n"
              + " <groupId>%s</groupId>%n"
              + " <artifactId>%s</artifactId>%n"
              + " <version>%s</version>%n"
              + " <packaging>pom</packaging>%n"
              + " <description>generated by npmpack-maven-plugin</description>%n"
              + "</project>%n",
          artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion()));
  getLog()
      .info(
          String.format(
              "Moving artifact to local repository: %s (%d bytes)",
              archiveFile, archiveFileTmp.length()));
  FileUtils.rename(archiveFileTmp, archiveFile);
  // TODO: publish into nexus if desired
}
/**
 * Compares artifacts lexicographically by group id, then artifact id, then version. A {@code
 * null} artifact sorts before a non-null one.
 *
 * <p>Note: the previous implementation concatenated the three coordinates into a single string
 * before comparing, which could make distinct artifacts compare as equal (e.g. group "ab" +
 * artifact "c" versus group "a" + artifact "bc"). Comparing field by field avoids those
 * collisions while keeping the same ordering intent.
 *
 * @param art1 first object
 * @param art2 second object
 * @return a negative value, zero, or a positive value as {@code art1} is less than, equal to,
 *     or greater than {@code art2}
 */
public int compare(Artifact art1, Artifact art2) {
  if (art1 == art2) {
    return 0;
  } else if (art1 == null) {
    return -1;
  } else if (art2 == null) {
    return +1;
  }
  int result = art1.getGroupId().compareTo(art2.getGroupId());
  if (result == 0) {
    result = art1.getArtifactId().compareTo(art2.getArtifactId());
  }
  if (result == 0) {
    result = art1.getVersion().compareTo(art2.getVersion());
  }
  return result;
}
/** Check gwt-user dependency matches plugin version */ private void checkGwtUserVersion() throws MojoExecutionException { InputStream inputStream = Thread.currentThread() .getContextClassLoader() .getResourceAsStream("org/codehaus/mojo/gwt/mojoGwtVersion.properties"); Properties properties = new Properties(); try { properties.load(inputStream); } catch (IOException e) { throw new MojoExecutionException("Failed to load plugin properties", e); } finally { IOUtils.closeQuietly(inputStream); } for (Iterator iterator = getProject().getCompileArtifacts().iterator(); iterator.hasNext(); ) { Artifact artifact = (Artifact) iterator.next(); if (GWT_GROUP_ID.equals(artifact.getGroupId()) && "gwt-user".equals(artifact.getArtifactId())) { String mojoGwtVersion = properties.getProperty("gwt.version"); // ComparableVersion with an up2date maven version ArtifactVersion mojoGwtArtifactVersion = new DefaultArtifactVersion(mojoGwtVersion); ArtifactVersion userGwtArtifactVersion = new DefaultArtifactVersion(artifact.getVersion()); if (userGwtArtifactVersion.compareTo(mojoGwtArtifactVersion) < 0) { getLog() .warn( "You're project declares dependency on gwt-user " + artifact.getVersion() + ". This plugin is designed for at least gwt version " + mojoGwtVersion); } break; } } }
/**
 * Decides whether the dependency and artifact denote the same coordinates.
 *
 * <p>GroupId, artifactId and version must be equal. Classifiers must both be {@code null} or
 * equal. A {@code null} dependency type matches an artifact type of {@code null} or "jar";
 * otherwise the types must be equal.
 */
private boolean match(Dependency dependency, Artifact artifact) {
  if (!dependency.getGroupId().equals(artifact.getGroupId())
      || !dependency.getArtifactId().equals(artifact.getArtifactId())
      || !dependency.getVersion().equals(artifact.getVersion())) {
    return false;
  }
  if (dependency.getClassifier() == null) {
    if (artifact.getClassifier() != null) {
      return false;
    }
  } else if (!dependency.getClassifier().equals(artifact.getClassifier())) {
    return false;
  }
  String type = artifact.getType();
  if (dependency.getType() == null) {
    return type == null || type.equals("jar");
  }
  return dependency.getType().equals(type);
}
/**
 * Verifies that the lily-hbase-client POM excludes every direct child dependency of the allowed
 * artifact; if any are missing, prints ready-to-paste {@code <exclusion>} snippets and fails
 * the build.
 *
 * @throws MojoExecutionException when at least one exclusion is missing
 * @throws MojoFailureException declared by the Mojo contract (not thrown here)
 */
public void execute() throws MojoExecutionException, MojoFailureException {
  List<Artifact> dependencies = (List<Artifact>) project.getRuntimeArtifacts();
  List<Artifact> excludes = new ArrayList<Artifact>(dependencies.size());
  for (Artifact artifact : dependencies) {
    // Rather than simply outputting all dependencies as excludes, we only want to output
    // an exclude for the direct children of the allowed artifact, because the other ones
    // will be disabled recursively by Maven.
    int allowedParentPos = getAllowedParentPostion(artifact.getDependencyTrail());
    // trail size - 2 means the allowed artifact is the direct parent of this artifact.
    if (allowedParentPos != -1 && allowedParentPos == artifact.getDependencyTrail().size() - 2) {
      excludes.add(artifact);
    }
  }
  if (excludes.size() > 0) {
    // Blank lines make the pasteable snippet stand out in the build log.
    System.out.println();
    System.out.println();
    System.out.println();
    System.out.println("Please add these excludes to the lily-hbase-client pom:");
    System.out.println();
    for (Artifact artifact : excludes) {
      System.out.println("<exclusion>");
      System.out.println("  <groupId>" + artifact.getGroupId() + "</groupId>");
      System.out.println("  <artifactId>" + artifact.getArtifactId() + "</artifactId>");
      System.out.println("</exclusion>");
    }
    System.out.println();
    System.out.println();
    System.out.println();
    throw new MojoExecutionException("lily-hbase-client is missing some excludes, please adjust");
  }
}
/**
 * Returns whether the artifact is produced by a project currently open in the workspace whose
 * facade can map the artifact file to a workspace path.
 */
private boolean isWorkspaceProject(Artifact artifact) {
  final IMavenProjectFacade facade =
      projectManager.getMavenProject(
          artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
  if (facade == null) {
    return false;
  }
  return facade.getFullPath(artifact.getFile()) != null;
}
/**
 * Utility method that locates a reactor project producing the given artifact.
 *
 * <p>Only jar, ejb and war artifacts are considered. A groupId/artifactId match with a
 * different version is logged but does not count as available.
 *
 * @param artifact the artifact a project should produce.
 * @return <code>true</code> if the artifact is produced by a reactor project.
 */
protected boolean isAvailableAsAReactorProject(Artifact artifact) {
  if (this.reactorProjects == null) {
    return false;
  }
  final String type = artifact.getType();
  final boolean supportedPackaging =
      Constants.PROJECT_PACKAGING_JAR.equals(type)
          || Constants.PROJECT_PACKAGING_EJB.equals(type)
          || Constants.PROJECT_PACKAGING_WAR.equals(type);
  if (!supportedPackaging) {
    return false;
  }
  for (Object candidate : this.reactorProjects) {
    final MavenProject reactorProject = (MavenProject) candidate;
    if (!reactorProject.getGroupId().equals(artifact.getGroupId())
        || !reactorProject.getArtifactId().equals(artifact.getArtifactId())) {
      continue;
    }
    if (reactorProject.getVersion().equals(artifact.getVersion())) {
      return true;
    }
    getLog()
        .info(
            "Artifact "
                + artifact.getId()
                + " already available as a reactor project, but with different version. "
                + "Expected: "
                + artifact.getVersion()
                + ", found: "
                + reactorProject.getVersion());
  }
  return false;
}
/**
 * Get the set of artifacts that are provided by Synapse at runtime.
 *
 * <p>Locates the synapse-core dependency of the XAR project, loads its project model from the
 * repository, creates its runtime-scope jar dependencies and resolves them transitively.
 *
 * @return the resolved runtime artifacts of synapse-core
 * @throws MojoExecutionException if synapse-core is not a dependency, its project model cannot
 *     be built, or its dependencies cannot be created or resolved
 */
private Set<Artifact> getSynapseRuntimeArtifacts() throws MojoExecutionException {
  Log log = getLog();
  log.debug("Looking for synapse-core artifact in XAR project dependencies ...");
  Artifact synapseCore = null;
  for (Iterator<?> it = project.getDependencyArtifacts().iterator(); it.hasNext(); ) {
    Artifact artifact = (Artifact) it.next();
    if (artifact.getGroupId().equals("org.apache.synapse")
        && artifact.getArtifactId().equals("synapse-core")) {
      synapseCore = artifact;
      break;
    }
  }
  if (synapseCore == null) {
    throw new MojoExecutionException("Could not locate dependency on synapse-core");
  }
  log.debug("Loading project data for " + synapseCore + " ...");
  MavenProject synapseCoreProject;
  try {
    synapseCoreProject =
        projectBuilder.buildFromRepository(
            synapseCore, remoteArtifactRepositories, localRepository);
  } catch (ProjectBuildingException e) {
    throw new MojoExecutionException(
        "Unable to retrieve project information for " + synapseCore, e);
  }
  Set<Artifact> synapseRuntimeDeps;
  try {
    synapseRuntimeDeps =
        synapseCoreProject.createArtifacts(
            artifactFactory, Artifact.SCOPE_RUNTIME, new TypeArtifactFilter("jar"));
  } catch (InvalidDependencyVersionException e) {
    throw new MojoExecutionException("Unable to get project dependencies for " + synapseCore, e);
  }
  log.debug("Direct runtime dependencies for " + synapseCore + " :");
  logArtifacts(synapseRuntimeDeps);
  log.debug("Resolving transitive dependencies for " + synapseCore + " ...");
  try {
    synapseRuntimeDeps =
        artifactCollector
            .collect(
                synapseRuntimeDeps,
                synapseCoreProject.getArtifact(),
                synapseCoreProject.getManagedVersionMap(),
                localRepository,
                remoteArtifactRepositories,
                artifactMetadataSource,
                null,
                Collections.singletonList(new DebugResolutionListener(logger)))
            .getArtifacts();
  } catch (ArtifactResolutionException e) {
    // Fixed: the original dropped the cause; preserve it so resolution failures are
    // diagnosable from the stack trace.
    throw new MojoExecutionException(
        "Unable to resolve transitive dependencies for " + synapseCore, e);
  }
  log.debug("All runtime dependencies for " + synapseCore + " :");
  logArtifacts(synapseRuntimeDeps);
  return synapseRuntimeDeps;
}
/**
 * Log Projects and their resolved dependencies via MavenProject.getArtifacts().
 *
 * <p>All output is emitted at debug level, so this is a no-op unless debug logging is enabled.
 *
 * @param reactorProjects MavenProjects in the current reactor
 */
private void checkReactor(final List<MavenProject> reactorProjects) {
  for (final MavenProject reactorProject : reactorProjects) {
    if (getLog().isDebugEnabled()) {
      getLog()
          .debug(
              "Check resolved Artifacts for: "
                  + "\ngroudId: "
                  + reactorProject.getGroupId()
                  + "\nartifactId: "
                  + reactorProject.getArtifactId()
                  + "\nversion: "
                  + reactorProject.getVersion());
    }
    final boolean noArtifacts =
        reactorProject.getArtifacts() == null || reactorProject.getArtifacts().isEmpty();
    if (noArtifacts) {
      if (getLog().isDebugEnabled()) {
        getLog().debug("+ Dependencies not resolved or Reactor-Project has no dependencies!");
      }
      continue;
    }
    for (final Artifact artifact : reactorProject.getArtifacts()) {
      if (getLog().isDebugEnabled()) {
        getLog()
            .debug(
                "  + "
                    + artifact.getGroupId()
                    + " : "
                    + artifact.getArtifactId()
                    + " : "
                    + artifact.getVersion()
                    + " : "
                    + artifact.getType()
                    + " : "
                    + artifact.getFile());
      }
    }
  }
}
/**
 * Get the artifact which refers to the POM of the executable artifact.
 *
 * @param executableArtifact this artifact refers to the actual assembly.
 * @return an artifact which refers to the POM of the executable artifact.
 */
private Artifact getExecutablePomArtifact(Artifact executableArtifact) {
  final String groupId = executableArtifact.getGroupId();
  final String artifactId = executableArtifact.getArtifactId();
  final String version = executableArtifact.getVersion();
  return this.artifactFactory.createBuildArtifact(groupId, artifactId, version, "pom");
}
/**
 * Resolves the artifact containing a remote WSDL/WADL document.
 *
 * <p>First looks through the reactor projects of the current Maven session so that an artifact
 * that has not been built yet can still be resolved; if a reactor project matches the
 * coordinates, its attached artifact of type "wadl" is returned. Otherwise the artifact is
 * resolved from the repositories via the artifact resolver.
 *
 * <p>NOTE(review): the original Javadoc described merging WsdlOptions and did not match this
 * method; replaced with a description of the actual behavior. Also note the method only returns
 * reactor artifacts of type "wadl" while its error messages mention "wsdl" — verify which type
 * is intended.
 *
 * @param remoteRepos the remote repositories to resolve from
 * @param artifact the artifact coordinates to resolve
 * @return the resolved artifact
 * @throws MojoExecutionException if the artifact cannot be downloaded or found
 */
@SuppressWarnings("unchecked")
private Artifact resolveRemoteWsdlArtifact(List<?> remoteRepos, Artifact artifact)
    throws MojoExecutionException {
  /**
   * First try to find the artifact in the reactor projects of the maven session. So an artifact
   * that is not yet built can be resolved
   */
  List<MavenProject> rProjects = mavenSession.getSortedProjects();
  for (MavenProject rProject : rProjects) {
    if (artifact.getGroupId().equals(rProject.getGroupId())
        && artifact.getArtifactId().equals(rProject.getArtifactId())
        && artifact.getVersion().equals(rProject.getVersion())) {
      Set<Artifact> artifacts = rProject.getArtifacts();
      for (Artifact pArtifact : artifacts) {
        if ("wadl".equals(pArtifact.getType())) {
          return pArtifact;
        }
      }
    }
  }
  /** If this did not work resolve the artifact using the artifactResolver */
  try {
    artifactResolver.resolve(artifact, remoteRepos, localRepository);
  } catch (ArtifactResolutionException e) {
    throw new MojoExecutionException("Error downloading wsdl artifact.", e);
  } catch (ArtifactNotFoundException e) {
    throw new MojoExecutionException("Resource can not be found.", e);
  }
  return artifact;
}
/**
 * Populates the Jetty web app context with the project's dependencies: war artifacts become
 * overlay resources, other (non-provided, non-test) artifacts become WEB-INF/lib entries.
 * Dependencies whose file is missing from the repository are logged and skipped.
 *
 * @param project the Maven project whose artifacts are inspected
 * @param webAppConfig the Jetty context receiving the overlays and classpath
 * @throws Exception if an overlay resource cannot be created
 */
private void addDependencies(final MavenProject project, final JettyWebAppContext webAppConfig)
    throws Exception {
  List<File> dependencyFiles = new ArrayList<File>();
  List<Resource> overlays = new ArrayList<Resource>();
  for (Artifact artifact : project.getArtifacts()) {
    if (artifact.getType().equals("war")) {
      // Fixed: File.toURL() is deprecated because it does not escape illegal characters;
      // toURI().toURL() produces a properly encoded URL.
      overlays.add(
          Resource.newResource("jar:" + artifact.getFile().toURI().toURL().toString() + "!/"));
    } else if ((!Artifact.SCOPE_PROVIDED.equals(artifact.getScope()))
        && (!Artifact.SCOPE_TEST.equals(artifact.getScope()))) {
      File dependencyFile = artifact.getFile();
      if (dependencyFile == null || !dependencyFile.exists()) {
        String coordinates =
            String.format(
                "%s:%s:%s",
                artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
        LOG.log(
            Level.WARNING,
            "Dependency '" + coordinates + "' does not exist in repository. Skipping!");
        continue;
      }
      // Reuse the already-fetched file rather than calling artifact.getFile() again.
      dependencyFiles.add(dependencyFile);
    }
  }
  webAppConfig.setOverlays(overlays);
  webAppConfig.setWebInfLib(dependencyFiles);
}
/**
 * Copies a native library artifact into the destination directory for the given NDK
 * architecture, normalizing the file name to the {@code lib<artifactId>.so} convention.
 *
 * <p>If the artifact's file name starts with {@code lib<ndkFinalLibraryName>}, it is assumed to
 * have been built by the NDK in this module and its original file name is preserved.
 *
 * @param artifact the native library artifact to copy
 * @param destinationDirectory base directory to copy into
 * @param ndkArchitecture target architecture used to compute the final directory
 * @throws MojoExecutionException if resolving or copying the artifact fails
 */
private void copyNativeLibraryArtifactFileToDirectory(
    Artifact artifact, File destinationDirectory, String ndkArchitecture)
    throws MojoExecutionException {
  final File artifactFile = getArtifactResolverHelper().resolveArtifactToFile(artifact);
  try {
    final String artifactId = artifact.getArtifactId();
    // Ensure the standard "lib" prefix without doubling it.
    String filename =
        artifactId.startsWith("lib") ? artifactId + ".so" : "lib" + artifactId + ".so";
    if (ndkFinalLibraryName != null
        && artifact.getFile().getName().startsWith("lib" + ndkFinalLibraryName)) {
      // The artifact looks like one we built with the NDK in this module
      // preserve the name from the NDK build
      filename = artifact.getFile().getName();
    }
    final File finalDestinationDirectory =
        getFinalDestinationDirectoryFor(artifact, destinationDirectory, ndkArchitecture);
    final File file = new File(finalDestinationDirectory, filename);
    getLog()
        .debug(
            "Copying native dependency "
                + artifactId
                + " ("
                + artifact.getGroupId()
                + ") to "
                + file);
    FileUtils.copyFile(artifactFile, file);
  } catch (Exception e) {
    throw new MojoExecutionException("Could not copy native dependency.", e);
  }
}
/**
 * Builds a classpath string from the project's artifacts, separated by the platform path
 * separator. The jboss-logmanager artifact is skipped. When {@code includeProjectArtifact} is
 * true the project's own build output directory is appended as well.
 */
String dependencies(boolean includeProjectArtifact) {
  final List<String> elements = new ArrayList<>();
  for (Artifact artifact : this.project.getArtifacts()) {
    final boolean isLogManager =
        artifact.getGroupId().equals("org.jboss.logmanager")
            && artifact.getArtifactId().equals("jboss-logmanager");
    if (isLogManager) {
      continue;
    }
    elements.add(artifact.getFile().toString());
  }
  if (includeProjectArtifact) {
    elements.add(this.project.getBuild().getOutputDirectory());
  }
  return String.join(String.valueOf(File.pathSeparatorChar), elements);
}
/**
 * Runs wadl2java code generation for one WADL option, either in a forked JVM or in-process.
 *
 * <p>A per-WADL "done file" is used as an up-to-date marker: generation is skipped when {@code
 * shouldRun} decides nothing changed, and the marker is (re)created after a successful run.
 *
 * @param option the WADL option describing input document and output directory
 * @param bus the CXF bus to reuse in-process; may be {@code null}, in which case one is created
 * @param classPath additional classpath entries for the forked generator
 * @return the (possibly newly created) bus
 * @throws MojoExecutionException if a plugin artifact has no file or generation fails
 */
protected Bus callCodeGenerator(WadlOption option, Bus bus, Set<URI> classPath)
    throws MojoExecutionException {
  File outputDirFile = option.getOutputDir();
  outputDirFile.mkdirs();
  URI basedir = project.getBasedir().toURI();
  URI wadlURI = option.getWadlURI(basedir);
  File doneFile = getDoneFile(basedir, wadlURI);
  if (!shouldRun(option, doneFile, wadlURI)) {
    return bus;
  }
  // Remove the stale marker up front so a failed run leaves generation marked as not done.
  doneFile.delete();
  List<String> list =
      option.generateCommandLine(outputDirFile, basedir, wadlURI, getLog().isDebugEnabled());
  String[] args = list.toArray(new String[list.size()]);
  getLog().debug("Calling wadl2java with args: " + Arrays.toString(args));
  if (!"false".equals(fork)) {
    // Forked mode: build the tool classpath from the plugin artifacts plus extra entries.
    Set<URI> artifactsPath = new LinkedHashSet<URI>();
    for (Artifact a : pluginArtifacts) {
      File file = a.getFile();
      if (file == null) {
        throw new MojoExecutionException(
            "Unable to find file for artifact "
                + a.getGroupId()
                + ":"
                + a.getArtifactId()
                + ":"
                + a.getVersion());
      }
      artifactsPath.add(file.toURI());
    }
    addPluginArtifact(artifactsPath);
    artifactsPath.addAll(classPath);
    runForked(artifactsPath, WADLToJava.class, args);
  } else {
    // In-process mode: make sure a bus exists and is bound to this thread.
    if (bus == null) {
      bus = BusFactory.newInstance().createBus();
      BusFactory.setThreadDefaultBus(bus);
    }
    try {
      new WADLToJava(args).run(new ToolContext());
    } catch (Throwable e) {
      getLog().debug(e);
      throw new MojoExecutionException(e.getMessage(), e);
    }
  }
  // Best-effort: recreate the up-to-date marker; failure only degrades incremental builds.
  try {
    doneFile.createNewFile();
  } catch (Throwable e) {
    getLog().warn("Could not create marker file " + doneFile.getAbsolutePath());
    getLog().debug(e);
  }
  return bus;
}
/**
 * Tells whether the given artifact matches one of this plugin's artifacts by groupId and
 * artifactId (version is deliberately ignored).
 *
 * @param artifact the artifact to check
 * @return {@code true} when a plugin artifact with the same groupId/artifactId exists
 */
public boolean isPluginArtifact(Artifact artifact) {
  if (pluginArtifacts == null || pluginArtifacts.isEmpty()) {
    return false;
  }
  for (Artifact pluginArtifact : pluginArtifacts) {
    if (getLog().isDebugEnabled()) {
      getLog().debug("Checking " + pluginArtifact);
    }
    if (pluginArtifact.getGroupId().equals(artifact.getGroupId())
        && pluginArtifact.getArtifactId().equals(artifact.getArtifactId())) {
      return true;
    }
  }
  return false;
}
@Override protected boolean filterUnwantedArtifacts(Artifact artifact) { // filter out unwanted OSGi related JARs as some projects like ActiveMQ includes these // dependencies // and you should use the camel-blueprint goal for running as OSGi if (artifact.getGroupId().equals("org.apache.aries.blueprint")) { return true; } else if (artifact.getGroupId().startsWith("org.ops4j")) { return true; } else if (artifact.getGroupId().equals("org.osgi")) { return true; } else if (artifact.getGroupId().equals("org.apache.felix")) { return true; } return super.filterUnwantedArtifacts(artifact); }
/**
 * Creates a mock {@link Artifact} whose coordinate getters return the constants from
 * {@code ArtifactConstants}, for use by the tests in this class.
 */
@Before
public void setUp() {
  artifact = Mockito.mock(Artifact.class);
  Mockito.when(artifact.getArtifactId()).thenReturn(ArtifactConstants.ARTIFACTID);
  Mockito.when(artifact.getGroupId()).thenReturn(ArtifactConstants.GROUPID);
  Mockito.when(artifact.getVersion()).thenReturn(ArtifactConstants.VERSION);
  Mockito.when(artifact.getClassifier()).thenReturn(ArtifactConstants.CLASSIFIER);
  Mockito.when(artifact.getType()).thenReturn(ArtifactConstants.JAR_TYPE);
}
/**
 * Finds the camel-core artifact among the given artifacts.
 *
 * @param artifacts the artifacts to search
 * @return the org.apache.camel:camel-core artifact, or {@code null} when absent
 * @throws MojoExecutionException declared for subclass overrides (not thrown here)
 */
protected Artifact getCamelCoreArtifact(Set<Artifact> artifacts) throws MojoExecutionException {
  for (Artifact candidate : artifacts) {
    final boolean isCamelCore =
        candidate.getGroupId().equals("org.apache.camel")
            && candidate.getArtifactId().equals("camel-core");
    if (isCamelCore) {
      return candidate;
    }
  }
  return null;
}
/**
 * Selectively add artifacts from source to target excluding any whose groupId and artifactId
 * match the current build. Introduced to work around an issue when the ndk-build is executed
 * twice by maven for example when invoking maven 'install site'. In this case the artifacts
 * attached by the first invocation are found but are not valid dependencies and must be excluded.
 *
 * @param target artifact Set to copy in to
 * @param source artifact Set to filter
 */
private void filterNativeDependencies(Set<Artifact> target, Set<Artifact> source) {
  for (Artifact candidate : source) {
    final boolean attachedByThisBuild =
        project.getGroupId().equals(candidate.getGroupId())
            && project.getArtifactId().equals(candidate.getArtifactId());
    if (attachedByThisBuild) {
      getLog().warn("Excluding native dependency attached by this build");
    } else {
      target.add(candidate);
    }
  }
}
private static Artifact findCamelCoreArtifact(MavenProject project) { // maybe this project is camel-core itself Artifact artifact = project.getArtifact(); if (artifact.getGroupId().equals("org.apache.camel") && artifact.getArtifactId().equals("camel-core")) { return artifact; } // or its a component which has a dependency to camel-core Iterator it = project.getDependencyArtifacts().iterator(); while (it.hasNext()) { artifact = (Artifact) it.next(); if (artifact.getGroupId().equals("org.apache.camel") && artifact.getArtifactId().equals("camel-core")) { return artifact; } } return null; }
/**
 * Finds the requested artifact in the supplied artifact collection.
 *
 * @param artifacts A collection of artifacts.
 * @param groupId The group ID of the artifact to be found.
 * @param artifactId The artifact ID of the artifact to be found.
 * @return The artifact from the collection that matches the group ID and artifact ID value or
 *     {@code null} if no match is found.
 */
private Artifact findArtifact(
    final Collection<Artifact> artifacts, final String groupId, final String artifactId) {
  for (final Artifact candidate : artifacts) {
    final boolean matches =
        candidate.getGroupId().equals(groupId) && candidate.getArtifactId().equals(artifactId);
    if (matches) {
      return candidate;
    }
  }
  return null;
}
/**
 * Collects, for each of the given project artifacts, the nexus-plugin and zip/bundle
 * dependencies declared in that artifact's POM, resolving them transitively and recursing into
 * any plugins found so their own plugin dependencies are included as well.
 *
 * @param projectArtifacts the artifacts whose POM dependencies are examined
 * @return the accumulated plugin dependencies (insertion-ordered, de-duplicated)
 * @throws MojoExecutionException if a POM cannot be built or dependencies cannot be resolved
 */
@SuppressWarnings("unchecked")
private Collection<Artifact> getNonTransitivePlugins(Set<Artifact> projectArtifacts)
    throws MojoExecutionException {
  Collection<Artifact> deps = new LinkedHashSet<Artifact>();
  for (Artifact artifact : projectArtifacts) {
    // Re-read each artifact's POM to get the dependencies it declares itself.
    Artifact pomArtifact =
        artifactFactory.createArtifact(
            artifact.getGroupId(),
            artifact.getArtifactId(),
            artifact.getVersion(),
            artifact.getClassifier(),
            "pom");
    Set<Artifact> result;
    try {
      MavenProject pomProject =
          mavenProjectBuilder.buildFromRepository(
              pomArtifact, remoteRepositories, localRepository);
      Set<Artifact> artifacts = pomProject.createArtifacts(artifactFactory, null, null);
      artifacts = filterOutSystemDependencies(artifacts);
      ArtifactResolutionResult arr =
          resolver.resolveTransitively(
              artifacts,
              pomArtifact,
              localRepository,
              remoteRepositories,
              artifactMetadataSource,
              null);
      result = arr.getArtifacts();
    } catch (Exception e) {
      throw new MojoExecutionException(
          "Failed to resolve non-transitive deps " + e.getMessage(), e);
    }
    // Keep only plugin-like artifacts, then recurse so plugins of plugins are included too.
    LinkedHashSet<Artifact> plugins = new LinkedHashSet<Artifact>();
    plugins.addAll(filtterArtifacts(result, getFilters(null, null, "nexus-plugin", null)));
    plugins.addAll(filtterArtifacts(result, getFilters(null, null, "zip", "bundle")));
    plugins.addAll(getNonTransitivePlugins(plugins));
    if (!plugins.isEmpty()) {
      getLog()
          .debug(
              "Adding non-transitive dependencies for: "
                  + artifact
                  + " -\n"
                  + plugins.toString().replace(',', '\n'));
    }
    deps.addAll(plugins);
  }
  return deps;
}
@Override public boolean include(Artifact artifact) { if (artifact.getGroupId().equals(projectGroupId)) { // Do not try to download artifacts from current project return false; } else if (sourceLocations.getSourceLocation(artifact.getGroupId(), artifact.getArtifactId()) != null) { // It is one of the artifacts of this project, hence the dependencies will have been // downloaded by Maven. Skip it. return false; } // This case is handled by the more generic case above // if (artifact.getGroupId().equals(projectGroupId) && // artifact.getArtifactId().equals(projectArtifactId) && // artifact.getVersion().equals(projectVersion)) { // // Current project's artifact is not yet deployed, therefore do not treat it // return false; // } return true; }
public boolean accept(Artifact artifact) { // Exclude non Nuxeo artifacts if (!artifact.getGroupId().startsWith("org.nuxeo")) { return false; } boolean include = matchPattern(getValuesToMatch(artifact)); if (MavenClientFactory.getLog().isDebugEnabled()) { MavenClientFactory.getLog().debug((include ? "accepts " : "rejects ") + artifact); } return include; }