/** Logs an error stating that the given artifact must be declared with scope {@code provided}. */
private void logMustBeProvided(Artifact artifact) {
  String message =
      String.format(
          "This dependency must be declared with scope <provided>: %s",
          artifact.getDependencyConflictId());
  getLog().error(message);
}
// Example #2
 /**
  * Build a filter that excludes all artifacts that are provided by Synapse at runtime.
  *
  * @return
  * @throws MojoExecutionException
  */
 private ArtifactFilter buildSynapseRuntimeArtifactFilter() throws MojoExecutionException {
   final Map<String, Artifact> artifacts = new HashMap<String, Artifact>();
   for (Artifact artifact : getSynapseRuntimeArtifacts()) {
     artifacts.put(artifact.getDependencyConflictId(), artifact);
   }
   final Set<String> defaultExclusionSet =
       new HashSet<String>(Arrays.asList(defaultRuntimeExcludes));
   return new ArtifactFilter() {
     public boolean include(Artifact artifact) {
       Artifact runtimeArtifact = artifacts.get(artifact.getDependencyConflictId());
       if (runtimeArtifact == null) {
         return !defaultExclusionSet.contains(artifact.getDependencyConflictId());
       } else {
         if (!runtimeArtifact.getVersion().equals(artifact.getVersion())) {
           getLog()
               .warn(
                   "Possible runtime version conflict for "
                       + artifact.getArtifactId()
                       + ": XAR depends on "
                       + artifact.getVersion()
                       + ", Synapse runtime provides "
                       + runtimeArtifact.getVersion());
         }
         return false;
       }
     }
   };
 }
// Example #3
  /**
   * Checks that {@code dependencyManagement} controls dependency scopes as expected: a scope
   * inherited from depMgmt, a transitive scope overridden by depMgmt, and a direct declaration
   * that overrides depMgmt.
   */
  public void testDependencyManagementOverridesTransitiveDependencyVersion() throws Exception {
    File localRepo = getLocalRepositoryPath();

    File pom0 = new File(localRepo, "p0/pom.xml");
    File pom0Basedir = pom0.getParentFile();
    File pom1 = new File(pom0Basedir, "p1/pom.xml");

    // Load the parent (p0) and the child project that inherits from it.
    MavenProject project0 = getProjectWithDependencies(pom0);
    MavenProject project1 = getProjectWithDependencies(pom1);

    assertEquals(pom0Basedir, project1.getParent().getBasedir());

    Map map = project1.getArtifactMap();
    assertNotNull("No artifacts", map);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue on a comparison.
    assertEquals("Wrong number of artifacts", 3, map.size());

    Artifact a = (Artifact) map.get("maven-test:t10-a");
    Artifact b = (Artifact) map.get("maven-test:t10-b");
    Artifact c = (Artifact) map.get("maven-test:t10-c");

    assertNotNull(a);
    assertNotNull(b);
    assertNotNull(c);

    // inherited from depMgmt
    assertEquals("Incorrect scope for " + a.getDependencyConflictId(), "test", a.getScope());

    // transitive dep, overridden by depMgmt
    assertEquals("Incorrect scope for " + b.getDependencyConflictId(), "runtime", b.getScope());

    // direct dep, overrides depMgmt
    assertEquals("Incorrect scope for " + c.getDependencyConflictId(), "runtime", c.getScope());
  }
  /**
   * Add sources.jar artifacts for project dependencies listed as compileSourcesArtifacts. This is a
   * GWT hack to avoid packaging java source files into JAR when sharing code between server and
   * client. Typically, some domain model classes or business rules may be packaged as a separate
   * Maven module. With GWT packaging this requires to distribute such classes with code, that may
   * not be desirable.
   *
   * <p>The hack can also be used to include utility code from external libraries that may not
   * have been designed for GWT.
   *
   * @param cmd the java command whose classpath receives the resolved sources.jar files
   * @throws MojoExecutionException if a sources artifact cannot be resolved
   */
  protected void addCompileSourceArtifacts(JavaCommand cmd) throws MojoExecutionException {
    if (compileSourcesArtifacts == null) {
      return;
    }
    for (String include : compileSourcesArtifacts) {
      // Entries are groupId:artifactId[:type]; normalize to a full dependency conflict id.
      List<String> parts = new ArrayList<String>(Arrays.asList(include.split(":")));
      if (parts.size() == 2) {
        // type is optional as it will mostly be "jar"
        parts.add("jar");
      }
      String dependencyId = StringUtils.join(parts.iterator(), ":");
      boolean found = false;

      for (Artifact artifact : getProjectArtifacts()) {
        getLog().debug("compare " + dependencyId + " with " + artifact.getDependencyConflictId());
        if (artifact.getDependencyConflictId().equals(dependencyId)) {
          getLog().debug("Add " + dependencyId + " sources.jar artifact to compile classpath");
          Artifact sources =
              resolve(
                  artifact.getGroupId(),
                  artifact.getArtifactId(),
                  artifact.getVersion(),
                  "jar",
                  "sources");
          cmd.addToClasspath(sources.getFile());
          found = true;
          break;
        }
      }
      // Braces added: the warn call is a multi-line statement and previously hung off a
      // brace-less if, which is an easy source of future bugs.
      if (!found) {
        getLog()
            .warn(
                "Declared compileSourcesArtifact was not found in project dependencies "
                    + dependencyId);
      }
    }
  }
  /**
   * Copies every dependency that is not provided at runtime into the plugin's {@code LIB_DIR}
   * directory, logging which dependencies were packaged.
   *
   * @return the relative library paths ({@code LIB_DIR} + file name) of the copied dependencies
   * @throws Exception if a dependency file cannot be copied
   */
  private List<String> copyDependencies() throws Exception {
    final List<String> conflictIds = new ArrayList<String>();
    final List<String> libraryPaths = new ArrayList<String>();
    final File targetDirectory = new File(getAppDirectory(), LIB_DIR);

    for (final Artifact dependency : getNotProvidedDependencies()) {
      final String fileName = getDefaultFinalName(dependency);
      // Only copy when the source is newer, to avoid needless rewrites.
      FileUtils.copyFileIfModified(dependency.getFile(), new File(targetDirectory, fileName));
      libraryPaths.add(LIB_DIR + fileName);
      conflictIds.add(dependency.getDependencyConflictId());
    }

    if (!conflictIds.isEmpty()) {
      getLog()
          .info(getMessage("Following dependencies are packaged inside the plugin:", conflictIds));
    }

    return libraryPaths;
  }
  /**
   * Resolves the root artifact of the request and, when requested, its transitive dependencies.
   *
   * <p>Flow: resolve the root artifact, optionally retrieve and merge its direct dependencies via
   * the metadata source, collect the dependency graph, then resolve each collected node through
   * the executor. Errors are accumulated on the returned {@code ArtifactResolutionResult} rather
   * than thrown.
   *
   * @param request describes what to resolve and how (root, dependencies, filters, listeners)
   * @return the resolution result, carrying resolved artifacts and/or accumulated exceptions
   */
  public ArtifactResolutionResult resolve(ArtifactResolutionRequest request) {
    Artifact rootArtifact = request.getArtifact();
    Set<Artifact> artifacts = request.getArtifactDependencies();
    Map<String, Artifact> managedVersions = request.getManagedVersionMap();
    List<ResolutionListener> listeners = request.getListeners();
    ArtifactFilter collectionFilter = request.getCollectionFilter();
    ArtifactFilter resolutionFilter = request.getResolutionFilter();
    RepositorySystemSession session = getSession(request.getLocalRepository());

    // TODO: hack because metadata isn't generated in m2e correctly and i want to run the maven i
    // have in the workspace
    if (source == null) {
      try {
        source = container.lookup(ArtifactMetadataSource.class);
      } catch (ComponentLookupException e) {
        // won't happen
      }
    }

    // No listeners supplied: install the default warning (and, in debug, tracing) listeners.
    if (listeners == null) {
      listeners = new ArrayList<ResolutionListener>();

      if (logger.isDebugEnabled()) {
        listeners.add(new DebugResolutionListener(logger));
      }

      listeners.add(new WarningResolutionListener(logger));
    }

    ArtifactResolutionResult result = new ArtifactResolutionResult();

    // The root artifact may, or may not be resolved so we need to check before we attempt to
    // resolve.
    // This is often an artifact like a POM that is taken from disk and we already have hold of the
    // file reference. But this may be a Maven Plugin that we need to resolve from a remote
    // repository
    // as well as its dependencies.

    if (request.isResolveRoot() /* && rootArtifact.getFile() == null */) {
      try {
        resolve(rootArtifact, request.getRemoteRepositories(), session);
      } catch (ArtifactResolutionException e) {
        // Root failed to resolve: record the error and stop — dependencies would be meaningless.
        result.addErrorArtifactException(e);
        return result;
      } catch (ArtifactNotFoundException e) {
        result.addMissingArtifact(request.getArtifact());
        return result;
      }
    }

    ArtifactResolutionRequest collectionRequest = request;

    if (request.isResolveTransitively()) {
      // Ask the metadata source for the root's direct dependencies and managed versions.
      MetadataResolutionRequest metadataRequest = new DefaultMetadataResolutionRequest(request);

      metadataRequest.setArtifact(rootArtifact);
      metadataRequest.setResolveManagedVersions(managedVersions == null);

      try {
        ResolutionGroup resolutionGroup = source.retrieve(metadataRequest);

        if (managedVersions == null) {
          managedVersions = resolutionGroup.getManagedVersions();
        }

        Set<Artifact> directArtifacts = resolutionGroup.getArtifacts();

        if (artifacts == null || artifacts.isEmpty()) {
          artifacts = directArtifacts;
        } else {
          // Merge explicitly-supplied dependencies with the retrieved direct dependencies,
          // keeping only the FIRST artifact seen per dependency conflict id — explicitly
          // supplied artifacts therefore win over retrieved ones, and order is preserved.
          List<Artifact> allArtifacts = new ArrayList<Artifact>();
          allArtifacts.addAll(artifacts);
          allArtifacts.addAll(directArtifacts);

          Map<String, Artifact> mergedArtifacts = new LinkedHashMap<String, Artifact>();
          for (Artifact artifact : allArtifacts) {
            String conflictId = artifact.getDependencyConflictId();
            if (!mergedArtifacts.containsKey(conflictId)) {
              mergedArtifacts.put(conflictId, artifact);
            }
          }

          artifacts = new LinkedHashSet<Artifact>(mergedArtifacts.values());
        }

        // Collection must use the repositories reported by the metadata source, so build a
        // derived request carrying over the transport configuration.
        collectionRequest = new ArtifactResolutionRequest(request);
        collectionRequest.setServers(request.getServers());
        collectionRequest.setMirrors(request.getMirrors());
        collectionRequest.setProxies(request.getProxies());
        collectionRequest.setRemoteRepositories(resolutionGroup.getResolutionRepositories());
      } catch (ArtifactMetadataRetrievalException e) {
        // Wrap the metadata failure with the context of which artifact we were working on.
        ArtifactResolutionException are =
            new ArtifactResolutionException(
                "Unable to get dependency information for "
                    + rootArtifact.getId()
                    + ": "
                    + e.getMessage(),
                rootArtifact,
                metadataRequest.getRemoteRepositories(),
                e);
        result.addMetadataResolutionException(are);
        return result;
      }
    }

    // Nothing to collect: return just the root (when requested).
    if (artifacts == null || artifacts.isEmpty()) {
      if (request.isResolveRoot()) {
        result.addArtifact(rootArtifact);
      }
      return result;
    }

    // After the collection we will have the artifact object in the result but they will not be
    // resolved yet.
    result =
        artifactCollector.collect(
            artifacts,
            rootArtifact,
            managedVersions,
            collectionRequest,
            source,
            collectionFilter,
            listeners,
            null);

    // We have metadata retrieval problems, or there are cycles that have been detected
    // so we give this back to the calling code and let them deal with this information
    // appropriately.

    if (result.hasMetadataResolutionExceptions()
        || result.hasVersionRangeViolations()
        || result.hasCircularDependencyExceptions()) {
      return result;
    }

    if (result.getArtifactResolutionNodes() != null) {
      // Capture the caller's context class loader so worker tasks see the same classes.
      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

      // The latch counts EVERY collected node; nodes skipped by the resolution filter must
      // still count down or await() would never return.
      CountDownLatch latch = new CountDownLatch(result.getArtifactResolutionNodes().size());

      for (ResolutionNode node : result.getArtifactResolutionNodes()) {
        Artifact artifact = node.getArtifact();

        if (resolutionFilter == null || resolutionFilter.include(artifact)) {
          executor.execute(
              new ResolveTask(
                  classLoader, latch, artifact, session, node.getRemoteRepositories(), result));
        } else {
          latch.countDown();
        }
      }
      try {
        latch.await();
      } catch (InterruptedException e) {
        // NOTE(review): the thread's interrupt status is not restored here
        // (Thread.currentThread().interrupt()) — confirm whether callers rely on it.
        result.addErrorArtifactException(
            new ArtifactResolutionException("Resolution interrupted", rootArtifact, e));
      }
    }

    // We want to send the root artifact back in the result but we need to do this after the other
    // dependencies
    // have been resolved.
    if (request.isResolveRoot()) {
      // Add the root artifact (as the first artifact to retain logical order of class path!)
      Set<Artifact> allArtifacts = new LinkedHashSet<Artifact>();
      allArtifacts.add(rootArtifact);
      allArtifacts.addAll(result.getArtifacts());
      result.setArtifacts(allArtifacts);
    }

    return result;
  }