private Set<String> populateRealm(
      ClassRealm classRealm, List<ClassRealmConstituent> constituents) {
    // Preserves insertion order of the constituent ids that were added.
    Set<String> realmContents = new LinkedHashSet<String>();

    if (logger.isDebugEnabled()) {
      logger.debug("Populating class realm " + classRealm.getId());
    }

    for (ClassRealmConstituent constituent : constituents) {
      String constituentId = getId(constituent);
      realmContents.add(constituentId);

      if (logger.isDebugEnabled()) {
        logger.debug("  Included: " + constituentId);
      }

      File constituentFile = constituent.getFile();
      try {
        classRealm.addURL(constituentFile.toURI().toURL());
      } catch (MalformedURLException e) {
        // A URL derived from an existing File cannot be malformed.
        logger.error(e.getMessage(), e);
      }
    }

    return realmContents;
  }
  /**
   * Builds the projects of one task segment concurrently.
   *
   * <p>First schedules the projects that have no unbuilt dependencies, then, as each build
   * completes, schedules any projects that have just become buildable. Outstanding futures are
   * drained at the end so nothing is leaked when the loop exits early.
   *
   * @param analyzer dependency graph that reports which projects are schedulable
   * @param reactorContext reactor state, including the halt flag
   * @param rootSession session whose result collects build exceptions
   * @param service completion service the build callables are submitted to
   * @param taskSegment the task segment being executed
   * @param projectBuildList maps each project to its build segment
   * @param muxer multiplexes per-thread build output
   */
  private void multiThreadedProjectTaskSegmentBuild(
      ConcurrencyDependencyGraph analyzer,
      ReactorContext reactorContext,
      MavenSession rootSession,
      CompletionService<ProjectSegment> service,
      TaskSegment taskSegment,
      Map<MavenProject, ProjectSegment> projectBuildList,
      ThreadOutputMuxer muxer) {

    // schedule independent projects
    for (MavenProject mavenProject : analyzer.getRootSchedulableBuilds()) {
      ProjectSegment projectSegment = projectBuildList.get(mavenProject);
      logger.debug("Scheduling: " + projectSegment.getProject());
      Callable<ProjectSegment> cb =
          createBuildCallable(rootSession, projectSegment, reactorContext, taskSegment, muxer);
      service.submit(cb);
    }

    // for each finished project
    for (int i = 0; i < analyzer.getNumberOfBuilds(); i++) {
      try {
        ProjectSegment projectBuild = service.take().get();
        if (reactorContext.getReactorBuildStatus().isHalted()) {
          break;
        }
        final List<MavenProject> newItemsThatCanBeBuilt =
            analyzer.markAsFinished(projectBuild.getProject());
        for (MavenProject mavenProject : newItemsThatCanBeBuilt) {
          ProjectSegment scheduledDependent = projectBuildList.get(mavenProject);
          logger.debug("Scheduling: " + scheduledDependent);
          Callable<ProjectSegment> cb =
              createBuildCallable(
                  rootSession, scheduledDependent, reactorContext, taskSegment, muxer);
          service.submit(cb);
        }
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe the interruption.
        Thread.currentThread().interrupt();
        rootSession.getResult().addException(e);
        break;
      } catch (ExecutionException e) {
        // TODO MNG-5766 changes likely made this redundant
        rootSession.getResult().addException(e);
        break;
      }
    }

    // cancel outstanding builds (if any)  - this can happen if an exception is thrown in above
    // block

    Future<ProjectSegment> unprocessed;
    while ((unprocessed = service.poll()) != null) {
      try {
        unprocessed.get();
      } catch (InterruptedException e) {
        // Re-assert the interrupt before propagating as unchecked.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
      } catch (ExecutionException e) {
        throw new RuntimeException(e);
      }
    }
  }
  /**
   * Retrieves dependency information from Spring XML configuration files in a Maven project.
   *
   * @param project the project to analyze
   * @param dependentClasses A set of classes that already had their dependencies analyzed. This
   *     method will <b>ADD</b> all Spring-induced dependencies to this set and also use it to
   *     determine whether a given class needs to have its dependencies analyzed.
   * @throws Exception if a Spring XML file cannot be read or parsed
   */
  public void addSpringDependencyClasses(MavenProject project, final Set<String> dependentClasses)
      throws Exception {
    final SpringFileBeanVisitor beanVisitor =
        new DefaultSpringXmlBeanVisitor(this.resolver, dependentClasses);

    for (File springXml : fileLocator.locateSpringXmls(project)) {
      // try-with-resources guarantees the stream is closed even when parsing fails.
      try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(springXml))) {
        fileParser.parse(in, beanVisitor);
        if (log != null && log.isInfoEnabled()) {
          log.info("Scanned Spring XML " + springXml.getPath());
        }
      } catch (NoSpringXmlException ex) {
        // Not every candidate file is a Spring context definition; skip it quietly.
        if (log != null && log.isDebugEnabled()) {
          log.debug("Not a Spring XML file : " + springXml.getPath());
        }
      } catch (Exception e) {
        if (log != null) {
          log.error("Failed to parse Spring XML " + springXml.getPath() + " ...", e);
        }
        throw e;
      }
    }
  }
  /**
   * Adds every configured dependency set to the archive.
   *
   * @param archiver the archiver receiving the dependency files
   * @param configSource the assembly configuration
   * @throws ArchiveCreationException if adding files to the archive fails
   * @throws AssemblyFormattingException if output names cannot be formatted
   * @throws InvalidAssemblerConfigurationException if a dependency set is misconfigured
   */
  public void execute(final Archiver archiver, final AssemblerConfigurationSource configSource)
      throws ArchiveCreationException, AssemblyFormattingException,
          InvalidAssemblerConfigurationException {
    if ((dependencySets == null) || dependencySets.isEmpty()) {
      logger.debug("No dependency sets specified.");
      return;
    }

    @SuppressWarnings("unchecked")
    final List<Dependency> deps = project.getDependencies();
    if ((deps == null) || deps.isEmpty()) {
      // Deliberately no early return: a dependency set may still match the project's own
      // artifact or attachments even when no dependencies are declared.
      logger.debug(
          "Project " + project.getId() + " has no dependencies. Skipping dependency set addition.");
    }

    for (final DependencySet dependencySet : dependencySets) {
      addDependencySet(dependencySet, archiver, configSource);
    }
  }
  /** Add jar files in libs into the project classpath. */
  private void addLibsJarsToClassPath(
      UnpackedLibHelper helper, MavenProject project, Artifact artifact)
      throws MavenExecutionException {
    try {
      final File unpackLibFolder = helper.getUnpackedLibFolder(artifact);
      final File artifactFile = helper.getArtifactToFile(artifact);
      // try-with-resources: the original code leaked the ZipFile handle.
      try (ZipFile zipFile = new ZipFile(artifactFile)) {
        final Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
          final ZipEntry entry = entries.nextElement();
          final String entryName = entry.getName();

          // Only jar files under 'libs' directory to be processed.
          if (Pattern.matches("^libs/.+\\.jar$", entryName)) {
            final File libsJarFile = new File(unpackLibFolder, entryName);
            log.warn("Adding jar from libs folder to classpath: " + libsJarFile);

            // In order to satisfy the LifecycleDependencyResolver on execution up to a phase that
            // has a Mojo requiring dependency resolution I need to create a dummy classesJar here.
            if (!libsJarFile.getParentFile().exists()) {
              libsJarFile.getParentFile().mkdirs();
            }
            libsJarFile.createNewFile();

            // Add the jar to the classpath.
            final Dependency dependency =
                createSystemScopeDependency(artifact, libsJarFile, libsJarFile.getName());

            project.getModel().addDependency(dependency);
            addedJarFromLibs = true;
          }
        }
      }
    } catch (MojoExecutionException e) {
      // Best-effort: classpath augmentation failures are not fatal, but keep the cause visible.
      log.debug("Could not resolve artifact file for " + artifact, e);
    } catch (IOException e) {
      // Covers ZipException too; log the exception instead of a bare "Error".
      log.debug("Could not process zip entries of " + artifact, e);
    }
  }
  /** Add the dependent library classes to the project classpath. */
  private void addClassesToClasspath(
      UnpackedLibHelper helper, MavenProject project, Artifact artifact)
      throws MavenExecutionException {
    // This is the location where GenerateSourcesMojo will later extract the real classes jar.
    final File classesJar = helper.getUnpackedClassesJar(artifact);
    log.debug("Adding to classpath : " + classesJar);

    // Create a placeholder jar so the LifecycleDependencyResolver can resolve this dependency
    // during phases that run before the real classes jar has been extracted.
    classesJar.getParentFile().mkdirs();
    try {
      classesJar.createNewFile();
      log.debug("Created dummy " + classesJar.getName() + " exist=" + classesJar.exists());
    } catch (IOException e) {
      throw new MavenExecutionException(
          "Could not add " + classesJar.getName() + " as dependency", e);
    }

    // Register the placeholder as a system-scope dependency of the project.
    project.getModel().addDependency(createSystemScopeDependency(artifact, classesJar, null));
  }
  /**
   * Looks up the plugins bound by default to all lifecycles for the given packaging.
   *
   * @param packaging the project packaging (e.g. {@code jar}, {@code war})
   * @return the plugins bound by default, or {@code null} if no lifecycle mapping exists for the
   *     packaging
   */
  public Set<Plugin> getPluginsBoundByDefaultToAllLifecycles(String packaging) {
    if (logger.isDebugEnabled()) {
      // Fixed typo in the debug message: "lifecyle" -> "lifecycle".
      logger.debug(
          "Looking up lifecycle mappings for packaging "
              + packaging
              + " from "
              + Thread.currentThread().getContextClassLoader());
    }

    LifecycleMapping lifecycleMappingForPackaging = lifecycleMappings.get(packaging);

    if (lifecycleMappingForPackaging == null) {
      return null;
    }

    // LinkedHashMap keeps the plugins in lifecycle/phase declaration order.
    Map<Plugin, Plugin> plugins = new LinkedHashMap<>();

    for (Lifecycle lifecycle : getOrderedLifecycles()) {
      org.apache.maven.lifecycle.mapping.Lifecycle lifecycleConfiguration =
          lifecycleMappingForPackaging.getLifecycles().get(lifecycle.getId());

      // Prefer the packaging-specific configuration; fall back to the lifecycle defaults.
      Map<String, LifecyclePhase> phaseToGoalMapping = null;

      if (lifecycleConfiguration != null) {
        phaseToGoalMapping = lifecycleConfiguration.getPhases();
      } else if (lifecycle.getDefaultPhases() != null) {
        phaseToGoalMapping = lifecycle.getDefaultPhases();
      }

      if (phaseToGoalMapping != null) {
        for (Map.Entry<String, LifecyclePhase> goalsForLifecyclePhase :
            phaseToGoalMapping.entrySet()) {
          String phase = goalsForLifecyclePhase.getKey();
          LifecyclePhase goals = goalsForLifecyclePhase.getValue();
          if (goals != null) {
            parseLifecyclePhaseDefinitions(plugins, phase, goals);
          }
        }
      }
    }

    return plugins.keySet();
  }
 /** Forwards the stored message and cause to the logger at the recorded level. */
 public void delegate() {
   if (level == LEVEL_DEBUG) {
     logger.debug(message, cause);
   } else if (level == LEVEL_INFO) {
     logger.info(message, cause);
   } else if (level == LEVEL_WARN) {
     logger.warn(message, cause);
   } else if (level == LEVEL_ERROR) {
     logger.error(message, cause);
   } else if (level == LEVEL_FATAL) {
     logger.fatalError(message, cause);
   } else {
     // An unrecognized level is a programming error.
     throw new Error();
   }
 }
  /**
   * Creates a new class realm in the shared class world, disambiguating the id with a random
   * suffix when the requested id is already taken.
   */
  private ClassRealm newRealm(String id) {
    ClassWorld world = getClassWorld();

    synchronized (world) {
      Random suffixSource = new Random();
      String candidateId = id;

      while (true) {
        try {
          ClassRealm realm = world.newRealm(candidateId, null);

          if (logger.isDebugEnabled()) {
            logger.debug("Created new class realm " + candidateId);
          }

          return realm;
        } catch (DuplicateRealmException e) {
          // Id collision: retry with a random suffix until a free id is found.
          candidateId = id + '-' + suffixSource.nextInt();
        }
      }
    }
  }
  /**
   * Adds a single dependency artifact to the archive, applying the dependency set's output
   * location, name mapping, file/directory modes and unpack options.
   */
  private void addNormalArtifact(
      final DependencySet dependencySet,
      final Artifact depArtifact,
      final MavenProject depProject,
      final Archiver archiver,
      final AssemblerConfigurationSource configSource)
      throws AssemblyFormattingException, ArchiveCreationException {
    logger.debug("Adding dependency artifact " + depArtifact.getId() + ".");

    // Configure an archive task for this single artifact.
    final AddArtifactTask artifactTask = new AddArtifactTask(depArtifact, logger);
    artifactTask.setProject(depProject);
    artifactTask.setModuleProject(moduleProject);
    artifactTask.setModuleArtifact(moduleArtifact);
    artifactTask.setOutputDirectory(dependencySet.getOutputDirectory(), defaultOutputDirectory);
    artifactTask.setFileNameMapping(
        dependencySet.getOutputFileNameMapping(), defaultOutputFileNameMapping);

    // A mode of -1 means "not configured"; leave the task defaults in place.
    final int directoryMode =
        TypeConversionUtils.modeToInt(dependencySet.getDirectoryMode(), logger);
    if (directoryMode != -1) {
      artifactTask.setDirectoryMode(directoryMode);
    }

    final int fileMode = TypeConversionUtils.modeToInt(dependencySet.getFileMode(), logger);
    if (fileMode != -1) {
      artifactTask.setFileMode(fileMode);
    }

    final boolean unpack = dependencySet.isUnpack();
    artifactTask.setUnpack(unpack);

    // Include/exclude patterns only apply when the artifact is unpacked.
    final UnpackOptions unpackOptions = dependencySet.getUnpackOptions();
    if (unpack && (unpackOptions != null)) {
      artifactTask.setIncludes(unpackOptions.getIncludes());
      artifactTask.setExcludes(unpackOptions.getExcludes());
    }

    artifactTask.execute(archiver, configSource);
  }
  /**
   * Returns the Set of APKLIB, AAR, APK (direct or transitive) dependencies of the supplied
   * artifact.
   *
   * <p>The project is searched until artifact is found and then the library dependencies are looked
   * for recursively.
   *
   * @param session MavenSession in which to resolve the artifacts.
   * @param repositorySystem RepositorySystem with which to resolve the artifacts.
   * @param artifact Artifact for whom to get the dependencies.
   * @return Set of APK, APKLIB and AAR dependencies.
   * @throws org.apache.maven.plugin.MojoExecutionException if it couldn't resolve any of the
   *     dependencies.
   */
  public Set<Artifact> getLibraryDependenciesFor(
      MavenSession session, RepositorySystem repositorySystem, Artifact artifact)
      throws MojoExecutionException {
    log.debug("MavenSession = " + session + "  repositorySystem = " + repositorySystem);

    // Resolve the artifact's transitive dependency closure, excluding the artifact itself.
    final ArtifactResolutionRequest request = new ArtifactResolutionRequest();
    request.setArtifact(artifact);
    request.setResolveRoot(false); // Don't include source artifact in result
    request.setResolveTransitively(true); // Include direct plus transitive dependencies.
    request.setServers(session.getRequest().getServers());
    request.setMirrors(session.getRequest().getMirrors());
    request.setProxies(session.getRequest().getProxies());
    request.setLocalRepository(session.getLocalRepository());
    request.setRemoteRepositories(session.getCurrentProject().getRemoteArtifactRepositories());

    final ArtifactResolutionResult resolution = repositorySystem.resolve(request);

    // Keep only the Android library/package types.
    final Set<Artifact> libraryDeps = new HashSet<Artifact>();
    for (final Artifact candidate : resolution.getArtifacts()) {
      final String type = candidate.getType();
      if (type.equals(APKLIB) || type.equals(AAR) || type.equals(APK)) {
        libraryDeps.add(candidate);
      }
    }

    return libraryDeps;
  }
  /**
   * Lifecycle hook run after the reactor projects have been read.
   *
   * <p>For every project with Android packaging, registers placeholder classpath entries
   * (dummy classes jars / libs jars) for its AAR, APK and APKLIB dependencies so that later
   * dependency resolution succeeds before the real files have been extracted.
   *
   * @param session the current Maven session
   * @throws MavenExecutionException if a placeholder classes jar cannot be created
   */
  @Override
  public void afterProjectsRead(MavenSession session) throws MavenExecutionException {
    log.debug("");
    log.debug("ClasspathModifierLifecycleParticipant#afterProjectsRead - start");
    log.debug("");

    log.debug("CurrentProject=" + session.getCurrentProject());
    final List<MavenProject> projects = session.getProjects();
    final DependencyResolver dependencyResolver =
        new DependencyResolver(log, dependencyGraphBuilder);
    final ArtifactResolverHelper artifactResolverHelper =
        new ArtifactResolverHelper(artifactResolver, log);

    for (MavenProject project : projects) {
      log.debug("");
      log.debug("project=" + project.getArtifact());

      if (!AndroidExtension.isAndroidPackaging(project.getPackaging())) {
        continue; // do not modify classpath if not an android project.
      }

      final UnpackedLibHelper helper = new UnpackedLibHelper(artifactResolverHelper, project, log);

      final Set<Artifact> artifacts;

      // If there is an extension ClassRealm loaded for this project then use that
      // as the ContextClassLoader so that Wagon extensions can be used to resolves dependencies.
      final ClassLoader projectClassLoader =
          (project.getClassRealm() != null)
              ? project.getClassRealm()
              : Thread.currentThread().getContextClassLoader();

      // Swap the context classloader only for the duration of dependency resolution,
      // restoring the original one afterwards even on failure.
      final ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
      try {
        Thread.currentThread().setContextClassLoader(projectClassLoader);
        artifacts = dependencyResolver.getProjectDependenciesFor(project, session);
      } catch (DependencyGraphBuilderException e) {
        // Nothing to do. The resolution failure will be displayed by the standard resolution
        // mechanism.
        continue;
      } finally {
        Thread.currentThread().setContextClassLoader(originalClassLoader);
      }

      log.debug("projects deps: : " + artifacts);
      for (Artifact artifact : artifacts) {
        final String type = artifact.getType();
        if (type.equals(AndroidExtension.AAR)) {
          // An AAR lib contains a classes jar that needs to be added to the classpath.
          // Create a placeholder classes.jar and add it to the compile classpath.
          // It will replaced with the real classes.jar by GenerateSourcesMojo.
          addClassesToClasspath(helper, project, artifact);

          if (includeLibsJarsForAar) {
            // Add jar files in 'libs' into classpath.
            addLibsJarsToClassPath(helper, project, artifact);
          }
        } else if (type.equals(AndroidExtension.APK)) {
          // The only time that an APK will likely be a dependency is when this an an APK test
          // project.
          // So add a placeholder (we cannot resolve the actual dep pre build) to the compile
          // classpath.
          // The placeholder will be replaced with the real APK jar later.
          addClassesToClasspath(helper, project, artifact);
        } else if (type.equals(AndroidExtension.APKLIB) && includeLibsJarsForApklib) {
          // Add jar files in 'libs' into classpath.
          addLibsJarsToClassPath(helper, project, artifact);
        }
      }
    }

    // addedJarFromLibs is set by addLibsJarsToClassPath when a libs jar placeholder was added.
    if (addedJarFromLibs) {
      log.warn(
          "Transitive dependencies should really be provided by Maven dependency management.\n"
              + "          We suggest you to ask the above providers to package their component properly.\n"
              + "          Things may break at compile and/or runtime due to multiple copies of incompatible libraries.");
    }
    log.debug("");
    log.debug("ClasspathModifierLifecycleParticipant#afterProjectsRead - finish");
  }
  /**
   * Resolves the set of artifacts a dependency set should operate on.
   *
   * <p>Starts from the pre-resolved artifacts, optionally adds the project's own artifact and its
   * attached artifacts, then applies the dependency set's scope and include/exclude filtering.
   *
   * @param dependencySet the dependency set configuration
   * @return the filtered artifacts, never {@code null}
   * @throws InvalidAssemblerConfigurationException if filtering detects an invalid configuration
   */
  protected Set<Artifact> resolveDependencyArtifacts(final DependencySet dependencySet)
      throws InvalidAssemblerConfigurationException {
    final Set<Artifact> dependencyArtifacts = new LinkedHashSet<Artifact>();
    if (resolvedArtifacts != null) {
      dependencyArtifacts.addAll(resolvedArtifacts);
    }

    if (dependencySet.isUseProjectArtifact()) {
      final Artifact projectArtifact = project.getArtifact();
      if ((projectArtifact != null) && (projectArtifact.getFile() != null)) {
        dependencyArtifacts.add(projectArtifact);
      } else {
        logger.warn(
            "Cannot include project artifact: "
                + projectArtifact
                + "; it doesn't have an associated file or directory.");
      }
    }

    if (dependencySet.isUseProjectAttachments()) {
      @SuppressWarnings("unchecked")
      final List<Artifact> attachments = project.getAttachedArtifacts();
      if (attachments != null) {
        for (final Artifact attachment : attachments) {
          if (attachment.getFile() != null) {
            dependencyArtifacts.add(attachment);
          } else {
            // BUGFIX: report the attachment's id here; the original printed project.getId() twice.
            logger.warn(
                "Cannot include attached artifact: "
                    + attachment.getId()
                    + " for project: "
                    + project.getId()
                    + "; it doesn't have an associated file or directory.");
          }
        }
      }
    }

    if (dependencySet.isUseTransitiveFiltering()) {
      logger.debug("Filtering dependency artifacts USING transitive dependency path information.");
    } else {
      logger.debug(
          "Filtering dependency artifacts WITHOUT transitive dependency path information.");
    }

    final ScopeArtifactFilter filter = new ScopeArtifactFilter(dependencySet.getScope());

    FilterUtils.filterArtifacts(
        dependencyArtifacts,
        dependencySet.getIncludes(),
        dependencySet.getExcludes(),
        dependencySet.isUseStrictFiltering(),
        dependencySet.isUseTransitiveFiltering(),
        logger,
        filter);

    return dependencyArtifacts;
  }
  /**
   * Unpacks a dependency artifact into a per-artifact working directory and adds its (filtered)
   * contents to the archive as a file set.
   *
   * <p>The unpack step is skipped when the working directory already exists from a previous run.
   *
   * @param dependencySet the dependency set supplying unpack options, modes and output settings
   * @param depArtifact the artifact to unpack and add
   * @param depProject the Maven project of the dependency (used for name interpolation)
   * @param archiver the archiver receiving the file set
   * @param configSource assembly configuration (working directory, project, permissions)
   * @throws ArchiveCreationException if no un-archiver exists for the artifact or extraction fails
   * @throws AssemblyFormattingException if the output file name mapping cannot be evaluated
   */
  private void addFilteredUnpackedArtifact(
      final DependencySet dependencySet,
      final Artifact depArtifact,
      final MavenProject depProject,
      final Archiver archiver,
      final AssemblerConfigurationSource configSource)
      throws ArchiveCreationException, AssemblyFormattingException {
    logger.debug(
        "Adding dependency artifact "
            + depArtifact.getId()
            + " after filtering the unpacked contents.");

    // Build a unique per-artifact directory name: groupId_artifactId_version[_classifier].type
    final StringBuilder sb =
        new StringBuilder()
            .append(depArtifact.getGroupId())
            .append("_")
            .append(depArtifact.getArtifactId())
            .append("_")
            .append(depArtifact.getVersion());

    final String classifier = depArtifact.getClassifier();
    if (classifier != null) {
      sb.append("_").append(classifier);
    }

    sb.append(".").append(depArtifact.getType());

    final File dir = new File(configSource.getWorkingDirectory(), sb.toString());
    if (dir.exists()) {
      // Reuse the previously unpacked contents instead of extracting again.
      logger.debug(
          "NOT unpacking: "
              + depArtifact.getId()
              + ". Directory already exists in workdir:\n\t"
              + dir.getAbsolutePath());
    } else {
      dir.mkdirs();

      UnArchiver unarchiver;
      try {
        unarchiver = archiverManager.getUnArchiver(depArtifact.getFile());
      } catch (final NoSuchArchiverException e) {
        throw new ArchiveCreationException(
            "Failed to retrieve un-archiver for: "
                + depArtifact.getId()
                + ". Dependency filtering cannot proceed.",
            e);
      }

      unarchiver.setDestDirectory(dir);
      unarchiver.setOverwrite(true);
      unarchiver.setSourceFile(depArtifact.getFile());
      unarchiver.setIgnorePermissions(configSource.isIgnorePermissions());

      try {
        unarchiver.extract();
      } catch (final ArchiverException e) {
        throw new ArchiveCreationException(
            "Failed to unpack dependency archive: "
                + depArtifact.getId()
                + ". Dependency filtering cannot proceed.",
            e);
      }
    }

    // NOTE(review): assumes getUnpackOptions() is non-null here; callers appear to invoke this
    // only when filtering/line-ending options are set -- confirm before reusing elsewhere.
    final UnpackOptions opts = dependencySet.getUnpackOptions();

    // Describe the unpacked directory as a file set carrying the dependency set's
    // includes/excludes, modes and filtering flags.
    final FileSet fs = new FileSet();
    fs.setDirectory(dir.getAbsolutePath());
    fs.setDirectoryMode(dependencySet.getDirectoryMode());
    fs.setExcludes(opts.getExcludes());
    fs.setFileMode(dependencySet.getFileMode());
    fs.setFiltered(opts.isFiltered());
    fs.setIncludes(opts.getIncludes());

    // Fall back to the assembly-level defaults when the dependency set leaves these unset.
    String outDir = dependencySet.getOutputDirectory();
    if (outDir == null) {
      outDir = defaultOutputDirectory;
    }

    String filenameMapping = dependencySet.getOutputFileNameMapping();
    if (filenameMapping == null) {
      filenameMapping = defaultOutputFileNameMapping;
    }

    // Interpolate ${...} expressions in the mapping against the artifact/project context.
    filenameMapping =
        AssemblyFormatUtils.evaluateFileNameMapping(
            filenameMapping,
            depArtifact,
            configSource.getProject(),
            moduleProject,
            moduleArtifact,
            depProject,
            configSource);

    final String outputLocation = new File(outDir, filenameMapping).getPath();

    fs.setOutputDirectory(outputLocation);

    fs.setLineEnding(opts.getLineEnding());
    fs.setUseDefaultExcludes(opts.isUseDefaultExcludes());

    final AddFileSetsTask task = new AddFileSetsTask(fs);
    task.setProject(depProject);
    task.setModuleProject(moduleProject);
    task.setLogger(logger);

    task.execute(archiver, configSource);
  }
  /**
   * Processes one dependency set: resolves its artifacts and adds each to the archive, either as
   * an archive entry, a filtered unpacked directory, or a plain (non-archive) file.
   *
   * @param dependencySet the dependency set configuration
   * @param archiver the archiver receiving the files
   * @param configSource assembly configuration
   * @throws AssemblyFormattingException if output names cannot be formatted
   * @throws ArchiveCreationException if adding to the archive fails
   * @throws InvalidAssemblerConfigurationException if the dependency set is misconfigured
   */
  protected void addDependencySet(
      final DependencySet dependencySet,
      final Archiver archiver,
      final AssemblerConfigurationSource configSource)
      throws AssemblyFormattingException, ArchiveCreationException,
          InvalidAssemblerConfigurationException {
    logger.debug("Processing DependencySet (output=" + dependencySet.getOutputDirectory() + ")");

    if (!dependencySet.isUseTransitiveDependencies() && dependencySet.isUseTransitiveFiltering()) {
      logger.warn(
          "DependencySet has nonsensical configuration: useTransitiveDependencies == false "
              + "AND useTransitiveFiltering == true. Transitive filtering flag will be ignored.");
    }

    final Set<Artifact> dependencyArtifacts = resolveDependencyArtifacts(dependencySet);

    // Content filtering (property interpolation or line-ending conversion) requires unpacking
    // each artifact; otherwise verify the output config can hold multiple artifacts.
    boolean filterContents = false;
    final UnpackOptions opts = dependencySet.getUnpackOptions();
    if (dependencySet.isUnpack()
        && opts != null
        && (opts.isFiltered() || opts.getLineEnding() != null)) {
      filterContents = true;
    } else if (dependencyArtifacts.size() > 1) {
      checkMultiArtifactOutputConfig(dependencySet);
    }

    logger.debug("Adding " + dependencyArtifacts.size() + " dependency artifacts.");

    for (final Artifact depArtifact : dependencyArtifacts) {
      // Try to build the dependency's full project; fall back to a stub on failure so the
      // assembly can still proceed.
      MavenProject depProject;
      try {
        depProject =
            projectBuilder.buildFromRepository(
                depArtifact,
                configSource.getRemoteRepositories(),
                configSource.getLocalRepository());
      } catch (final ProjectBuildingException e) {
        logger.debug(
            "Error retrieving POM of module-dependency: "
                + depArtifact.getId()
                + "; Reason: "
                + e.getMessage()
                + "\n\nBuilding stub project instance.");

        depProject = buildProjectStub(depArtifact);
      }

      if (NON_ARCHIVE_DEPENDENCY_TYPES.contains(depArtifact.getType())) {
        addNonArchiveDependency(depArtifact, depProject, dependencySet, archiver, configSource);
      } else {
        if (filterContents) {
          addFilteredUnpackedArtifact(
              dependencySet, depArtifact, depProject, archiver, configSource);
        } else {
          addNormalArtifact(dependencySet, depArtifact, depProject, archiver, configSource);
        }
      }
    }
  }
    /**
     * Interpolates every eligible field of {@code target}, then recurses up its class hierarchy.
     *
     * <p>String fields are interpolated in place; collection and map elements are interpolated
     * when they are strings, otherwise queued in {@code interpolationTargets} for later
     * traversal (arrays are handled immediately via {@code evaluateArray}). Unmodifiable
     * collections/maps are skipped with a debug message. Declared fields are cached per class in
     * {@code FIELDS_BY_CLASS}.
     *
     * @param cls the class whose declared fields to process; {@code null} ends the recursion
     * @param target the object instance whose fields are interpolated
     * @throws ModelInterpolationException if a field cannot be read or written via reflection
     */
    @SuppressWarnings("unchecked")
    private void traverseObjectWithParents(Class<?> cls, Object target)
        throws ModelInterpolationException {
      if (cls == null) {
        return;
      }

      if (cls.isArray()) {
        evaluateArray(target);
      } else if (isQualifiedForInterpolation(cls)) {
        // Cache the declared fields; reflection lookup is expensive and classes repeat often.
        Field[] fields = FIELDS_BY_CLASS.get(cls);
        if (fields == null) {
          fields = cls.getDeclaredFields();
          FIELDS_BY_CLASS.put(cls, fields);
        }

        for (Field field : fields) {
          Class<?> type = field.getType();
          if (isQualifiedForInterpolation(field, type)) {
            // Temporarily open the field for access; restored in the finally block below.
            boolean isAccessible = field.isAccessible();
            field.setAccessible(true);
            try {
              try {
                if (String.class == type) {
                  String value = (String) field.get(target);
                  if (value != null) {
                    String interpolated =
                        modelInterpolator.interpolateInternal(
                            value, valueSources, postProcessors, debugEnabled);

                    if (!interpolated.equals(value)) {
                      field.set(target, interpolated);
                    }
                  }
                } else if (Collection.class.isAssignableFrom(type)) {
                  Collection<Object> c = (Collection<Object>) field.get(target);
                  if (c != null && !c.isEmpty()) {
                    // Rebuild the collection in place: clear it, then re-add each element,
                    // interpolating the string elements as we go.
                    List<Object> originalValues = new ArrayList<>(c);
                    try {
                      c.clear();
                    } catch (UnsupportedOperationException e) {
                      if (debugEnabled && logger != null) {
                        logger.debug(
                            "Skipping interpolation of field: "
                                + field
                                + " in: "
                                + cls.getName()
                                + "; it is an unmodifiable collection.");
                      }
                      continue;
                    }

                    for (Object value : originalValues) {
                      if (value != null) {
                        if (String.class == value.getClass()) {
                          String interpolated =
                              modelInterpolator.interpolateInternal(
                                  (String) value, valueSources, postProcessors, debugEnabled);

                          if (!interpolated.equals(value)) {
                            c.add(interpolated);
                          } else {
                            c.add(value);
                          }
                        } else {
                          // Non-string element: keep it and schedule it for traversal.
                          c.add(value);
                          if (value.getClass().isArray()) {
                            evaluateArray(value);
                          } else {
                            interpolationTargets.add(value);
                          }
                        }
                      } else {
                        // add the null back in...not sure what else to do...
                        c.add(value);
                      }
                    }
                  }
                } else if (Map.class.isAssignableFrom(type)) {
                  Map<Object, Object> m = (Map<Object, Object>) field.get(target);
                  if (m != null && !m.isEmpty()) {
                    for (Map.Entry<Object, Object> entry : m.entrySet()) {
                      Object value = entry.getValue();

                      if (value != null) {
                        if (String.class == value.getClass()) {
                          String interpolated =
                              modelInterpolator.interpolateInternal(
                                  (String) value, valueSources, postProcessors, debugEnabled);

                          if (!interpolated.equals(value)) {
                            try {
                              // Map values are replaced through the entry; unmodifiable maps
                              // are tolerated and merely logged.
                              entry.setValue(interpolated);
                            } catch (UnsupportedOperationException e) {
                              if (debugEnabled && logger != null) {
                                logger.debug(
                                    "Skipping interpolation of field: "
                                        + field
                                        + " (key: "
                                        + entry.getKey()
                                        + ") in: "
                                        + cls.getName()
                                        + "; it is an unmodifiable collection.");
                              }
                            }
                          }
                        } else {
                          if (value.getClass().isArray()) {
                            evaluateArray(value);
                          } else {
                            interpolationTargets.add(value);
                          }
                        }
                      }
                    }
                  }
                } else {
                  // Any other object-typed field: traverse it (or its array contents) later.
                  Object value = field.get(target);
                  if (value != null) {
                    if (field.getType().isArray()) {
                      evaluateArray(value);
                    } else {
                      interpolationTargets.add(value);
                    }
                  }
                }
              } catch (IllegalArgumentException | IllegalAccessException e) {
                throw new ModelInterpolationException(
                    "Failed to interpolate field: " + field + " on class: " + cls.getName(), e);
              }
            } finally {
              // Restore the field's original accessibility.
              field.setAccessible(isAccessible);
            }
          }
        }

        // Superclass fields are interpolated too.
        traverseObjectWithParents(cls.getSuperclass(), target);
      }
    }
  /**
   * Creates a new class realm with the specified parent and imports.
   *
   * @param baseRealmId The base id to use for the new realm, must not be {@code null}.
   * @param type The type of the class realm, must not be {@code null}.
   * @param parent The parent realm for the new realm, may be {@code null} to use the Maven core
   *     realm.
   * @param imports The packages/types to import from the parent realm, may be {@code null}.
   * @param importXpp3Dom Whether to additionally import the Xpp3Dom classes into the new realm.
   * @param artifacts The artifacts to add to the realm, may be {@code null}. Unresolved artifacts
   *     (i.e. with a missing file) will automatically be excluded from the realm.
   * @return The created class realm, never {@code null}.
   */
  private ClassRealm createRealm(
      String baseRealmId,
      RealmType type,
      ClassLoader parent,
      List<String> imports,
      boolean importXpp3Dom,
      List<Artifact> artifacts) {
    Set<String> candidateIds = new LinkedHashSet<String>();

    List<ClassRealmConstituent> realmContents = new ArrayList<ClassRealmConstituent>();

    if (artifacts != null) {
      for (Artifact artifact : artifacts) {
        candidateIds.add(getId(artifact));
        // Only resolved artifacts (those with a file) can actually contribute to the realm.
        if (artifact.getFile() != null) {
          realmContents.add(new ArtifactClassRealmConstituent(artifact));
        }
      }
    }

    // Work on a private, mutable copy so delegates can adjust the import list freely.
    imports = (imports != null) ? new ArrayList<String>(imports) : new ArrayList<String>();

    ClassRealm classRealm = newRealm(baseRealmId);

    if (parent == null) {
      classRealm.setParentRealm(getMavenRealm());
    } else {
      classRealm.setParentClassLoader(parent);
    }

    // Let registered delegates customize the realm (imports/constituents) before population.
    List<ClassRealmManagerDelegate> delegates = getDelegates();
    if (!delegates.isEmpty()) {
      ClassRealmRequest request =
          new DefaultClassRealmRequest(type, parent, imports, realmContents);

      for (ClassRealmManagerDelegate delegate : delegates) {
        delegate.setupRealm(classRealm, request);
      }
    }

    if (importXpp3Dom) {
      importXpp3Dom(classRealm);
    }

    if (!imports.isEmpty()) {
      ClassLoader importSource = classRealm.getParentClassLoader();

      if (logger.isDebugEnabled()) {
        logger.debug("Importing packages into class realm " + classRealm.getId());
      }

      for (String importSpec : imports) {
        if (logger.isDebugEnabled()) {
          logger.debug("  Imported: " + importSpec);
        }

        classRealm.importFrom(importSource, importSpec);
      }
    }

    Set<String> includedIds = populateRealm(classRealm, realmContents);

    if (logger.isDebugEnabled()) {
      // Anything requested but not populated (i.e. without a file) was excluded.
      candidateIds.removeAll(includedIds);

      for (String excludedId : candidateIds) {
        logger.debug("  Excluded: " + excludedId);
      }
    }

    return classRealm;
  }
  /**
   * Determines the effective target platform configuration for the given project.
   *
   * <p>Resolution order for the target environments: the explicit
   * {@code org.eclipse.tycho:target-platform-configuration} plugin configuration (if present),
   * then the project type's implicit target environment, and finally the current platform's
   * os/ws/arch as a platform-dependent fallback.
   *
   * @param session The current Maven session, used when resolving the configured target.
   * @param project The project whose target platform configuration is computed.
   * @return The target platform configuration, never {@code null}.
   */
  public TargetPlatformConfiguration getTargetPlatformConfiguration(
      MavenSession session, MavenProject project) {
    TargetPlatformConfiguration result = new TargetPlatformConfiguration();

    // Use org.eclipse.tycho:target-platform-configuration/configuration/environment, if provided
    Plugin plugin = project.getPlugin("org.eclipse.tycho:target-platform-configuration");

    if (plugin != null) {
      Xpp3Dom configuration = (Xpp3Dom) plugin.getConfiguration();
      if (configuration != null) {
        if (logger.isDebugEnabled()) {
          logger.debug(
              "target-platform-configuration for "
                  + project.toString()
                  + ":\n"
                  + configuration.toString());
        }

        // Copy each configured aspect of the plugin configuration onto the result.
        addTargetEnvironments(result, project, configuration);

        setTargetPlatformResolver(result, configuration);

        setTarget(result, session, project, configuration);

        setPomDependencies(result, configuration);

        setAllowConflictingDependencies(result, configuration);

        setDisableP2Mirrors(result, configuration);

        setExecutionEnvironment(result, configuration);

        readFilters(result, configuration);

        readExtraRequirements(result, configuration);

        setOptionalDependencies(result, configuration);

        setIncludePackedArtifacts(result, configuration);
      }
    }

    // No explicit environments: fall back to the project type's implicit target environment.
    if (result.getEnvironments().isEmpty()) {
      TychoProject projectType = projectTypes.get(project.getPackaging());
      if (projectType != null) {
        TargetEnvironment env = projectType.getImplicitTargetEnvironment(project);
        if (env != null) {
          if (logger.isDebugEnabled()) {
            logger.debug(
                "Implicit target environment for " + project.toString() + ": " + env.toString());
          }

          result.addEnvironment(env);
        }
      }
    }

    // Still nothing: derive an environment from the running platform and flag it as implicit.
    if (result.getEnvironments().isEmpty()) {
      // applying defaults
      logger.warn(
          "No explicit target runtime environment configuration. Build is platform dependent.");

      // Otherwise, use project or execution properties, if provided
      Properties properties =
          (Properties) project.getContextValue(TychoConstants.CTX_MERGED_PROPERTIES);

      // Otherwise, use current system os/ws/nl/arch
      String os = PlatformPropertiesUtils.getOS(properties);
      String ws = PlatformPropertiesUtils.getWS(properties);
      String arch = PlatformPropertiesUtils.getArch(properties);

      result.addEnvironment(new TargetEnvironment(os, ws, arch));

      result.setImplicitTargetEnvironment(true);
    } else {
      result.setImplicitTargetEnvironment(false);
    }

    return result;
  }
  /**
   * Copies all files from the source repository to the target repository, merging repository
   * metadata along the way.
   *
   * <p>The files are first downloaded into a temporary work directory, then zipped together with
   * a generated rename script, uploaded as a single archive, and finally unpacked and renamed on
   * the target host via remote shell commands.
   *
   * @param sourceRepository The repository to copy files from.
   * @param targetRepository The repository to copy files to; its wagon must implement
   *     {@link CommandExecutor}.
   * @param version The version used to name the work directory, zip archive and rename script.
   * @throws WagonException If a transfer or remote command fails.
   * @throws IOException If a local file operation fails or a metadata file is corrupt.
   */
  public void copy(Repository sourceRepository, Repository targetRepository, String version)
      throws WagonException, IOException {
    String prefix = "staging-plugin";

    String fileName = prefix + "-" + version + ".zip";

    String tempdir = System.getProperty("java.io.tmpdir");

    logger.debug("Writing all output to " + tempdir);

    // Create the renameScript script

    String renameScriptName = prefix + "-" + version + "-rename.sh";

    File renameScript = new File(tempdir, renameScriptName);

    // Work directory, recreated fresh for every invocation

    File basedir = new File(tempdir, prefix + "-" + version);

    FileUtils.deleteDirectory(basedir);

    basedir.mkdirs();

    Wagon sourceWagon = wagonManager.getWagon(sourceRepository);
    AuthenticationInfo sourceAuth = wagonManager.getAuthenticationInfo(sourceRepository.getId());

    sourceWagon.connect(sourceRepository, sourceAuth);

    logger.info("Looking for files in the source repository.");

    List<String> files = new ArrayList<String>();

    scan(sourceWagon, "", files);

    logger.info("Downloading files from the source repository to: " + basedir);

    for (String s : files) {

      // Skip version-control residue.
      if (s.contains(".svn")) {
        continue;
      }

      File f = new File(basedir, s);

      FileUtils.mkdir(f.getParentFile().getAbsolutePath());

      logger.info("Downloading file from the source repository: " + s);

      sourceWagon.get(s, f);
    }

    // ----------------------------------------------------------------------------
    // Now all the files are present locally and now we are going to grab the
    // metadata files from the targetRepositoryUrl and pull those down locally
    // so that we can merge the metadata.
    // ----------------------------------------------------------------------------

    logger.info("Downloading metadata from the target repository.");

    Wagon targetWagon = wagonManager.getWagon(targetRepository);

    if (!(targetWagon instanceof CommandExecutor)) {
      throw new CommandExecutionException(
          "Wagon class '"
              + targetWagon.getClass().getName()
              + "' in use for target repository is not a CommandExecutor");
    }

    AuthenticationInfo targetAuth = wagonManager.getAuthenticationInfo(targetRepository.getId());

    targetWagon.connect(targetRepository, targetAuth);

    File archive = new File(tempdir, fileName);

    for (String s : files) {

      if (s.startsWith("/")) {
        s = s.substring(1);
      }

      if (s.endsWith(MAVEN_METADATA)) {
        File emf = new File(basedir, s + IN_PROCESS_MARKER);

        try {
          targetWagon.get(s, emf);
        } catch (ResourceDoesNotExistException e) {
          // We don't have an equivalent on the targetRepositoryUrl side because we have something
          // new on the sourceRepositoryUrl side so just skip the metadata merging.

          continue;
        }

        try {
          mergeMetadata(emf);
        } catch (XmlPullParserException e) {
          // Keep the parser failure as the cause instead of flattening it to a message string.
          throw new IOException("Metadata file is corrupt " + s + " Reason: " + e.getMessage(), e);
        }
      }
    }

    Set<String> moveCommands = new TreeSet<String>();

    // ----------------------------------------------------------------------------
    // Create the Zip file that we will deploy to the targetRepositoryUrl stage
    // ----------------------------------------------------------------------------

    logger.info("Creating zip file.");

    // try-with-resources guarantees the streams are closed even if a step below throws.
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(archive))) {

      scanDirectory(basedir, basedir, zos, version, moveCommands);

      // ----------------------------------------------------------------------------
      // Create the renameScript script. This is as atomic as we can
      // ----------------------------------------------------------------------------

      logger.info("Creating rename script.");

      try (PrintWriter rw = new PrintWriter(new FileWriter(renameScript))) {
        for (String moveCommand : moveCommands) {
          // We use an explicit unix '\n' line-ending here instead of using the println() method.
          // Using println() will cause files and folders to have a '\r' at the end if the plugin
          // is run on Windows.
          rw.print(moveCommand + "\n");
        }
      }

      // Ship the rename script inside the archive as well.
      zos.putNextEntry(new ZipEntry(renameScript.getName()));

      try (InputStream is = new FileInputStream(renameScript)) {
        IOUtil.copy(is, zos);
      }
    }

    sourceWagon.disconnect();

    // Push the Zip to the target system

    logger.info("Uploading zip file to the target repository.");

    targetWagon.put(archive, fileName);

    logger.info("Unpacking zip file on the target machine.");

    String targetRepoBaseDirectory = targetRepository.getBasedir();

    // We use the super quiet option here as all the noise seems to kill/stall the connection

    String command =
        "unzip -o -qq -d "
            + targetRepoBaseDirectory
            + " "
            + targetRepoBaseDirectory
            + "/"
            + fileName;

    ((CommandExecutor) targetWagon).executeCommand(command);

    logger.info("Deleting zip file from the target repository.");

    command = "rm -f " + targetRepoBaseDirectory + "/" + fileName;

    ((CommandExecutor) targetWagon).executeCommand(command);

    logger.info("Running rename script on the target machine.");

    command = "cd " + targetRepoBaseDirectory + "; sh " + renameScriptName;

    ((CommandExecutor) targetWagon).executeCommand(command);

    logger.info("Deleting rename script from the target repository.");

    command = "rm -f " + targetRepoBaseDirectory + "/" + renameScriptName;

    ((CommandExecutor) targetWagon).executeCommand(command);

    targetWagon.disconnect();
  }
// Example #20
 /**
  * Logs the given message at debug level, if a logger has been set.
  *
  * @param message The message to log, may be {@code null}.
  */
 private void debug(String message) {
   // Braced per convention; guard against a null logger (logging is optional here).
   if (logger != null) {
     logger.debug(message);
   }
 }