/**
 * Retrieves dependency information from Spring XML configuration files in a Maven project.
 *
 * @param project the project to analyze
 * @param dependentClasses A set of classes that already had their dependencies analyzed. This
 *     method will <b>ADD</b> all Spring-induced dependencies to this set and also use it to
 *     determine whether a given class needs to have its dependencies analyzed.
 * @throws Exception if a Spring XML file cannot be read or parsed
 */
public void addSpringDependencyClasses(MavenProject project, final Set<String> dependentClasses)
        throws Exception {
    final SpringFileBeanVisitor beanVisitor =
            new DefaultSpringXmlBeanVisitor(this.resolver, dependentClasses);
    for (File springXml : fileLocator.locateSpringXmls(project)) {
        final BufferedInputStream in = new BufferedInputStream(new FileInputStream(springXml));
        try {
            fileParser.parse(in, beanVisitor);
            if (log != null && log.isInfoEnabled()) {
                log.info("Scanned Spring XML " + springXml.getPath());
            }
        } catch (NoSpringXmlException ex) {
            if (log != null && log.isDebugEnabled()) {
                log.debug("Not a Spring XML file : " + springXml.getPath());
            }
            // ok, not every candidate file is a Spring context
        } catch (Exception e) {
            if (log != null) {
                log.error("Failed to parse Spring XML " + springXml.getPath() + " ...", e);
            }
            throw e;
        } finally {
            in.close();
        }
    }
}
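// Illustrative usage sketch (not part of the original sources). "SpringXmlDependencyAnalyzer",
// "springAnalyzer" and "reactorProjects" are hypothetical names; the point is that the same
// mutable set is passed across invocations, so already-analyzed classes are skipped and newly
// discovered Spring-induced dependencies accumulate in place. Assumes java.util imports.
private void analyzeReactor(
        SpringXmlDependencyAnalyzer springAnalyzer, List<MavenProject> reactorProjects)
        throws Exception {
    Set<String> dependentClasses = new HashSet<String>();
    for (MavenProject module : reactorProjects) {
        springAnalyzer.addSpringDependencyClasses(module, dependentClasses);
    }
    // dependentClasses now holds the union of all Spring-induced dependencies.
}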
private Set<String> populateRealm(ClassRealm classRealm, List<ClassRealmConstituent> constituents) {
    Set<String> includedIds = new LinkedHashSet<String>();

    if (logger.isDebugEnabled()) {
        logger.debug("Populating class realm " + classRealm.getId());
    }

    for (ClassRealmConstituent constituent : constituents) {
        File file = constituent.getFile();
        String id = getId(constituent);
        includedIds.add(id);

        if (logger.isDebugEnabled()) {
            logger.debug(" Included: " + id);
        }

        try {
            classRealm.addURL(file.toURI().toURL());
        } catch (MalformedURLException e) {
            // Not going to happen
            logger.error(e.getMessage(), e);
        }
    }

    return includedIds;
}
/**
 * Ensures that the required ArtifactGrouping is present in the repository.
 *
 * @throws SrampAtomException
 * @throws SrampClientException
 */
private BaseArtifactType ensureArtifactGrouping() throws SrampClientException, SrampAtomException {
    String groupingName = getParamFromRepositoryUrl("artifactGrouping"); //$NON-NLS-1$
    if (groupingName == null || groupingName.trim().length() == 0) {
        logger.warn(Messages.i18n.format("NO_ARTIFACT_GROUPING_NAME")); //$NON-NLS-1$
        return null;
    }
    QueryResultSet query = client
            .buildQuery("/s-ramp/ext/ArtifactGrouping[@name = ?]") //$NON-NLS-1$
            .parameter(groupingName)
            .count(2)
            .query();
    if (query.size() > 1) {
        logger.warn(Messages.i18n.format("MULTIPLE_ARTIFACT_GROUPSING_FOUND", groupingName)); //$NON-NLS-1$
        return null;
    } else if (query.size() == 1) {
        ArtifactSummary summary = query.get(0);
        return client.getArtifactMetaData(summary.getType(), summary.getUuid());
    } else {
        ExtendedArtifactType groupingArtifact = new ExtendedArtifactType();
        groupingArtifact.setArtifactType(BaseArtifactEnum.EXTENDED_ARTIFACT_TYPE);
        groupingArtifact.setExtendedType("ArtifactGrouping"); //$NON-NLS-1$
        groupingArtifact.setName(groupingName);
        groupingArtifact.setDescription(
                Messages.i18n.format("ARTIFACT_GROUPING_DESCRIPTION")); //$NON-NLS-1$
        return client.createArtifact(groupingArtifact);
    }
}
private void logStats(MavenSession session) {
    logger.info(chars('-', LINE_LENGTH));

    Date finish = new Date();
    long time = finish.getTime() - session.getRequest().getStartTime().getTime();
    String wallClock = session.getRequest().isThreadConfigurationPresent() ? " (Wall Clock)" : "";

    logger.info("Total time: " + getFormattedTime(time) + wallClock);
    logger.info("Finished at: " + finish);

    System.gc();

    Runtime r = Runtime.getRuntime();
    long MB = 1024 * 1024;

    logger.info("Final Memory: " + (r.totalMemory() - r.freeMemory()) / MB + "M/"
            + r.totalMemory() / MB + "M");
}
private void multiThreadedProjectTaskSegmentBuild(
        ConcurrencyDependencyGraph analyzer,
        ReactorContext reactorContext,
        MavenSession rootSession,
        CompletionService<ProjectSegment> service,
        TaskSegment taskSegment,
        Map<MavenProject, ProjectSegment> projectBuildList,
        ThreadOutputMuxer muxer) {

    // schedule independent projects
    for (MavenProject mavenProject : analyzer.getRootSchedulableBuilds()) {
        ProjectSegment projectSegment = projectBuildList.get(mavenProject);
        logger.debug("Scheduling: " + projectSegment.getProject());
        Callable<ProjectSegment> cb =
                createBuildCallable(rootSession, projectSegment, reactorContext, taskSegment, muxer);
        service.submit(cb);
    }

    // for each finished project
    for (int i = 0; i < analyzer.getNumberOfBuilds(); i++) {
        try {
            ProjectSegment projectBuild = service.take().get();
            if (reactorContext.getReactorBuildStatus().isHalted()) {
                break;
            }
            final List<MavenProject> newItemsThatCanBeBuilt =
                    analyzer.markAsFinished(projectBuild.getProject());
            for (MavenProject mavenProject : newItemsThatCanBeBuilt) {
                ProjectSegment scheduledDependent = projectBuildList.get(mavenProject);
                logger.debug("Scheduling: " + scheduledDependent);
                Callable<ProjectSegment> cb = createBuildCallable(
                        rootSession, scheduledDependent, reactorContext, taskSegment, muxer);
                service.submit(cb);
            }
        } catch (InterruptedException e) {
            rootSession.getResult().addException(e);
            break;
        } catch (ExecutionException e) {
            // TODO MNG-5766 changes likely made this redundant
            rootSession.getResult().addException(e);
            break;
        }
    }

    // cancel outstanding builds (if any) - this can happen if an exception is thrown in above block
    Future<ProjectSegment> unprocessed;
    while ((unprocessed = service.poll()) != null) {
        try {
            unprocessed.get();
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}
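// Illustrative sketch (not from the original sources): the scheduling loop above follows the
// standard CompletionService submit/take pattern, shown here in a minimal, self-contained form
// with integers standing in for ProjectSegments. All names below are hypothetical; assumes
// java.util.concurrent imports.
private static void completionServicePatternSketch() throws InterruptedException, ExecutionException {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    CompletionService<Integer> completion = new ExecutorCompletionService<Integer>(pool);
    int submitted = 4;
    for (int task = 0; task < submitted; task++) {
        final int id = task;
        completion.submit(new Callable<Integer>() {
            @Override
            public Integer call() {
                return id * id; // stand-in for building one project segment
            }
        });
    }
    for (int i = 0; i < submitted; i++) {
        Integer finished = completion.take().get(); // blocks until the next task finishes
        // the real code marks the corresponding project as finished here and submits any
        // downstream projects that just became schedulable
    }
    pool.shutdown();
}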
@Override
public void mojoSkipped(ExecutionEvent event) {
    if (logger.isWarnEnabled()) {
        logger.warn("Goal " + event.getMojoExecution().getGoal()
                + " requires online mode for execution but Maven is currently offline, skipping");
    }
}
private void logResult(MavenSession session) {
    logger.info(chars('-', LINE_LENGTH));

    if (session.getResult().hasExceptions()) {
        logger.info("BUILD FAILURE");
    } else {
        logger.info("BUILD SUCCESS");
    }
}
@Override
public void close() throws Exception {
    super.close();
    // Displaying the events
    logger.info(" ** Events received by m2github ** ");
    for (String className : classNames.keySet()) {
        logger.info(" ** " + className + " : " + classNames.get(className));
    }
}
@Override
public void projectStarted(ExecutionEvent event) {
    if (logger.isInfoEnabled()) {
        logger.info(chars(' ', LINE_LENGTH));
        logger.info(chars('-', LINE_LENGTH));
        logger.info("Building " + event.getProject().getName() + " " + event.getProject().getVersion());
        logger.info(chars('-', LINE_LENGTH));
    }
}
@Override
public void projectSkipped(ExecutionEvent event) {
    if (logger.isInfoEnabled()) {
        logger.info(chars(' ', LINE_LENGTH));
        logger.info(chars('-', LINE_LENGTH));
        logger.info("Skipping " + event.getProject().getName());
        logger.info("This project has been banned from the build due to previous failures.");
        logger.info(chars('-', LINE_LENGTH));
    }
}
@Override
public void forkedProjectStarted(ExecutionEvent event) {
    if (logger.isInfoEnabled() && event.getMojoExecution().getForkedExecutions().size() > 1) {
        logger.info(chars(' ', LINE_LENGTH));
        logger.info(chars('>', LINE_LENGTH));
        logger.info("Forking " + event.getProject().getName() + " " + event.getProject().getVersion());
        logger.info(chars('>', LINE_LENGTH));
    }
}
/** Verify operation manually. */
public static void main(String[] args) throws Exception {
    final Logger log = logger();

    final RepositorySystem system = newSystem();
    final RepositorySystemSession session = newSession(system);

    // String uri = "jmock:jmock:pom:1.1.0";
    String uri = "org.apache.maven:maven-profile:2.2.1";

    final Artifact artifact = new DefaultArtifact(uri);
    final Dependency dependency = new Dependency(artifact, "compile");
    final RemoteRepository central = newRepoRemote();

    final CollectRequest collectRequest = new CollectRequest();
    collectRequest.setRoot(dependency);
    collectRequest.addRepository(central);

    final DependencyNode collectNode = system.collectDependencies(session, collectRequest).getRoot();

    final List<String> include = new ArrayList<String>();
    final List<String> exclude = new ArrayList<String>();
    final DependencyFilter filter = new ScopeDependencyFilter(include, exclude);
    final DependencyRequest dependencyRequest = new DependencyRequest(collectNode, filter);

    final DependencyResult result = system.resolveDependencies(session, dependencyRequest);
    final DependencyNode resolveNode = result.getRoot();

    final PreorderNodeListGenerator generator = new PreorderNodeListGenerator();
    resolveNode.accept(generator);

    final String[] pathArray = generator.getClassPath().split(File.pathSeparator);
    for (String path : pathArray) {
        log.info("path = " + path);
    }

    // final MavenProject project = newProject("org.apache.maven:maven-model:pom:3.0");
    // log.info("project = " + project);
}
@Override
public void sessionStarted(ExecutionEvent event) {
    if (logger.isInfoEnabled() && event.getSession().getProjects().size() > 1) {
        logger.info(chars('-', LINE_LENGTH));
        logger.info("Reactor Build Order:");
        logger.info("");

        for (MavenProject project : event.getSession().getProjects()) {
            logger.info(project.getName());
        }
    }
}
@Override
public void sessionEnded(ExecutionEvent event) {
    if (logger.isInfoEnabled()) {
        if (event.getSession().getProjects().size() > 1) {
            logReactorSummary(event.getSession());
        }

        logResult(event.getSession());
        logStats(event.getSession());

        logger.info(chars('-', LINE_LENGTH));
    }
}
@Override
public void forkSucceeded(ExecutionEvent event) {
    if (logger.isInfoEnabled()) {
        StringBuilder buffer = new StringBuilder(128);
        buffer.append("<<< ");
        append(buffer, event.getMojoExecution());
        append(buffer, event.getProject());
        buffer.append(" <<<");

        logger.info("");
        logger.info(buffer.toString());
    }
}
private void setDisableP2Mirrors(TargetPlatformConfiguration result, Xpp3Dom configuration) {
    Xpp3Dom disableP2mirrorsDom = configuration.getChild("disableP2Mirrors");
    if (disableP2mirrorsDom != null) {
        logger.warn("Unsupported target-platform-configuration <disableP2Mirrors>. "
                + "Use tycho.disableP2Mirrors -D command line parameter or settings.xml property.");
    }
}
@Override
@OutboundActionMeta(name = "batch")
public void handleOutbound(Context ctx) throws ServletException, IOException {
    Payload payload = ctx.getPayload();
    HttpServletRequest request = ctx.getHttpServletRequest();
    HttpServletResponse response = ctx.getHttpServletResponse();
    String userIp = m_util.getRemoteIp(request);
    String version = payload.getVersion();
    boolean success = true;

    if (userIp != null) {
        if ("1".equals(version)) {
            processVersion1(payload, request, userIp);
        } else if ("2".equals(version)) {
            processVersion2(payload, request, userIp);
        } else {
            success = false;
            Cat.logEvent("InvalidVersion", version, Event.SUCCESS, version);
        }
    } else {
        success = false;
        Cat.logEvent("unknownIp", "batch", Event.SUCCESS, null);
        m_logger.info("unknown http request, x-forwarded-for:" + request.getHeader("x-forwarded-for"));
    }

    if (success) {
        response.getWriter().write("OK");
    } else {
        response.getWriter().write("validate request!");
    }
}
@Override
public void run() {
    m_active = true;

    while (m_active) {
        ChannelFuture channel = m_manager.channel();

        if (channel != null && checkWritable(channel)) {
            try {
                MessageTree tree = m_queue.poll();

                if (tree != null) {
                    sendInternal(tree);
                    tree.setMessage(null);
                }
            } catch (Throwable t) {
                m_logger.error("Error when sending message over TCP socket!", t);
            }
        } else {
            try {
                Thread.sleep(5);
            } catch (Exception e) {
                // ignore it
                m_active = false;
            }
        }
    }
}
@Override
public void run() {
    try {
        m_channelManager = new ClientChannelManager();

        long expireTime = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(1);

        while (m_channelManager.getActiveChannel() == null && System.currentTimeMillis() < expireTime) {
            TimeUnit.MILLISECONDS.sleep(1);
        }

        m_warmup.countDown();
        run0();
    } catch (Throwable e) {
        m_logger.error(e.getMessage(), e);
        m_warmup.countDown();
    } finally {
        if (m_channelManager != null) {
            m_channelManager.close();
        }

        m_latch.countDown();
    }
}
/**
 * {@inheritDoc}
 *
 * @see org.codehaus.mojo.javascript.assembler.AssemblerReader#getAssembler(java.io.File)
 */
public Assembler getAssembler(File file) throws Exception {
    AssemblerXpp3Reader reader = new AssemblerXpp3Reader();
    try {
        logger.info("Reading assembler descriptor " + file.getAbsolutePath());
        return reader.read(new FileReader(file));
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to read the script assembler descriptor", e);
    }
}
private List<ClassRealmManagerDelegate> getDelegates() {
    try {
        return container.lookupList(ClassRealmManagerDelegate.class);
    } catch (ComponentLookupException e) {
        logger.error("Failed to lookup class realm delegates: " + e.getMessage(), e);
        return Collections.emptyList();
    }
}
protected TargetEnvironment getDeprecatedTargetEnvironment(Xpp3Dom configuration) {
    Xpp3Dom environmentDom = configuration.getChild("environment");
    if (environmentDom != null) {
        logger.warn("target-platform-configuration <environment> element is deprecated; use <environments> instead");
        return newTargetEnvironment(environmentDom);
    }
    return null;
}
/**
 * Common put implementation. Handles firing events and ultimately sending the data via the
 * s-ramp client.
 *
 * @param resource
 * @param source
 * @param content
 * @throws TransferFailedException
 * @throws ResourceDoesNotExistException
 * @throws AuthorizationException
 */
private void putCommon(Resource resource, File source, InputStream content)
        throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
    logger.info(Messages.i18n.format("UPLOADING_TO_SRAMP", resource.getName())); //$NON-NLS-1$
    firePutInitiated(resource, source);

    firePutStarted(resource, source);
    if (resource.getName().contains("maven-metadata.xml")) { //$NON-NLS-1$
        logger.info(Messages.i18n.format("SKIPPING_ARTY", resource.getName())); //$NON-NLS-1$
        try {
            transfer(resource, content, new DevNullOutputStream(), TransferEvent.REQUEST_PUT);
        } catch (IOException e) {
            throw new TransferFailedException(e.getMessage(), e);
        }
    } else {
        doPut(resource, content);
    }
    firePutCompleted(resource, source);
}
protected void removeRepositoryForGroupId(
        String groupId,
        ManagedRepository repository,
        AuthorizationManager authorizationManager,
        Map<String, Privilege> existingPrivs)
        throws NoSuchPrivilegeException {
    final Logger logger = getLogger();

    for (final String method : PRIVILEGE_METHODS) {
        final String name = createPrivilegeName(repository, groupId, method);
        final Privilege priv = existingPrivs.remove(name);
        if (priv != null) {
            authorizationManager.deletePrivilege(priv.getId());
            logger.info("Deleted privilege: " + priv.getName());
        }
    }
}
/** Logic shared when adding new managed GroupIds and Repositories. */
protected void addRepositoryForGroupId(
        String groupId,
        ManagedRepository repository,
        Target managedTarget,
        AuthorizationManager authorizationManager,
        Set<String> deployerPrivs,
        Set<String> readOnlyPrivs,
        Map<String, Privilege> existingPrivs)
        throws InvalidConfigurationException {
    final Logger logger = this.getLogger();

    for (final String method : PRIVILEGE_METHODS) {
        final String name = createPrivilegeName(repository, groupId, method);

        // Check for existing priv before creating a new one
        Privilege priv = existingPrivs.get(name);
        if (priv == null) {
            priv = new Privilege();
            logger.info("Creating new privilege: " + name);
        } else {
            logger.info("Updating existing privilege: " + name);
        }

        priv.setName(name);
        priv.setDescription(priv.getName());
        priv.setType(TargetPrivilegeDescriptor.TYPE);
        priv.addProperty(ApplicationPrivilegeMethodPropertyDescriptor.ID, method);
        priv.addProperty(TargetPrivilegeRepositoryTargetPropertyDescriptor.ID, managedTarget.getId());
        priv.addProperty(TargetPrivilegeRepositoryPropertyDescriptor.ID, repository.getId());

        // Store, capturing updated reference
        priv = authorizationManager.addPrivilege(priv);

        // Build up the priv lists
        if (DEPLOYER_METHODS.contains(method)) {
            deployerPrivs.add(priv.getId());
        }
        if (READONLY_METHODS.contains(method)) {
            readOnlyPrivs.add(priv.getId());
        }
    }
}
/**
 * Grab lists of all root-level files and all directories contained in the given archive.
 *
 * @param file the path of the archive to scan
 * @param dirs receives the directory entries found in the archive
 * @param files receives the root-level file entries found in the archive
 * @throws java.io.IOException if the archive cannot be read
 */
protected static void grabFilesAndDirs(String file, List<String> dirs, List<String> files)
        throws IOException {
    File zipFile = new File(file);
    if (!zipFile.exists()) {
        Logger logger = new ConsoleLogger(Logger.LEVEL_INFO, "console");
        logger.error("JarArchive skipping non-existing file: " + zipFile.getAbsolutePath());
    } else if (zipFile.isDirectory()) {
        Logger logger = new ConsoleLogger(Logger.LEVEL_INFO, "console");
        logger.info("JarArchiver skipping indexJar " + zipFile + " because it is not a jar");
    } else {
        org.apache.commons.compress.archivers.zip.ZipFile zf = null;
        try {
            zf = new org.apache.commons.compress.archivers.zip.ZipFile(file, "utf-8");
            Enumeration<ZipArchiveEntry> entries = zf.getEntries();
            HashSet<String> dirSet = new HashSet<String>();
            while (entries.hasMoreElements()) {
                ZipArchiveEntry ze = entries.nextElement();
                String name = ze.getName();
                // avoid index for manifest-only jars.
                if (!name.equals(META_INF_NAME)
                        && !name.equals(META_INF_NAME + '/')
                        && !name.equals(INDEX_NAME)
                        && !name.equals(MANIFEST_NAME)) {
                    if (ze.isDirectory()) {
                        dirSet.add(name);
                    } else if (!name.contains("/")) {
                        files.add(name);
                    } else {
                        // a file, not in the root
                        // since the jar may be one without directory
                        // entries, add the parent dir of this file as
                        // well.
                        dirSet.add(name.substring(0, name.lastIndexOf("/") + 1));
                    }
                }
            }
            dirs.addAll(dirSet);
        } finally {
            if (zf != null) {
                zf.close();
            }
        }
    }
}
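// Hypothetical usage sketch (not from the original sources); the archive path is illustrative.
// Both lists are populated in place: "dirs" receives every directory entry (including implied
// parents of nested files), while "files" receives only root-level file names. Assumes the
// same plexus ConsoleLogger and java.util imports used above.
private static void printArchiveLayout() throws IOException {
    List<String> dirs = new ArrayList<String>();
    List<String> files = new ArrayList<String>();
    grabFilesAndDirs("target/example.jar", dirs, files);

    Logger logger = new ConsoleLogger(Logger.LEVEL_INFO, "console");
    logger.info("Root-level files: " + files);
    logger.info("Directories: " + dirs);
}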
public Set<Plugin> getPluginsBoundByDefaultToAllLifecycles(String packaging) {
    if (logger.isDebugEnabled()) {
        logger.debug("Looking up lifecycle mappings for packaging " + packaging + " from "
                + Thread.currentThread().getContextClassLoader());
    }

    LifecycleMapping lifecycleMappingForPackaging = lifecycleMappings.get(packaging);

    if (lifecycleMappingForPackaging == null) {
        return null;
    }

    Map<Plugin, Plugin> plugins = new LinkedHashMap<>();

    for (Lifecycle lifecycle : getOrderedLifecycles()) {
        org.apache.maven.lifecycle.mapping.Lifecycle lifecycleConfiguration =
                lifecycleMappingForPackaging.getLifecycles().get(lifecycle.getId());

        Map<String, LifecyclePhase> phaseToGoalMapping = null;

        if (lifecycleConfiguration != null) {
            phaseToGoalMapping = lifecycleConfiguration.getPhases();
        } else if (lifecycle.getDefaultPhases() != null) {
            phaseToGoalMapping = lifecycle.getDefaultPhases();
        }

        if (phaseToGoalMapping != null) {
            for (Map.Entry<String, LifecyclePhase> goalsForLifecyclePhase : phaseToGoalMapping.entrySet()) {
                String phase = goalsForLifecyclePhase.getKey();
                LifecyclePhase goals = goalsForLifecyclePhase.getValue();
                if (goals != null) {
                    parseLifecyclePhaseDefinitions(plugins, phase, goals);
                }
            }
        }
    }

    return plugins.keySet();
}
protected void sendStatus(String label, GitHubStatus status) {
    logger.info(" ** " + label + " : " + status + " ** ");

    HttpPost httpPostRequest = new HttpPost(githubEndpoint);
    try {
        String payload = String.format(
                "{\"state\": \"%s\", \"target_url\": \"%s\", \"description\": \"%s\", \"context\": \"%s\"}",
                status, "http://github.com", "This is a meaningful description", label);
        StringEntity params = new StringEntity(payload);

        httpPostRequest.addHeader("content-type", "application/json");
        httpPostRequest.addHeader("Authorization", "token " + githubToken);

        RequestConfig requestConfig = RequestConfig.custom()
                .setSocketTimeout(5000)
                .setConnectTimeout(5000)
                .setConnectionRequestTimeout(5000)
                .build();
        httpPostRequest.setConfig(requestConfig);
        httpPostRequest.setEntity(params);

        HttpResponse response = httpClient.execute(httpPostRequest);
        if (response.getStatusLine().getStatusCode() >= 300) {
            logger.error(response.getStatusLine().toString());
        }
        logger.info(response.getStatusLine().toString());
    } catch (Exception e) {
        logger.error(e.getMessage());
        e.printStackTrace();
    } finally {
        httpPostRequest.releaseConnection();
    }
}
private void startCourt(ThreadContext ctx) throws HttpException, IOException {
    Session session = ctx.getSession();

    ThreadHelper.setRandom(ctx);
    try {
        ThreadHelper.executeRequest(session, m_stateCourtRequest, true);
        m_logger.info("State court request executed");
    } catch (ThreadException e) {
        // ignore it
    }
}
public void execute(final Archiver archiver, final AssemblerConfigurationSource configSource)
        throws ArchiveCreationException, AssemblyFormattingException, InvalidAssemblerConfigurationException {
    if ((dependencySets == null) || dependencySets.isEmpty()) {
        logger.debug("No dependency sets specified.");
        return;
    }

    @SuppressWarnings("unchecked")
    final List<Dependency> deps = project.getDependencies();
    if ((deps == null) || deps.isEmpty()) {
        logger.debug("Project " + project.getId() + " has no dependencies. Skipping dependency set addition.");
    }

    for (final Iterator<DependencySet> i = dependencySets.iterator(); i.hasNext(); ) {
        final DependencySet dependencySet = i.next();

        addDependencySet(dependencySet, archiver, configSource);
    }
}