/** Logs total build time, finish timestamp, and a post-GC memory snapshot. */
private void logStats(MavenSession session) {
  logger.info(chars('-', LINE_LENGTH));

  // Elapsed time measured from the start recorded on the execution request.
  Date endTime = new Date();
  long elapsed = endTime.getTime() - session.getRequest().getStartTime().getTime();
  String clockSuffix = session.getRequest().isThreadConfigurationPresent() ? " (Wall Clock)" : "";
  logger.info("Total time: " + getFormattedTime(elapsed) + clockSuffix);
  logger.info("Finished at: " + endTime);

  // Request a collection first so the reported usage reflects live objects.
  System.gc();
  Runtime runtime = Runtime.getRuntime();
  long oneMb = 1024 * 1024;
  logger.info(
      "Final Memory: "
          + (runtime.totalMemory() - runtime.freeMemory()) / oneMb
          + "M/"
          + runtime.totalMemory() / oneMb
          + "M");
}
/** Logs the overall build outcome banner (FAILURE if any project raised an exception). */
private void logResult(MavenSession session) {
  logger.info(chars('-', LINE_LENGTH));
  String outcome = session.getResult().hasExceptions() ? "BUILD FAILURE" : "BUILD SUCCESS";
  logger.info(outcome);
}
@Override public void close() throws Exception { super.close(); // Displaying the events logger.info(" ** Events received by m2github ** "); for (String className : classNames.keySet()) { logger.info(" ** " + className + " : " + classNames.get(className)); } }
/** Announces that a project was skipped because an earlier module failed. */
@Override
public void projectSkipped(ExecutionEvent event) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  logger.info(chars(' ', LINE_LENGTH));
  logger.info(chars('-', LINE_LENGTH));
  logger.info("Skipping " + event.getProject().getName());
  logger.info("This project has been banned from the build due to previous failures.");
  logger.info(chars('-', LINE_LENGTH));
}
/** Prints the "Building &lt;name&gt; &lt;version&gt;" banner for a project build. */
@Override
public void projectStarted(ExecutionEvent event) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  logger.info(chars(' ', LINE_LENGTH));
  logger.info(chars('-', LINE_LENGTH));
  logger.info(
      "Building " + event.getProject().getName() + " " + event.getProject().getVersion());
  logger.info(chars('-', LINE_LENGTH));
}
/** Prints a fork banner, but only when more than one forked execution is involved. */
@Override
public void forkedProjectStarted(ExecutionEvent event) {
  // Keep the original evaluation order: the mojo execution is only queried
  // when INFO logging is actually enabled.
  if (!logger.isInfoEnabled()) {
    return;
  }
  if (event.getMojoExecution().getForkedExecutions().size() > 1) {
    logger.info(chars(' ', LINE_LENGTH));
    logger.info(chars('>', LINE_LENGTH));
    logger.info(
        "Forking " + event.getProject().getName() + " " + event.getProject().getVersion());
    logger.info(chars('>', LINE_LENGTH));
  }
}
/**
 * Verify operation manually: collects and resolves the dependency tree of a known artifact
 * against a remote repository, then logs each resulting class-path entry.
 *
 * <p>Not an automated test — intended to be run by hand.
 */
public static void main(String[] args) throws Exception {
  final Logger log = logger();
  final RepositorySystem system = newSystem();
  final RepositorySystemSession session = newSession(system);

  // Alternative artifact kept for manual experimentation:
  // String uri = "jmock:jmock:pom:1.1.0";
  String uri = "org.apache.maven:maven-profile:2.2.1";

  final Artifact artifact = new DefaultArtifact(uri);
  final Dependency dependency = new Dependency(artifact, "compile");
  final RemoteRepository central = newRepoRemote();

  // Phase 1: collect (compute) the dependency graph rooted at the artifact.
  final CollectRequest collectRequest = new CollectRequest();
  collectRequest.setRoot(dependency);
  collectRequest.addRepository(central);
  final DependencyNode collectNode = system.collectDependencies(session, collectRequest).getRoot();

  // Phase 2: resolve (download) the collected graph. Empty include/exclude
  // lists mean the scope filter accepts every dependency.
  final List<String> include = new ArrayList<String>();
  final List<String> exclude = new ArrayList<String>();
  final DependencyFilter filter = new ScopeDependencyFilter(include, exclude);
  final DependencyRequest dependencyRequest = new DependencyRequest(collectNode, filter);
  final DependencyResult result = system.resolveDependencies(session, dependencyRequest);
  final DependencyNode resolveNode = result.getRoot();

  // Flatten the resolved tree (pre-order) into a class path and log each entry.
  final PreorderNodeListGenerator generator = new PreorderNodeListGenerator();
  resolveNode.accept(generator);
  final String[] pathArray = generator.getClassPath().split(File.pathSeparator);
  for (String path : pathArray) {
    log.info("path = " + path);
  }

  //
  final MavenProject project = newProject("org.apache.maven:maven-model:pom:3.0");
  log.info("project = " + project);
}
/** Prints the closing "&lt;&lt;&lt;" banner mirroring the fork-start marker. */
@Override
public void forkSucceeded(ExecutionEvent event) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  StringBuilder line = new StringBuilder(128);
  line.append("<<< ");
  append(line, event.getMojoExecution());
  append(line, event.getProject());
  line.append(" <<<");
  logger.info("");
  logger.info(line.toString());
}
/** Prints the reactor build order — only meaningful for multi-module builds. */
@Override
public void sessionStarted(ExecutionEvent event) {
  if (!logger.isInfoEnabled() || event.getSession().getProjects().size() <= 1) {
    return;
  }
  logger.info(chars('-', LINE_LENGTH));
  logger.info("Reactor Build Order:");
  logger.info("");
  for (MavenProject reactorProject : event.getSession().getProjects()) {
    logger.info(reactorProject.getName());
  }
}
/** * Retrieves dependency information from Spring XML configuration files in a Maven project. * * @param project the project to analyze * @param dependentClasses A set of classes that already had their dependencies analyzed. This * method will <b>ADD</b> all Spring-induced dependencies to this set and also use it to * determine whether a given class needs to have it's dependencies analyzed. * @throws Exception */ public void addSpringDependencyClasses(MavenProject project, final Set<String> dependentClasses) throws Exception { final SpringFileBeanVisitor beanVisitor = new DefaultSpringXmlBeanVisitor(this.resolver, dependentClasses); for (File springXml : fileLocator.locateSpringXmls(project)) { final BufferedInputStream in = new BufferedInputStream(new FileInputStream(springXml)); try { fileParser.parse(in, beanVisitor); if (log != null && log.isInfoEnabled()) { log.info("Scanned Spring XML " + springXml.getPath()); } } catch (NoSpringXmlException ex) { if (log != null && log.isDebugEnabled()) { log.debug("Not a Spring XML file : " + springXml.getPath()); } // ok } catch (Exception e) { if (log != null) { log.error("Failed to parse Spring XML " + springXml.getPath() + " ...", e); } throw e; } finally { in.close(); } } }
/**
 * Handles a "batch" outbound request: dispatches by payload version, records invalid or
 * unidentifiable requests via Cat events, and writes a plain-text result to the response.
 */
@Override
@OutboundActionMeta(name = "batch")
public void handleOutbound(Context ctx) throws ServletException, IOException {
  Payload payload = ctx.getPayload();
  HttpServletRequest request = ctx.getHttpServletRequest();
  HttpServletResponse response = ctx.getHttpServletResponse();
  String clientIp = m_util.getRemoteIp(request);
  String version = payload.getVersion();
  boolean accepted = true;

  if (clientIp == null) {
    // Origin cannot be determined — record and reject.
    accepted = false;
    Cat.logEvent("unknownIp", "batch", Event.SUCCESS, null);
    m_logger.info(
        "unknown http request, x-forwarded-for:" + request.getHeader("x-forwarded-for"));
  } else if ("1".equals(version)) {
    processVersion1(payload, request, clientIp);
  } else if ("2".equals(version)) {
    processVersion2(payload, request, clientIp);
  } else {
    accepted = false;
    Cat.logEvent("InvalidVersion", version, Event.SUCCESS, version);
  }

  response.getWriter().write(accepted ? "OK" : "validate request!");
}
/**
 * {@inheritDoc}
 *
 * @see org.codehaus.mojo.javascript.assembler.AssemblerReader#getAssembler(java.io.File)
 */
public Assembler getAssembler(File file) throws Exception {
  AssemblerXpp3Reader reader = new AssemblerXpp3Reader();
  try {
    logger.info("Reading assembler descriptor " + file.getAbsolutePath());
    // try-with-resources: the original leaked the FileReader on every path.
    // NOTE(review): FileReader uses the platform default charset — confirm the
    // descriptor encoding is acceptable or switch to an explicit charset.
    try (FileReader descriptor = new FileReader(file)) {
      return reader.read(descriptor);
    }
  } catch (Exception e) {
    throw new MojoExecutionException("Failed to read the script assembler descriptor", e);
  }
}
/** * Common put implementation. Handles firing events and ultimately sending the data via the s-ramp * client. * * @param resource * @param source * @param content * @throws TransferFailedException * @throws ResourceDoesNotExistException * @throws AuthorizationException */ private void putCommon(Resource resource, File source, InputStream content) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException { logger.info(Messages.i18n.format("UPLOADING_TO_SRAMP", resource.getName())); // $NON-NLS-1$ firePutInitiated(resource, source); firePutStarted(resource, source); if (resource.getName().contains("maven-metadata.xml")) { // $NON-NLS-1$ logger.info(Messages.i18n.format("SKIPPING_ARTY", resource.getName())); // $NON-NLS-1$ try { transfer(resource, content, new DevNullOutputStream(), TransferEvent.REQUEST_PUT); } catch (IOException e) { throw new TransferFailedException(e.getMessage(), e); } } else { doPut(resource, content); } firePutCompleted(resource, source); }
/** Logic shared when adding new managed GroupIds and Repositories. */ protected void addRepositoryForGroupId( String groupId, ManagedRepository repository, Target managedTarget, AuthorizationManager authorizationManager, Set<String> deployerPrivs, Set<String> readOnlyPrivs, Map<String, Privilege> existingPrivs) throws InvalidConfigurationException { final Logger logger = this.getLogger(); for (final String method : PRIVILEGE_METHODS) { final String name = createPrivilegeName(repository, groupId, method); // Check for existing priv before creating a new one Privilege priv = existingPrivs.get(name); if (priv == null) { priv = new Privilege(); logger.info("Creating new privilege: " + name); } else { logger.info("Updating existing privilege: " + name); } priv.setName(name); priv.setDescription(priv.getName()); priv.setType(TargetPrivilegeDescriptor.TYPE); priv.addProperty(ApplicationPrivilegeMethodPropertyDescriptor.ID, method); priv.addProperty(TargetPrivilegeRepositoryTargetPropertyDescriptor.ID, managedTarget.getId()); priv.addProperty(TargetPrivilegeRepositoryPropertyDescriptor.ID, repository.getId()); // Store, capturing updated reference priv = authorizationManager.addPrivilege(priv); // Build up the priv lists if (DEPLOYER_METHODS.contains(method)) { deployerPrivs.add(priv.getId()); } if (READONLY_METHODS.contains(method)) { readOnlyPrivs.add(priv.getId()); } } }
protected void sendStatus(String label, GitHubStatus status) { logger.info(" ** " + label + " : " + status + " ** "); HttpPost httpPostRequest = new HttpPost(githubEndpoint); try { String payload = String.format( "{\"state\": \"%s\", \"target_url\": \"%s\", \"description\": \"%s\", \"context\": \"%s\"}", status, "http://github.com", "This is a meaningful description", label); StringEntity params = new StringEntity(payload); httpPostRequest.addHeader("content-type", "application/json"); httpPostRequest.addHeader("Authorization", "token " + githubToken); RequestConfig requestConfig = RequestConfig.custom() .setSocketTimeout(5000) .setConnectTimeout(5000) .setConnectionRequestTimeout(5000) .build(); httpPostRequest.setConfig(requestConfig); httpPostRequest.setEntity(params); HttpResponse response = httpClient.execute(httpPostRequest); if (response.getStatusLine().getStatusCode() >= 300) { logger.error(response.getStatusLine().toString()); } logger.info(response.getStatusLine().toString()); } catch (Exception e) { logger.error(e.getMessage()); e.printStackTrace(); } finally { httpPostRequest.releaseConnection(); } // post.s }
private void startCourt(ThreadContext ctx) throws HttpException, IOException { Session session = ctx.getSession(); ThreadHelper.setRandom(ctx); try { ThreadHelper.executeRequest(session, m_stateCourtRequest, true); m_logger.info("овдирямЙЁи"); } catch (ThreadException e) { // ignore it } }
@Override public void removeManagedGroupId(String groupId) throws NoSuchAuthorizationManagerException, NoSuchPrivilegeException, NoSuchRoleException, IOException { final Logger logger = getLogger(); final AuthorizationManager authorizationManager = this.securitySystem.getAuthorizationManager(SECURITY_CONTEXT); // Assumes priv name is unique final Map<String, Privilege> existingPrivs = new HashMap<String, Privilege>(); for (final Privilege priv : authorizationManager.listPrivileges()) { existingPrivs.put(priv.getName(), priv); } /* * Deletes privs */ final ManagedRepositories managedRepositoriesObj = this.getManagedRepositories(); for (final ManagedRepository repository : managedRepositoriesObj.getManagedRepositories()) { removeRepositoryForGroupId(groupId, repository, authorizationManager, existingPrivs); } // Delete roles final String deployerRoleId = this.createRoleId(groupId, DEPLOYER_ROLE_SUFFIX); authorizationManager.deleteRole(deployerRoleId); logger.info("Deleted role: " + deployerRoleId); final String readOnlyRoleId = this.createRoleId(groupId, READONLY_ROLE_SUFFIX); authorizationManager.deleteRole(readOnlyRoleId); logger.info("Deleted role: " + readOnlyRoleId); // delete the repository target final String targetId = GIDM_ID_PREFIX + groupId; this.targetRegistry.removeRepositoryTarget(targetId); logger.info("Deleted repository target: " + targetId); this.nexusConfiguration.saveConfiguration(); }
/** Emits the end-of-build summary: reactor table (multi-module only), result, and stats. */
@Override
public void sessionEnded(ExecutionEvent event) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  MavenSession session = event.getSession();
  // The reactor summary only adds value when more than one project was built.
  if (session.getProjects().size() > 1) {
    logReactorSummary(session);
  }
  logResult(session);
  logStats(session);
  logger.info(chars('-', LINE_LENGTH));
}
/** Prints one dotted-aligned status row (SUCCESS/FAILURE/SKIPPED) per reactor project. */
private void logReactorSummary(MavenSession session) {
  logger.info(chars('-', LINE_LENGTH));
  logger.info("Reactor Summary:");
  logger.info("");
  MavenExecutionResult buildResult = session.getResult();
  for (MavenProject reactorProject : session.getProjects()) {
    StringBuilder row = new StringBuilder(128);
    row.append(reactorProject.getName());
    row.append(' ');
    // Pad with dots so the status column lines up across rows.
    while (row.length() < LINE_LENGTH - 21) {
      row.append('.');
    }
    row.append(' ');
    BuildSummary summary = buildResult.getBuildSummary(reactorProject);
    if (summary == null) {
      row.append("SKIPPED");
    } else if (summary instanceof BuildSuccess) {
      row.append("SUCCESS [").append(getFormattedTime(summary.getTime())).append("]");
    } else if (summary instanceof BuildFailure) {
      row.append("FAILURE [").append(getFormattedTime(summary.getTime())).append("]");
    }
    logger.info(row.toString());
  }
}
/** Deletes every method-specific privilege this groupId holds on the given repository. */
protected void removeRepositoryForGroupId(
    String groupId,
    ManagedRepository repository,
    AuthorizationManager authorizationManager,
    Map<String, Privilege> existingPrivs)
    throws NoSuchPrivilegeException {
  final Logger log = getLogger();
  for (final String method : PRIVILEGE_METHODS) {
    final String privName = createPrivilegeName(repository, groupId, method);
    // remove() both looks up the privilege and prunes it from the tracking map.
    final Privilege privilege = existingPrivs.remove(privName);
    if (privilege != null) {
      authorizationManager.deletePrivilege(privilege.getId());
      log.info("Deleted privilege: " + privilege.getName());
    }
  }
}
/** * Grab lists of all root-level files and all directories contained in the given archive. * * @param file . * @param files . * @param dirs . * @throws java.io.IOException . */ protected static void grabFilesAndDirs(String file, List<String> dirs, List<String> files) throws IOException { File zipFile = new File(file); if (!zipFile.exists()) { Logger logger = new ConsoleLogger(Logger.LEVEL_INFO, "console"); logger.error("JarArchive skipping non-existing file: " + zipFile.getAbsolutePath()); } else if (zipFile.isDirectory()) { Logger logger = new ConsoleLogger(Logger.LEVEL_INFO, "console"); logger.info("JarArchiver skipping indexJar " + zipFile + " because it is not a jar"); } else { org.apache.commons.compress.archivers.zip.ZipFile zf = null; try { zf = new org.apache.commons.compress.archivers.zip.ZipFile(file, "utf-8"); Enumeration<ZipArchiveEntry> entries = zf.getEntries(); HashSet<String> dirSet = new HashSet<String>(); while (entries.hasMoreElements()) { ZipArchiveEntry ze = entries.nextElement(); String name = ze.getName(); // avoid index for manifest-only jars. if (!name.equals(META_INF_NAME) && !name.equals(META_INF_NAME + '/') && !name.equals(INDEX_NAME) && !name.equals(MANIFEST_NAME)) { if (ze.isDirectory()) { dirSet.add(name); } else if (!name.contains("/")) { files.add(name); } else { // a file, not in the root // since the jar may be one without directory // entries, add the parent dir of this file as // well. dirSet.add(name.substring(0, name.lastIndexOf("/") + 1)); } } } dirs.addAll(dirSet); } finally { if (zf != null) { zf.close(); } } } }
/** Forwards the buffered record to the backing logger at the recorded level. */
public void delegate() {
  if (level == LEVEL_DEBUG) {
    logger.debug(message, cause);
  } else if (level == LEVEL_INFO) {
    logger.info(message, cause);
  } else if (level == LEVEL_WARN) {
    logger.warn(message, cause);
  } else if (level == LEVEL_ERROR) {
    logger.error(message, cause);
  } else if (level == LEVEL_FATAL) {
    logger.fatalError(message, cause);
  } else {
    // An unrecognized level indicates a programming error upstream.
    throw new Error();
  }
}
private void scan(Wagon wagon, String basePath, List<String> collected) { try { List<String> files = wagon.getFileList(basePath); if (files.isEmpty()) { collected.add(basePath); } else { basePath = basePath + "/"; for (String file : files) { logger.info("Found file in the source repository: " + file); scan(wagon, basePath + file, collected); } } } catch (TransferFailedException e) { throw new RuntimeException(e); } catch (ResourceDoesNotExistException e) { // is thrown when calling getFileList on a file collected.add(basePath); } catch (AuthorizationException e) { throw new RuntimeException(e); } }
/**
 * Updates an artifact by storing its hash value as an S-RAMP property.
 *
 * <p>The hash file name (".md5"/".sha1" suffix) determines both the property name and, once the
 * suffix is stripped, the path of the artifact the hash belongs to.
 *
 * @param gavInfo GAV coordinates of the hash resource being uploaded
 * @param resourceInputStream stream containing the textual hash value
 * @throws TransferFailedException if the artifact meta-data cannot be updated
 */
private void doPutHash(MavenGavInfo gavInfo, InputStream resourceInputStream)
    throws TransferFailedException {
  logger.info(Messages.i18n.format("STORING_HASH_AS_PROP", gavInfo.getName())); // $NON-NLS-1$
  try {
    String artyPath = gavInfo.getFullName();
    String hashPropName;
    if (gavInfo.getType().endsWith(".md5")) { // $NON-NLS-1$
      hashPropName = "maven.hash.md5"; // $NON-NLS-1$
      // Strip the trailing ".md5" to recover the hashed artifact's path.
      artyPath = artyPath.substring(0, artyPath.length() - 4);
    } else {
      hashPropName = "maven.hash.sha1"; // $NON-NLS-1$
      // Strip the trailing ".sha1".
      artyPath = artyPath.substring(0, artyPath.length() - 5);
    }
    String hashValue = IOUtils.toString(resourceInputStream);

    // See the comment in {@link SrampWagon#fillInputData(InputData)} about why we're doing this
    // context classloader magic.
    ClassLoader oldCtxCL = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(SrampWagon.class.getClassLoader());
    try {
      SrampArchiveEntry entry = this.archive.getEntry(artyPath);
      // Re-fetch the artifact meta-data in case it changed on the server since we uploaded it.
      BaseArtifactType metaData = client.getArtifactMetaData(entry.getMetaData().getUuid());
      SrampModelUtils.setCustomProperty(metaData, hashPropName, hashValue);
      this.archive.updateEntry(entry, null);
      // The meta-data has been updated in the local/temp archive - now send it to the remote repo
      client.updateArtifactMetaData(metaData);
    } catch (Throwable t) {
      // Catch Throwable so the context classloader is always restored below.
      throw new TransferFailedException(t.getMessage(), t);
    } finally {
      Thread.currentThread().setContextClassLoader(oldCtxCL);
    }
  } catch (Exception e) {
    throw new TransferFailedException(
        Messages.i18n.format("FAILED_TO_STORE_HASH", gavInfo.getName()), e); // $NON-NLS-1$
  }
}
/**
 * Creates (or updates) the repository target, the per-repository privileges, and the
 * deployer/read-only roles that manage access for the given groupId, then persists the
 * configuration.
 *
 * @param groupId the Maven groupId to bring under management
 */
@Override
public void addManagedGroupId(String groupId)
    throws ConfigurationException, IOException, NoSuchAuthorizationManagerException,
        NoSuchRoleException {
  final Logger logger = this.getLogger();

  // Validate the groupId and convert it to a repo target pattern
  final String targetPattern = groupIdToTargetPattern(groupId);

  // Get or Create the Target and persist the changes
  final String targetId = GIDM_ID_PREFIX + groupId;
  Target managedTarget = this.targetRegistry.getRepositoryTarget(targetId);
  if (managedTarget == null) {
    // Just using the name as the id ... hope thats ok!
    managedTarget =
        new Target(
            targetId,
            GIDM_NAME_PREFIX + groupId,
            M2_CONTENT_CLASS,
            Collections.singleton(targetPattern));
    logger.info("Created new repository target: " + managedTarget.getName());
  } else {
    // Replace any stale patterns with the single pattern derived from this groupId.
    final Set<String> patternTexts = managedTarget.getPatternTexts();
    patternTexts.clear();
    patternTexts.add(targetPattern);
    logger.info("Updated existing repository target: " + managedTarget.getName());
  }
  this.targetRegistry.addRepositoryTarget(managedTarget);

  final AuthorizationManager authorizationManager =
      this.securitySystem.getAuthorizationManager(SECURITY_CONTEXT);

  // Get or Create the deployer and readonly Roles, need these here to add the privs to them as
  // they are created in the next step. Privilege sets are rebuilt from scratch.
  final Role deployerRole = getOrCreateRole(authorizationManager, groupId, DEPLOYER_ROLE_SUFFIX);
  final Set<String> deployerPrivs = deployerRole.getPrivileges();
  deployerPrivs.clear();
  final Role readOnlyRole = getOrCreateRole(authorizationManager, groupId, READONLY_ROLE_SUFFIX);
  final Set<String> readOnlyPrivs = readOnlyRole.getPrivileges();
  readOnlyPrivs.clear();

  // Assumes priv name is unique
  final Map<String, Privilege> existingPrivs = new HashMap<String, Privilege>();
  for (final Privilege priv : authorizationManager.listPrivileges()) {
    existingPrivs.put(priv.getName(), priv);
  }

  /*
   * Adds create/read privs for each managed repository
   */
  final ManagedRepositories managedRepositoriesObj = this.getManagedRepositories();
  for (final ManagedRepository repository : managedRepositoriesObj.getManagedRepositories()) {
    addRepositoryForGroupId(
        groupId,
        repository,
        managedTarget,
        authorizationManager,
        deployerPrivs,
        readOnlyPrivs,
        existingPrivs);
  }

  // Add the roles
  authorizationManager.updateRole(deployerRole);
  authorizationManager.updateRole(readOnlyRole);

  this.nexusConfiguration.saveConfiguration();
}
// Ignore @Override warning because this code must be Java 1.3 compatible. protected void doStartEvent(String eventName, String target, long timestamp) { logger.info("[" + target + "]"); transmitEvent( new AntEventSummary(Constants.TARGET_STARTED, "unspecified project", target, null, null)); }
/**
 * Logs the exception carried by the holder (with its stack trace) and delegates publication
 * to the superclass.
 *
 * @param invocation the current action invocation
 * @param exceptionHolder wrapper around the thrown exception
 */
protected void publishException(ActionInvocation invocation, ExceptionHolder exceptionHolder) {
  Throwable e = exceptionHolder.getException();
  // Fixed typo in the log message ("ocurred" -> "occurred").
  logger.info("Error occurred during execution", e);
  super.publishException(invocation, exceptionHolder);
}
/** Announces the start of the project discovery (scanning) phase. */
@Override
public void projectDiscoveryStarted(ExecutionEvent event) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  logger.info("Scanning for projects...");
}
@Override public void init(Context context) throws Exception { super.init(context); this.context = context; String githubEndpointPrefix = "https://api.github.com/"; String githubRepo; String sha; Properties userProperties = (Properties) context.getData().get("userProperties"); githubRepo = userProperties.getProperty("m2github.repo"); if (githubRepo == null) { logger.error("m2github - Missing property m2github.repo"); initError = true; } githubToken = userProperties.getProperty("m2github.token"); if (githubToken == null) { logger.error("m2github - Missing property m2github.token"); initError = true; } if (userProperties.getProperty("m2github.endpoint") != null) { githubEndpointPrefix = userProperties.getProperty("m2github.endpoint"); } // Need the current SHA Process gitProcess = Runtime.getRuntime().exec("git rev-parse HEAD"); gitProcess.waitFor(); BufferedReader reader = new BufferedReader(new InputStreamReader(gitProcess.getInputStream())); StringBuffer shaStringBuffer = new StringBuffer(); String line = ""; while ((line = reader.readLine()) != null) { shaStringBuffer.append(line); } sha = shaStringBuffer.toString(); if (sha == null) { logger.error("m2github - Couldn't figure out SHA1"); initError = true; } if (!initError) { try { githubEndpoint = githubEndpointPrefix + "/repos/" + githubRepo + "/statuses/" + sha; httpClient = HttpClients.createDefault(); if (httpClient == null) { logger.error(" ** m2github - Failed to initialize HTTP Client"); System.exit(1); } logger.info( " ** m2github - GitHub Event Spy succesfully initialized - Endpoint is " + githubEndpoint + " ** "); } catch (Exception e) { // TODO Auto-generated catch block logger.error(e.getMessage()); e.printStackTrace(); } } }
/**
 * Stages a release: downloads every file from the source repository, merges Maven metadata with
 * the target's copies, packs everything (plus a rename script) into a zip, uploads it to the
 * target, and unpacks/renames it remotely via shell commands.
 *
 * @param sourceRepository repository to read artifacts from
 * @param targetRepository repository (must be backed by a {@code CommandExecutor} wagon) to stage into
 * @param version release version, used to name the work dir, zip, and rename script
 * @throws WagonException on transfer or authorization failures
 * @throws IOException on local I/O failures or corrupt metadata
 */
public void copy(Repository sourceRepository, Repository targetRepository, String version)
    throws WagonException, IOException {
  String prefix = "staging-plugin";
  String fileName = prefix + "-" + version + ".zip";
  String tempdir = System.getProperty("java.io.tmpdir");
  logger.debug("Writing all output to " + tempdir);

  // Create the renameScript script
  String renameScriptName = prefix + "-" + version + "-rename.sh";
  File renameScript = new File(tempdir, renameScriptName);

  // Work directory — wiped and recreated on every run.
  File basedir = new File(tempdir, prefix + "-" + version);
  FileUtils.deleteDirectory(basedir);
  basedir.mkdirs();

  Wagon sourceWagon = wagonManager.getWagon(sourceRepository);
  AuthenticationInfo sourceAuth = wagonManager.getAuthenticationInfo(sourceRepository.getId());
  sourceWagon.connect(sourceRepository, sourceAuth);

  logger.info("Looking for files in the source repository.");
  List<String> files = new ArrayList<String>();
  scan(sourceWagon, "", files);

  logger.info("Downloading files from the source repository to: " + basedir);
  for (String s : files) {
    // SCM bookkeeping directories are never staged.
    if (s.contains(".svn")) {
      continue;
    }
    File f = new File(basedir, s);
    FileUtils.mkdir(f.getParentFile().getAbsolutePath());
    logger.info("Downloading file from the source repository: " + s);
    sourceWagon.get(s, f);
  }

  // ----------------------------------------------------------------------------
  // Now all the files are present locally and now we are going to grab the
  // metadata files from the targetRepositoryUrl and pull those down locally
  // so that we can merge the metadata.
  // ----------------------------------------------------------------------------
  logger.info("Downloading metadata from the target repository.");
  Wagon targetWagon = wagonManager.getWagon(targetRepository);
  // The remote unzip/rename steps below require shell access on the target.
  if (!(targetWagon instanceof CommandExecutor)) {
    throw new CommandExecutionException(
        "Wagon class '"
            + targetWagon.getClass().getName()
            + "' in use for target repository is not a CommandExecutor");
  }
  AuthenticationInfo targetAuth = wagonManager.getAuthenticationInfo(targetRepository.getId());
  targetWagon.connect(targetRepository, targetAuth);

  // NOTE(review): this writer leaks if an exception is thrown before the
  // IOUtil.close(rw) below — consider try/finally or try-with-resources.
  PrintWriter rw = new PrintWriter(new FileWriter(renameScript));
  File archive = new File(tempdir, fileName);
  for (String s : files) {
    if (s.startsWith("/")) {
      s = s.substring(1);
    }
    if (s.endsWith(MAVEN_METADATA)) {
      File emf = new File(basedir, s + IN_PROCESS_MARKER);
      try {
        targetWagon.get(s, emf);
      } catch (ResourceDoesNotExistException e) {
        // We don't have an equivalent on the targetRepositoryUrl side because we have something
        // new on the sourceRepositoryUrl side so just skip the metadata merging.
        continue;
      }
      try {
        mergeMetadata(emf);
      } catch (XmlPullParserException e) {
        // NOTE(review): the cause is dropped here — wrapping e would preserve the trace.
        throw new IOException("Metadata file is corrupt " + s + " Reason: " + e.getMessage());
      }
    }
  }

  // NOTE(review): raw Set/TreeSet — elements are Strings (cast below); Set<String> would be safer.
  Set moveCommands = new TreeSet();

  // ----------------------------------------------------------------------------
  // Create the Zip file that we will deploy to the targetRepositoryUrl stage
  // ----------------------------------------------------------------------------
  logger.info("Creating zip file.");
  OutputStream os = new FileOutputStream(archive);
  ZipOutputStream zos = new ZipOutputStream(os);
  scanDirectory(basedir, basedir, zos, version, moveCommands);

  // ----------------------------------------------------------------------------
  // Create the renameScript script. This is as atomic as we can
  // ----------------------------------------------------------------------------
  logger.info("Creating rename script.");
  for (Object moveCommand : moveCommands) {
    String s = (String) moveCommand;
    // We use an explicit unix '\n' line-ending here instead of using the println() method.
    // Using println() will cause files and folders to have a '\r' at the end if the plugin is run
    // on Windows.
    rw.print(s + "\n");
  }
  IOUtil.close(rw);

  // Append the rename script itself as the final zip entry.
  ZipEntry e = new ZipEntry(renameScript.getName());
  zos.putNextEntry(e);
  InputStream is = new FileInputStream(renameScript);
  IOUtil.copy(is, zos);
  IOUtil.close(is);
  IOUtil.close(zos);

  sourceWagon.disconnect();

  // Push the Zip to the target system
  logger.info("Uploading zip file to the target repository.");
  targetWagon.put(archive, fileName);

  logger.info("Unpacking zip file on the target machine.");
  String targetRepoBaseDirectory = targetRepository.getBasedir();

  // We use the super quiet option here as all the noise seems to kill/stall the connection
  String command =
      "unzip -o -qq -d " + targetRepoBaseDirectory + " " + targetRepoBaseDirectory + "/" + fileName;
  ((CommandExecutor) targetWagon).executeCommand(command);

  logger.info("Deleting zip file from the target repository.");
  command = "rm -f " + targetRepoBaseDirectory + "/" + fileName;
  ((CommandExecutor) targetWagon).executeCommand(command);

  logger.info("Running rename script on the target machine.");
  command = "cd " + targetRepoBaseDirectory + "; sh " + renameScriptName;
  ((CommandExecutor) targetWagon).executeCommand(command);

  logger.info("Deleting rename script from the target repository.");
  command = "rm -f " + targetRepoBaseDirectory + "/" + renameScriptName;
  ((CommandExecutor) targetWagon).executeCommand(command);

  targetWagon.disconnect();
}