/**
 * Test to make sure that exporting to an existing file fails when the overwrite flag is false.
 *
 * @throws IOException if an I/O error occurs
 */
@Test
public void testExportToExistingFileFails() throws IOException {
    log.info("testExportToExistingFileFails");

    // Get a temp directory for the test
    final File tempDirectory = createTempDirectory("testExportToExistingFileFails");

    // Get an archive instance
    final Archive<?> archive = createArchiveWithAssets();

    // Create an empty file at the target location before exporting
    final File alreadyExists = new File(tempDirectory, archive.getName());
    final OutputStream alreadyExistsOutputStream = new FileOutputStream(alreadyExists);
    alreadyExistsOutputStream.write(new byte[] {});
    alreadyExistsOutputStream.close();
    Assert.assertTrue(
        "The test setup is incorrect; an empty file should exist before writing the archive",
        alreadyExists.exists());

    // Should fail, as we're not overwriting
    boolean gotExpectedException = false;
    try {
        this.exportAsFile(archive, alreadyExists, false);
    } catch (final FileExistsException fee) {
        gotExpectedException = true;
    }
    Assert.assertTrue(
        "Should get " + FileExistsException.class.getSimpleName()
            + " when exporting to an existing file when overwrite is false",
        gotExpectedException);
}
private void testDeployment(final Archive<?> archive) throws IOException {
    final ModelControllerClient client = domainMasterLifecycleUtil.getDomainClient();
    final ModelNode readServerSubsystems = Operations.createOperation(
        ClientConstants.READ_CHILDREN_NAMES_OPERATION,
        Operations.createAddress("host", "master", "server", "main-one"));
    readServerSubsystems.get(ClientConstants.CHILD_TYPE).set(ClientConstants.SUBSYSTEM);
    final String name = archive.getName();

    // Deploy the archive
    execute(
        client,
        createDeployAddOperation(archive.as(ZipExporter.class).exportAsInputStream(), name, null));
    Assert.assertTrue("Deployment " + name + " was not deployed.", hasDeployment(client, name));

    // Validate the subsystem child names on a server
    ModelNode result = execute(client, readServerSubsystems);
    validateSubsystemModel("/host=master/server=main-one", result);

    // Fully replace the deployment, but with the 'enabled' flag set to false, triggering undeploy
    final Operation fullReplaceOp = createReplaceAndDisableOperation(
        archive.as(ZipExporter.class).exportAsInputStream(), name, null);
    execute(client, fullReplaceOp);

    // Below validates that WFCORE-1577 is fixed, the model should not be missing on the
    // /host=master/server=main-one or main-two
    // Validate the subsystem child names
    result = execute(client, readServerSubsystems);
    validateSubsystemModel("/host=master/server=main-one", result);
}
/**
 * Export an archive.
 *
 * @param archive archive to export
 * @throws IOException if an I/O error occurs
 */
protected static void exportArchive(Archive<?> archive) throws IOException {
    // Use try-with-resources so the stream is closed even if the export fails
    try (OutputStream out = new FileOutputStream("target/" + archive.getName() + ".zip")) {
        archive.as(ZipExporter.class).exportTo(out);
    }
}
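/**
 * Usage sketch (not part of the original sources): shows how the exportArchive helper above might
 * be invoked from a test. The archive name "example.jar" and the Greeter class are hypothetical
 * placeholders; only the standard ShrinkWrap factory API is assumed.
 */
@Test
public void exportArchiveUsageSketch() throws IOException {
    // Build a simple archive with the ShrinkWrap factory
    JavaArchive jar = ShrinkWrap.create(JavaArchive.class, "example.jar")
        .addClass(Greeter.class); // hypothetical class under test
    // Delegates to the helper above; writes target/example.jar.zip
    exportArchive(jar);
}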
/*
 * (non-Javadoc)
 * @see org.jboss.arquillian.spi.DeploymentPackager#generateDeployment(org.jboss.arquillian.spi.TestDeployment)
 */
public Archive<?> generateDeployment(
        TestDeployment testDeployment, Collection<ProtocolArchiveProcessor> processors) {
    WebArchive protocol = new ProtocolDeploymentAppender().createAuxiliaryArchive();

    Archive<?> applicationArchive = testDeployment.getApplicationArchive();
    Collection<Archive<?>> auxiliaryArchives = testDeployment.getAuxiliaryArchives();

    Processor processor = new Processor(testDeployment, processors);

    if (Validate.isArchiveOfType(EnterpriseArchive.class, applicationArchive)) {
        return handleArchive(
            applicationArchive.as(EnterpriseArchive.class),
            auxiliaryArchives,
            protocol,
            processor,
            testDeployment);
    }

    if (Validate.isArchiveOfType(WebArchive.class, applicationArchive)) {
        return handleArchive(
            applicationArchive.as(WebArchive.class), auxiliaryArchives, protocol, processor);
    }

    if (Validate.isArchiveOfType(JavaArchive.class, applicationArchive)) {
        return handleArchive(
            applicationArchive.as(JavaArchive.class), auxiliaryArchives, protocol, processor);
    }

    throw new IllegalArgumentException(
        ServletProtocolDeploymentPackager.class.getName()
            + " can not handle archive of type " + applicationArchive.getClass().getName());
}
@Test(expected = ArchiveExportException.class)
public void testExportThrowsArchiveExceptionOnAssetWriteFailure() throws IOException {
    log.info("testExportThrowsArchiveExceptionOnAssetWriteFailure");
    Archive<?> archive = createArchiveWithAssets();

    // If the path already contains a node, remove it from the parent's children
    if (archive.contains(PATH_ONE)) {
        archive.delete(PATH_ONE);
    }

    archive.add(
        new Asset() {
            @Override
            public InputStream openStream() {
                throw new RuntimeException("Mock Exception from an Asset write");
            }
        },
        PATH_ONE);

    // Export
    final InputStream in = this.exportAsInputStream(archive);

    // Read in the full content (to in turn empty the underlying buffer and ensure we complete)
    final OutputStream sink =
        new OutputStream() {
            @Override
            public void write(int b) throws IOException {}
        };
    IOUtil.copyWithClose(in, sink);
}
/** removes test class from web archive (test class will be replaced by transformed assertions) */
private void removeTestClassFromDeployment(Archive<?> archive, TestClass testClass) {
    for (ArchivePath archivePath : archive.getContent().keySet()) {
        String path = archivePath.get();
        String classPath = testClass.getName().replace(".", "/");

        // remove TestClass and its anonymous classes - do not remove static inner classes
        if (path.matches("/WEB-INF/classes/" + classPath + "(\\$[0-9]*)?\\.class")) {
            archive.delete(archivePath);
        }
    }
}
protected ZipExporterDelegate(final Archive<?> archive, final boolean compressed) {
    super(archive);
    this.compressed = compressed;

    // Precondition check
    if (archive.getContent().isEmpty()) {
        throw new IllegalArgumentException(
            "[SHRINKWRAP-93] Cannot use this JDK-based implementation to export as ZIP an archive with no content: "
                + archive.toString());
    }
}
/*
 * (non-Javadoc)
 * @see org.jboss.arquillian.spi.client.container.DeployableContainer#deploy(org.jboss.arquillian.spi.client.deployment.Deployment[])
 */
@Override
public ProtocolMetaData deploy(final Archive<?> archive) throws DeploymentException {
    String deploymentName = archive.getName();
    URL deploymentUrl = ShrinkWrapUtil.toURL(archive);

    deploy(deploymentName, deploymentUrl);

    try {
        return ManagementViewParser.parse(
            archive.getName(), (ProfileService) new InitialContext().lookup("ProfileService"));
    } catch (Exception e) {
        throw new DeploymentException("Could not extract deployment metadata", e);
    }
}
private Archive<?> handleArchive(
        EnterpriseArchive applicationArchive,
        Collection<Archive<?>> auxiliaryArchives,
        JavaArchive protocol,
        Processor processor) {

    Map<ArchivePath, Node> applicationArchiveWars =
        applicationArchive.getContent(Filters.include(".*\\.war"));

    if (applicationArchiveWars.size() == 1) {
        ArchivePath warPath = applicationArchiveWars.keySet().iterator().next();
        try {
            // Reuse the WAR handling, but auxiliary archives should be added to the EAR, not the WAR
            handleArchive(
                applicationArchive.getAsType(WebArchive.class, warPath),
                new ArrayList<Archive<?>>(),
                protocol,
                processor);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException(
                "Can not manipulate war's that are not of type " + WebArchive.class, e);
        }
    } else if (applicationArchiveWars.size() > 1) {
        // TODO: fetch the TestDeployment.getArchiveForEnrichment
        throw new UnsupportedOperationException(
            "Multiple WebArchives found in " + applicationArchive.getName()
                + ". Can not determine which to enrich");
    } else {
        // Reuse the handleArchive(JavaArchive, ..) logic
        Archive<?> wrappedWar =
            handleArchive(protocol, new ArrayList<Archive<?>>(), null, processor);
        applicationArchive.addAsModule(wrappedWar);

        if (applicationArchive.contains(APPLICATION_XML_PATH)) {
            ApplicationDescriptor applicationXml =
                Descriptors.importAs(ApplicationDescriptor.class)
                    .from(applicationArchive.get(APPLICATION_XML_PATH).getAsset().openStream());
            applicationXml.webModule(wrappedWar.getName(), wrappedWar.getName());

            // SHRINKWRAP-187: descriptor handling is too eager on not allowing overrides, so delete it first
            applicationArchive.delete(APPLICATION_XML_PATH);
            applicationArchive.setApplicationXML(new StringAsset(applicationXml.exportAsString()));
        }
    }

    applicationArchive.addAsLibraries(auxiliaryArchives.toArray(new Archive<?>[0]));
    return applicationArchive;
}
@Test
public void manifestCreatedInWar() {
    // When
    final Archive<?> archive =
        ShrinkWrap.create(MavenImporter.class)
            .loadPomFromFile("src/it/war-sample/pom.xml")
            .importBuildOutput()
            .as(WebArchive.class);

    // Then
    assertThat(archive.getContent(), contains("META-INF/MANIFEST.MF"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("Created-By", "ShrinkWrap Maven Resolver"));
}
/**
 * Creates a temporary folder, exports the archive into it, and returns the URL of the exported file.
 *
 * @param archive Archive to export
 * @return URL pointing at the exported archive on disk
 */
public static URL toURL(final Archive<?> archive) {
    // Create a randomly named temp file, then delete it and reuse the name as a directory
    try {
        File root = File.createTempFile("arquillian", archive.getName());
        root.delete();
        root.mkdirs();

        File deployment = new File(root, archive.getName());
        deployment.deleteOnExit();

        archive.as(ZipExporter.class).exportTo(deployment, true);
        return deployment.toURI().toURL();
    } catch (Exception e) {
        throw new RuntimeException("Could not export deployment to temp", e);
    }
}
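/*
 * Usage sketch (not part of the original sources): mirrors how the deploy(...) method earlier in
 * this section consumes the helper. The archive name "sketch.war" and the HelloServlet class are
 * hypothetical placeholders; deploy(String, URL) is assumed to be the container adapter's own overload.
 */
WebArchive war = ShrinkWrap.create(WebArchive.class, "sketch.war")
    .addClass(HelloServlet.class);             // hypothetical servlet class
URL deploymentUrl = ShrinkWrapUtil.toURL(war); // export to a temp dir and obtain its URL
deploy(war.getName(), deploymentUrl);          // hand the name/URL pair to the container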
private static String readFileFromArchive(Archive archive, String path) throws IOException {
    // Open the asset at the given path and read it fully as UTF-8 text
    try (InputStream in = archive.get(path).getAsset().openStream()) {
        BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        return reader.lines().collect(Collectors.joining());
    }
}
@SuppressWarnings("unchecked") default T mergeIgnoringDuplicates(Archive<?> source, String base, Filter<ArchivePath> filter) { if (!base.startsWith("/")) { base = "/" + base; } // Get existing contents from source archive final Map<ArchivePath, Node> sourceContent = source.getContent(); // Add each asset from the source archive for (final Map.Entry<ArchivePath, Node> contentEntry : sourceContent.entrySet()) { final Node node = contentEntry.getValue(); ArchivePath nodePath = contentEntry.getKey(); if (!nodePath.get().startsWith(base)) { continue; } if (!filter.include(nodePath)) { continue; } if (contains(nodePath)) { continue; } nodePath = new BasicPath(nodePath.get().replaceFirst(base, "")); // Delegate if (node.getAsset() == null) { addAsDirectory(nodePath); } else { add(node.getAsset(), nodePath); } } return (T) this; }
private void checkFractionsPresent(Archive uberjar) throws IOException {
    assertThat(uberjar.contains("META-INF/wildfly-swarm-manifest.yaml")).isTrue();

    String manifestContent = readFileFromArchive(uberjar, "META-INF/wildfly-swarm-manifest.yaml");

    for (String fraction : testingProject.fractionsThatShouldBePresent()) {
        assertThat(manifestContent).contains("org.wildfly.swarm." + fraction);
        assertThat(manifestContent).contains("org.wildfly.swarm:" + fraction);
        assertThat(uberjar.contains("m2repo/org/wildfly/swarm/" + fraction)).isTrue();
    }

    for (String fraction : testingProject.fractionsThatShouldBeMissing()) {
        assertThat(manifestContent).doesNotContain("org.wildfly.swarm." + fraction);
        assertThat(manifestContent).doesNotContain("org.wildfly.swarm:" + fraction);
        assertThat(uberjar.contains("m2repo/org/wildfly/swarm/" + fraction)).isFalse();
    }
}
/*
 * (non-Javadoc)
 * @see org.jboss.arquillian.spi.client.container.DeployableContainer#undeploy(org.jboss.arquillian.spi.client.deployment.Deployment[])
 */
@Override
public void undeploy(final Archive<?> archive) throws DeploymentException {
    try {
        undeploy(archive.getName());
    } catch (Exception e) {
        throw new DeploymentException("Could not undeploy from container", e);
    }
}
@Test
public void suppliedManifestHasPrecedence() {
    // When
    final Archive<?> archive =
        ShrinkWrap.create(MavenImporter.class)
            .loadPomFromFile("src/it/jar-with-mf-sample/pom.xml")
            .importBuildOutput()
            .as(JavaArchive.class);

    // Then
    assertThat(archive.getContent(), contains("META-INF/MANIFEST.MF"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(), hasManifestEntry("Created-By", "User"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry(Attributes.Name.MANIFEST_VERSION.toString(), "1.0"));
}
@Override
public ArtifactSpec resolve(ArtifactSpec spec) throws Exception {
    File resolved = resolvedArtifacts.get(spec);
    if (resolved == null) {
        Archive archive = artifacts.get(spec);
        if (archive != null) {
            resolved = File.createTempFile(spec.artifactId(), ".jar");
            resolved.delete();
            resolved.deleteOnExit();
            archive.as(ZipExporter.class).exportTo(resolved);
            this.resolvedArtifacts.put(spec, resolved);
        }
    }
    spec.file = resolved;
    return spec;
}
@Test
public void shouldGenerateDependencies() throws Exception {
    Archive<?> archive = new TestNGDeploymentAppender().createAuxiliaryArchive();

    Assert.assertTrue(
        "Should have added TestRunner SPI",
        archive.contains(
            ArchivePaths.create(
                "/META-INF/services/org.jboss.arquillian.container.test.spi.TestRunner")));

    Assert.assertTrue(
        "Should have added TestRunner Impl",
        archive.contains(
            ArchivePaths.create("/org/jboss/arquillian/testng/container/TestNGTestRunner.class")));

    System.out.println(archive.toString(true));
}
/**
 * Test to make sure an archive can be exported to a file and all contents are correctly located.
 *
 * @throws IOException if an I/O error occurs
 */
@Test
public void testExportToFile() throws IOException {
    log.info("testExportToFile");

    // Get a temp directory for the test
    File tempDirectory = createTempDirectory("testExportToFile");

    // Get an archive instance
    Archive<?> archive = createArchiveWithAssets();

    // Export as File
    final File exported = new File(tempDirectory, archive.getName());
    this.exportAsFile(archive, exported, true);

    // Roundtrip assertion
    this.ensureInExpectedForm(exported);
}
/**
 * Test to make sure an archive can be exported and all contents are correctly located.
 *
 * @throws Exception
 */
@Test
public void testExport() throws Exception {
    log.info("testExport");

    // Get an archive instance
    Archive<?> archive = createArchiveWithAssets();

    // Export as InputStream
    final InputStream exportStream = this.exportAsInputStream(archive);

    // Validate
    final File tempDirectory = createTempDirectory("testExport");
    final File serialized = new File(tempDirectory, archive.getName());
    final FileOutputStream out = new FileOutputStream(serialized);
    IOUtil.copyWithClose(exportStream, out);
    ensureInExpectedForm(serialized);
}
@Test
public void manifestWithManifestSections() {
    // When
    final Archive<?> archive =
        ShrinkWrap.create(MavenImporter.class)
            .loadPomFromFile("src/it/jar-with-mf-sample/pom-e.xml")
            .importBuildOutput()
            .as(JavaArchive.class);

    // Then
    assertThat(archive.getContent(), contains("META-INF/MANIFEST.MF"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("Created-By", "ShrinkWrap Maven Resolver"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(), hasManifestEntry("Specification-Title"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("Specification-Vendor", "Arquillian"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("MyFirstSection", "Foo", "bar"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("MySecondSection", "Foo2", "bar2"));
}
/**
 * Test to make sure an archive can be exported to an {@link OutputStream} and all contents are
 * correctly located.
 *
 * @throws IOException if an I/O error occurs
 */
@Test
public void testExportToOutStream() throws IOException {
    log.info("testExportToOutStream");

    // Get a temp directory for the test
    final File tempDirectory = createTempDirectory("testExportToOutStream");

    // Get an archive instance
    final Archive<?> archive = createArchiveWithAssets();

    // Export as OutputStream and flush to a file manually
    final File serializedArchive = new File(tempDirectory, archive.getName());
    final OutputStream out = new FileOutputStream(serializedArchive);
    this.exportToOutputStream(archive, out);

    // Validate
    this.ensureInExpectedForm(serializedArchive);
}
/**
 * Exports the specified archive as a {@link File}, overwriting an existing one if specified.
 *
 * @param archive the archive to export
 * @param file the target file
 * @param overwrite whether an existing file may be overwritten
 */
private void exportAsFile(final Archive<?> archive, final File file, final boolean overwrite) {
    // Precondition checks
    assert file != null : "file must be specified";
    assert archive != null : "archive must be specified";

    // Export
    final Class<? extends StreamExporter> exporter = this.getExporterClass();
    assert exporter != null : "Exporter class must be specified";
    archive.as(exporter).exportTo(file, overwrite);
}
protected File export(Archive<?> archive) throws Exception {
    if (archive instanceof WebArchive == false) {
        throw new IllegalArgumentException("Can only handle .war deployments: " + archive);
    }

    Node dockerFile = archive.get("Dockerfile");
    if (dockerFile == null) {
        if (configuration.getFrom() == null) {
            throw new IllegalArgumentException("Missing Docker's FROM value!");
        }
        log.info("Using Docker FROM: " + configuration.getFrom());

        String content = "FROM " + configuration.getFrom() + "\n" + "ADD . /app" + "\n";
        archive.add(new StringAsset(content), "Dockerfile");
    }
    return super.export(archive);
}
/**
 * Attempt to get the asset from a nested archive.
 *
 * @param path the path to resolve, possibly pointing into a nested archive
 * @return the node at the given path, or null if no nested archive contains it
 */
private Node getNestedNode(ArchivePath path) {
    // Iterate through nested archives
    for (Entry<ArchivePath, ArchiveAsset> nestedArchiveEntry : nestedArchives.entrySet()) {
        ArchivePath archivePath = nestedArchiveEntry.getKey();
        ArchiveAsset archiveAsset = nestedArchiveEntry.getValue();

        // Check to see if the requested path starts with the nested archive path
        if (startsWith(path, archivePath)) {
            Archive<?> nestedArchive = archiveAsset.getArchive();

            // Get the asset path from within the nested archive
            ArchivePath nestedAssetPath = getNestedPath(path, archivePath);

            // Recurse the call to the nested archive
            return nestedArchive.get(nestedAssetPath);
        }
    }
    return null;
}
@Test
public void manifestWithDefaultImplementationEntries() {
    // When
    final Archive<?> archive =
        ShrinkWrap.create(MavenImporter.class)
            .loadPomFromFile("src/it/jar-with-mf-sample/pom-b.xml")
            .importBuildOutput()
            .as(JavaArchive.class);

    // Then
    assertThat(archive.getContent(), contains("META-INF/MANIFEST.MF"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        hasManifestEntry("Created-By", "ShrinkWrap Maven Resolver"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(), hasManifestEntry("Implementation-Title"));
    assertThat(
        archive.get("META-INF/MANIFEST.MF").getAsset(),
        not(hasManifestEntry("Implementation-Vendor")));
}
private ProgressObject jsr88Deploy(Archive<?> archive) throws Exception {
    // Get the deployment manager and the distribution targets
    DeploymentManager manager = getDeploymentManager();
    Target[] targets = manager.getTargets();
    assertEquals(1, targets.length);

    InputStream deploymentPlan = createDeploymentPlan(archive.getName());

    // Deploy the test archive
    InputStream inputStream = archive.as(ZipExporter.class).exportAsInputStream();
    ProgressObject progress = manager.distribute(targets, inputStream, deploymentPlan);
    StateType state = awaitCompletion(progress, 5000);

    if (state == StateType.COMPLETED) {
        progress = manager.start(progress.getResultTargetModuleIDs());
        awaitCompletion(progress, 5000);
    }

    return progress;
}
/** Adds Warp archive to the protocol archive to make it available for WARs and EARs. */
@Override
public void process(TestDeployment testDeployment, Archive<?> protocolArchive) {
    final TestClass testClass = this.testClass.get();
    final Archive<?> applicationArchive = testDeployment.getApplicationArchive();

    if (WarpCommons.isWarpTest(testClass.getJavaClass())) {
        if (!Validate.isArchiveOfType(WebArchive.class, protocolArchive)) {
            throw new IllegalArgumentException(
                "Protocol archives of type " + protocolArchive.getClass()
                    + " not supported by Warp. Please use the Servlet 3.0 protocol.");
        }

        addWarpPackageToDeployment(protocolArchive.as(WebArchive.class));
        addWarpExtensionsDeployment(protocolArchive.as(WebArchive.class));

        removeTestClassFromDeployment(applicationArchive, testClass);
    }
}
private Set<ArchivePath> getLibraries(Archive<?> archive) {
    return archive
        .getContent(
            new Filter<ArchivePath>() {
                public boolean include(ArchivePath arg0) {
                    String path = arg0.get();
                    return path.endsWith(".jar");
                }
            })
        .keySet();
}
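/**
 * Alternative sketch (not part of the original sources): on Java 8+ the ShrinkWrap Filter is a
 * single-method interface, so the same selection can be written as a lambda. The behavior is
 * identical to the anonymous-class version above; only the syntax differs.
 */
private Set<ArchivePath> getLibrariesLambdaSketch(Archive<?> archive) {
    // Select every entry whose path ends in ".jar"
    return archive
        .getContent(path -> path.get().endsWith(".jar"))
        .keySet();
}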
/**
 * Exports the specified archive to an {@link OutputStream}.
 *
 * @param archive the archive to export
 * @param out the stream to write to; closed by this method
 */
private void exportToOutputStream(final Archive<?> archive, final OutputStream out) {
    assert archive != null : "archive must be specified";
    assert out != null : "outstream must be specified";

    // Export
    final Class<? extends StreamExporter> exporter = this.getExporterClass();
    assert exporter != null : "Exporter class must be specified";
    try {
        archive.as(exporter).exportTo(out);
    } finally {
        try {
            out.close();
        } catch (final IOException ioe) {
            log.warning("Could not close " + out + ": " + ioe);
        }
    }
}