/**
 * Recursively walks {@code directory}, adding one {@link ZipEntry} per regular file to
 * {@code zipEntriesBuilder}, keyed by the file. Entry names are built from {@code currentPath}
 * using '/' separators. Files larger than {@code maxDeflatedBytes}, or whose extension is in
 * {@code EXTENSIONS_NOT_TO_DEFLATE}, are marked {@link ZipEntry#STORED} (uncompressed).
 *
 * @param directory the directory to walk; must be listable
 * @param currentPath the zip-relative path prefix for entries under this directory ("" at the root)
 * @param zipEntriesBuilder receives the file-to-entry mapping
 * @throws IOException if the directory contents cannot be listed or a file cannot be read
 */
private void addDirectoryToZipEntryList(
    File directory, String currentPath, ImmutableMap.Builder<File, ZipEntry> zipEntriesBuilder)
    throws IOException {
  Preconditions.checkNotNull(currentPath);

  File[] children = directory.listFiles();
  if (children == null) {
    // File.listFiles() returns null (not an empty array) on I/O error or if the path is not a
    // directory; the original code would have thrown an opaque NPE here.
    throw new IOException("Unable to list contents of " + directory);
  }
  for (File inputFile : children) {
    String childPath = currentPath + (currentPath.isEmpty() ? "" : "/") + inputFile.getName();
    if (inputFile.isDirectory()) {
      addDirectoryToZipEntryList(inputFile, childPath, zipEntriesBuilder);
    } else {
      ZipEntry nextEntry = new ZipEntry(childPath);
      long fileLength = inputFile.length();
      if (fileLength > maxDeflatedBytes
          || EXTENSIONS_NOT_TO_DEFLATE.contains(Files.getFileExtension(inputFile.getName()))) {
        // STORED entries must declare size, compressed size, and CRC before being written.
        // Reuse the cached fileLength (the original re-read inputFile.length(), which could
        // yield a size/compressedSize mismatch if the file changed between calls).
        nextEntry.setMethod(ZipEntry.STORED);
        nextEntry.setCompressedSize(fileLength);
        nextEntry.setSize(fileLength);
        HashCode crc = ByteStreams.hash(Files.newInputStreamSupplier(inputFile), Hashing.crc32());
        nextEntry.setCrc(crc.padToLong());
      }
      zipEntriesBuilder.put(inputFile, nextEntry);
    }
  }
}
/** Test of getBlob method, of class FilesystemAsyncBlobStore. */ public void testGetBlob() throws IOException { String blobKey = TestUtils.createRandomBlobKey(); GetOptions options = null; Blob resultBlob; blobStore.createContainerInLocation(null, CONTAINER_NAME); resultBlob = blobStore.getBlob(CONTAINER_NAME, blobKey, options); assertNull(resultBlob, "Blob exists"); // create blob TestUtils.createBlobsInContainer(CONTAINER_NAME, blobKey); resultBlob = blobStore.getBlob(CONTAINER_NAME, blobKey, options); assertNotNull(resultBlob, "Blob exists"); // checks file content InputSupplier<FileInputStream> expectedFile = Files.newInputStreamSupplier(new File(TARGET_CONTAINER_NAME, blobKey)); assertTrue( ByteStreams.equal(expectedFile, resultBlob.getPayload()), "Blob payload differs from file content"); // metadata are verified in the test for blobMetadata, so no need to // perform a complete test here assertNotNull(resultBlob.getMetadata(), "Metadata null"); MutableBlobMetadata metadata = resultBlob.getMetadata(); assertEquals(blobKey, metadata.getName(), "Wrong blob metadata"); }
/**
 * Zips the contents of {@code inputDirectoryPath} into {@code outputZipPath}.
 *
 * @return 0 on success, 1 if an I/O error occurred (stack trace written to stderr)
 */
@Override
public int execute(ExecutionContext context) {
  // The input must already exist as a directory; fail fast otherwise.
  File inputDirectory = inputDirectoryPath.toFile();
  Preconditions.checkState(
      inputDirectory.exists() && inputDirectory.isDirectory(),
      "%s must be a directory.",
      inputDirectoryPath);

  try {
    // Collect every file under the input directory, mapped to its zip entry.
    ImmutableMap.Builder<File, ZipEntry> builder = ImmutableMap.builder();
    addDirectoryToZipEntryList(inputDirectory, "", builder);
    ImmutableMap<File, ZipEntry> entries = builder.build();

    // Only create the archive when there is something to put in it.
    if (!entries.isEmpty()) {
      try (CustomZipOutputStream out =
          ZipOutputStreams.newOutputStream(outputZipPath.toFile())) {
        for (Map.Entry<File, ZipEntry> entry : entries.entrySet()) {
          out.putNextEntry(entry.getValue());
          ByteStreams.copy(Files.newInputStreamSupplier(entry.getKey()), out);
          out.closeEntry();
        }
      }
    }
  } catch (IOException e) {
    e.printStackTrace(context.getStdErr());
    return 1;
  }
  return 0;
}
public void testWritePayloadOnFile() throws IOException { String blobKey; File sourceFile; FilePayload filePayload; blobKey = TestUtils.createRandomBlobKey("writePayload-", ".img"); sourceFile = TestUtils.getImageForBlobPayload(); filePayload = new FilePayload(sourceFile); Blob blob = storageStrategy.newBlob(blobKey); blob.setPayload(filePayload); // write files storageStrategy.putBlob(CONTAINER_NAME, blob); // verify that the files is equal File blobFullPath = new File(TARGET_CONTAINER_NAME, blobKey); InputSupplier<FileInputStream> expectedInput = Files.newInputStreamSupplier(sourceFile); InputSupplier<FileInputStream> actualInput = Files.newInputStreamSupplier(blobFullPath); assertTrue(ByteStreams.equal(expectedInput, actualInput), "Files are not equal"); }
/**
 * Copies the named resource into a temporary file (deleted on JVM exit) and returns it.
 *
 * <p>The resource is loaded from the classpath relative to {@link GroupLensRecommender};
 * if it is not found there, the source tree under {@code src/main/java} is tried instead.
 *
 * @param resourceName resource path to read
 * @return a temp file containing the resource's bytes
 * @throws IOException if the resource cannot be read or the temp file cannot be written
 */
public static File readResourceToTempFile(String resourceName) throws IOException {
  InputSupplier<? extends InputStream> input;
  try {
    URL resourceURL = Resources.getResource(GroupLensRecommender.class, resourceName);
    input = Resources.newInputStreamSupplier(resourceURL);
  } catch (IllegalArgumentException iae) {
    // NOTE(review): the concatenation assumes resourceName begins with '/' so that
    // "src/main/java" + resourceName forms a valid relative path — confirm with callers.
    input = Files.newInputStreamSupplier(new File("src/main/java" + resourceName));
  }

  // Copy into a temp file that is cleaned up on JVM exit.
  File tempFile = File.createTempFile("taste", null);
  tempFile.deleteOnExit();
  Files.copy(input, tempFile);
  return tempFile;
}
@Override public void run() { Asset asset; while ((asset = filesLeft.poll()) != null) { for (int i = 1; i < MAX_TRIES + 1; i++) { try { File file = new File(getAssetsDir(), "objects/" + asset.path); // does exist? create if (!file.exists()) { file.getParentFile().mkdirs(); file.createNewFile(); } File localMc = new File(minecraftDir, asset.path); BufferedInputStream stream; // check for local copy if (localMc.exists() && Constants.hash(localMc, "SHA1").equals(asset.hash)) // if so, copy stream = new BufferedInputStream(Files.newInputStreamSupplier(localMc).getInput()); else // otherwise download stream = new BufferedInputStream( new URL(Constants.ASSETS_URL + "/" + asset.path).openStream()); Files.write(ByteStreams.toByteArray(stream), file); stream.close(); // check hash... String hash = Constants.hash(file, "SHA1"); if (asset.hash.equals(hash)) break; // hashes are fine; else { file.delete(); getLogger() .error("download attempt " + i + " failed! : " + asset.hash + " != " + hash); } } catch (Exception e) { getLogger().error("Error downloading asset: " + asset.path); e.printStackTrace(); if (!errored) errored = true; } } } }
/**
 * Uploads every jar on the current classpath to {@code distributedClassPath} (skipping jars
 * already tracked in {@code existing}, except SNAPSHOT jars which are always re-uploaded) and
 * registers each one on the job's distributed-cache classpath. No-op for local filesystems.
 *
 * @param distributedClassPath destination directory on the job's filesystem
 * @param job the Hadoop job whose configuration and classpath are updated
 * @throws IOException on filesystem errors
 */
public static void setupClasspath(Path distributedClassPath, Job job) throws IOException {
  // The internal override property takes precedence over the JVM classpath.
  String classpathProperty = System.getProperty("druid.hadoop.internal.classpath");
  if (classpathProperty == null) {
    classpathProperty = System.getProperty("java.class.path");
  }
  String[] classpathEntries = classpathProperty.split(File.pathSeparator);

  final Configuration conf = job.getConfiguration();
  final FileSystem fs = distributedClassPath.getFileSystem(conf);

  // Nothing to distribute when the "remote" filesystem is actually local.
  if (fs instanceof LocalFileSystem) {
    return;
  }

  for (String entry : classpathEntries) {
    File jarFile = new File(entry);
    if (!jarFile.getName().endsWith(".jar")) {
      continue;
    }
    final Path hdfsPath = new Path(distributedClassPath, jarFile.getName());
    if (!existing.contains(hdfsPath)) {
      // SNAPSHOT jars are always refreshed; release jars only when missing remotely.
      if (jarFile.getName().matches(".*SNAPSHOT(-selfcontained)?\\.jar$")
          || !fs.exists(hdfsPath)) {
        log.info("Uploading jar to path[%s]", hdfsPath);
        ByteStreams.copy(
            Files.newInputStreamSupplier(jarFile),
            new OutputSupplier<OutputStream>() {
              @Override
              public OutputStream getOutput() throws IOException {
                return fs.create(hdfsPath);
              }
            });
      }
      existing.add(hdfsPath);
    }
    DistributedCache.addFileToClassPath(hdfsPath, conf, fs);
  }
}
/** Returns a supplier that opens this entry's backing {@code file} for reading on demand. */
@Override
public InputSupplier<? extends InputStream> getContentSupplier() {
  File source = file;
  return Files.newInputStreamSupplier(source);
}