/**
 * Lazily materializes this archive entry as a file in the temp directory.
 * The extraction happens at most once; subsequent calls return the cached file.
 */
public File getFile() {
  if (file != null) {
    return file;
  }
  File extracted = new File(tmpDir, entry.getName());
  copyTo(extracted);
  file = extracted;
  return file;
}
/**
 * Extracts every regular-file entry of the tar stream under {@code target}, creating parent
 * directories as needed. Directory entries are skipped (their files imply them).
 *
 * <p>Fixes over the previous version: entry paths are validated so a malicious archive
 * containing names like {@code ../../evil} cannot write outside {@code target} ("tar slip"),
 * and a failed {@code mkdirs()} is reported instead of silently ignored.
 *
 * @param source the raw tar stream; closed by this method
 * @param target the destination directory
 * @throws IllegalStateException wrapping any {@link IOException}
 */
@Override
public void unarchive(InputStream source, File target) {
  try (TarArchiveInputStream tIn = new TarArchiveInputStream(source)) {
    // Canonical target prefix used to detect entries escaping the destination.
    String targetPrefix = target.getCanonicalPath() + File.separator;
    TarArchiveEntry entry;
    while ((entry = tIn.getNextTarEntry()) != null) {
      if (entry.isDirectory()) {
        continue;
      }
      File curfile = new File(target, entry.getName());
      // Tar-slip guard: reject entries whose resolved path leaves the target directory.
      if (!curfile.getCanonicalPath().startsWith(targetPrefix)) {
        throw new IOException("Tar entry outside of target directory: " + entry.getName());
      }
      File parent = curfile.getParentFile();
      if (parent != null && !parent.exists() && !parent.mkdirs()) {
        throw new IOException("Could not create directory: " + parent);
      }
      try (OutputStream out = new FileOutputStream(curfile)) {
        IOUtils.copy(tIn, out);
      }
    }
  } catch (IOException e) {
    throw new IllegalStateException(e);
  }
}
/** * Move to the next tar entry. * * @param rootDir an entry path * @return true if a next tar entry can be read, or if this entry name is a sub-folder of rootDir */ protected boolean hasNext(final String rootDir) { try { /* * if reader.available() is not equal to 0, then it means that this entry * has been loaded, but not read. */ while (reader.available() == 0 && (tarEntry = reader.getNextTarEntry()) == null) { // Next tar entry if (++inputPos >= input.length) { reader.close(); return false; } // Next archive file reader.close(); logger.info("Reading dump: {}", this.input[inputPos]); reader = getTarInputStream(input[inputPos]); } } catch (IOException e) { logger.error("Error while reading the input: {}\n{}", input[inputPos], e); } /* * When returning from this method, the inputstream is positionned at a regular file, * i.e., metadata, outgoing-triples.nt or incoming-triples.nt. */ if (tarEntry.isDirectory()) { return hasNext(rootDir); } return rootDir == null || tarEntry.getName().startsWith(rootDir) ? true : false; }
public static List<File> untar(final File inputtar, final File outputDir) throws IOException, ArchiveException { final List<File> unpackedFiles = new ArrayList<File>(); if (OMWPP.DEBUG) { Log.i("OMWPPtar", String.format("Unzipping tar file %s.", inputtar.getAbsoluteFile())); } final InputStream is = new FileInputStream(inputtar); final TarArchiveInputStream tarInputStream = (TarArchiveInputStream) new TarArchiveInputStream(is); TarArchiveEntry entry = null; while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Read entry: " + entry.getName()); String filename = entry.getName().substring(entry.getName().lastIndexOf("/") + 1); if (filename.toLowerCase().endsWith(".png") || filename.toLowerCase().endsWith(".jpg")) { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Is background, extracting."); final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); unpackedFiles.add(outputFile); } else if (filename.toLowerCase().endsWith(".gz")) { // RECURSIVE CALL final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); gunzip(outputFile, SDROOT); outputFile.delete(); } else if (filename.toLowerCase().endsWith(".tar")) { // RECURSIVE CALL final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); untar(outputFile, SDROOT); outputFile.delete(); } else { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Is not background, skipping."); } } tarInputStream.close(); return unpackedFiles; }
/**
 * Integration test: uploads and starts an application, downloads its droplet, and checks
 * that the droplet (a gzip-compressed tar archive) contains both a Staticfile and an
 * index.html entry.
 */
@Test
public void downloadDroplet() {
  this.applicationId
      .then(this::uploadAndStartApplication)
      .flatMap(
          applicationId ->
              this.cloudFoundryClient
                  .applicationsV2()
                  .downloadDroplet(
                      DownloadApplicationDropletRequest.builder()
                          .applicationId(applicationId)
                          .build()))
      .as(Stream::from)
      // Accumulate the streamed droplet chunks into one in-memory byte buffer.
      .reduceWith(ByteArrayOutputStream::new, ApplicationsTest::collectIntoByteArrayInputStream)
      .map(
          bytes -> {
            boolean staticFile = false;
            boolean indexFile = false;
            try {
              // The droplet bytes are a .tar.gz; only entry names are inspected.
              TarArchiveInputStream tis =
                  new TarArchiveInputStream(
                      new GZIPInputStream(new ByteArrayInputStream(bytes.toByteArray())));
              for (TarArchiveEntry entry = tis.getNextTarEntry();
                  entry != null;
                  entry = tis.getNextTarEntry()) {
                if (entry.getName().contains("Staticfile")) {
                  staticFile = true;
                }
                if (entry.getName().contains("index.html")) {
                  indexFile = true;
                }
              }
              // NOTE(review): tis is never closed; harmless for an in-memory stream but
              // try-with-resources would be tidier.
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
            return staticFile && indexFile;
          })
      .subscribe(testSubscriber().assertEquals(true));
}
/** * Unpacks a workflow into specified directory. To be more precise: workflow package should * contain only a single directory with the workflow. This method will extract its contents into * target directory. * * @param target new workflow root directory */ public void extract(File target) throws IOException { BufferedInputStream in = new BufferedInputStream(new FileInputStream(file)); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry; while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { // The archive should contain only a single directory: we want to unpack its content. final File outputFile = new File(target, entry.getName().substring(entry.getName().indexOf('/') + 1)); if (entry.isDirectory()) { FileUtils.forceMkdir(outputFile); } else { OutputStream outputFileStream = null; try { outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarIn, outputFileStream); } finally { IOUtils.closeQuietly(outputFileStream); } } } }
/**
 * Reads the version string from a gzipped tar artifact: the content of the first
 * non-directory entry whose name ends with {@code VERSION_FILE}, trimmed.
 *
 * <p>Fixes over the previous version: {@code artifact.array()} threw for read-only buffers
 * and ignored the buffer's position/arrayOffset for sliced buffers; the readable bytes are
 * now copied via a duplicate so the caller's position is untouched. The tar stream is also
 * part of the resource list so it is closed deterministically.
 *
 * @param artifact a .tar.gz payload; the buffer's position is not modified
 * @return the trimmed version string, or null if no version file entry was found
 * @throws IOException if decompression or archive reading fails
 */
public static String getVersionFromArtifact(ByteBuffer artifact) throws IOException {
  // Copy only the readable window of the buffer, via a duplicate, so this works for
  // read-only and sliced buffers and leaves the caller's position untouched.
  byte[] bytes = new byte[artifact.remaining()];
  artifact.duplicate().get(bytes);
  String versionNumber = null;
  try (CompressorInputStream uncompressedInput =
          new GzipCompressorInputStream(new ByteArrayInputStream(bytes));
      TarArchiveInputStream input = new TarArchiveInputStream(uncompressedInput)) {
    TarArchiveEntry entry;
    while ((entry = input.getNextTarEntry()) != null) {
      if (!entry.isDirectory() && entry.getName().endsWith(VERSION_FILE)) {
        versionNumber = IOUtils.toString(input, Charsets.UTF_8).trim();
        break;
      }
    }
  }
  return versionNumber;
}
public TarArchiveEntry map(final TarArchiveEntry entry) { final String name = entry.getName(); final TarArchiveEntry newEntry = new TarArchiveEntry(prefix + '/' + Utils.stripPath(strip, name), true); // Set ownership if (uid > -1) { newEntry.setUserId(uid); } else { newEntry.setUserId(entry.getUserId()); } if (gid > -1) { newEntry.setGroupId(gid); } else { newEntry.setGroupId(entry.getGroupId()); } if (user != null) { newEntry.setUserName(user); } else { newEntry.setUserName(entry.getUserName()); } if (group != null) { newEntry.setGroupName(group); } else { newEntry.setGroupName(entry.getGroupName()); } // Set permissions if (newEntry.isDirectory()) { if (dirMode > -1) { newEntry.setMode(dirMode); } else { newEntry.setMode(entry.getMode()); } } else { if (fileMode > -1) { newEntry.setMode(fileMode); } else { newEntry.setMode(entry.getMode()); } } newEntry.setSize(entry.getSize()); return newEntry; }
/* * Given a zip stream, unzips it and returns an input stream to the desired data file. * * @param importDataRecord ImportDataRecord * @param is InputStream * @return InputStream */ private InputStream readContent(ImportDataRecord importDataRecord, InputStream is) throws Exception { InputStream toReturn = null; try { // decompress .gz file if (LOG.isInfoEnabled()) { LOG.info("readContent(), decompressing: " + importDataRecord.getCanonicalPathToData()); } InputStream unzippedContent = new GzipCompressorInputStream(is); // if tarball, untar if (importDataRecord.getCanonicalPathToData().toLowerCase().endsWith("tar.gz")) { if (LOG.isInfoEnabled()) { LOG.info("readContent(), gzip file is a tarball, untarring"); } TarArchiveInputStream tis = new TarArchiveInputStream(unzippedContent); TarArchiveEntry entry = null; while ((entry = tis.getNextTarEntry()) != null) { String entryName = entry.getName(); String dataFile = importDataRecord.getDataFilename(); if (dataFile.contains(DatatypeMetadata.TUMOR_TYPE_TAG)) { dataFile = dataFile.replaceAll( DatatypeMetadata.TUMOR_TYPE_TAG, importDataRecord.getTumorType().toUpperCase()); } if (entryName.contains(dataFile)) { if (LOG.isInfoEnabled()) { LOG.info("Processing tar-archive: " + importDataRecord.getDataFilename()); } toReturn = tis; break; } } } else { toReturn = unzippedContent; } } catch (Exception e) { throw e; } // outta here return toReturn; }
/**
 * Unpack the content read from <i>source</i> into <i>targetFolder</i>. If the
 * <i>skipTopFolder</i> is set, then don't assume that the archive contains one single folder and
 * unpack the content of that folder, not including the folder itself.
 *
 * @param source The input source. Must be in <i>TAR</i> format.
 * @param targetFolder The destination folder for the unpack. Not used when a <tt>fileCatcher</tt>
 *     is provided
 * @param skipTopFolder Set to <code>true</code> to unpack beneath the top folder of the archive.
 *     The archive must consist of one single folder and nothing else in order for this to work.
 * @param fileCatcher Used when specific files should be picked from the archive without writing
 *     them to disk. Can be <tt>null</tt>.
 * @throws IOException
 */
public static void unpack(
    InputStream source, File targetFolder, boolean skipTopFolder, FileCatcher fileCatcher)
    throws IOException {
  String topFolderName = null;
  // Mode bits collected per file, applied in one pass by chmod() after extraction.
  Map<File, Map<Integer, List<String>>> chmodMap = new HashMap<File, Map<Integer, List<String>>>();
  TarArchiveInputStream in = new TarArchiveInputStream(source);
  try {
    TarArchiveEntry te = in.getNextTarEntry();
    if (te == null) {
      throw new IOException("No entry in the tar file");
    }
    do {
      // PAX global headers carry archive metadata, not file content.
      if (te.isGlobalPaxHeader()) continue;
      String name = te.getName();
      if (name.startsWith("./._")) // MacOS specific extended attributes addition. Just skip it
      continue;
      if (skipTopFolder) {
        // Every entry must share one common top folder; remember it on first sight
        // and reject archives that have entries outside it.
        int firstSlash = name.indexOf('/');
        if (firstSlash < 0) throw new IOException("Archive doesn't contain one single folder");
        String tfName = name.substring(0, firstSlash);
        if (topFolderName == null) topFolderName = tfName;
        else if (!tfName.equals(topFolderName))
          throw new IOException("Archive doesn't contain one single folder");
        name = name.substring(firstSlash + 1);
      }
      if (name.length() == 0) continue;
      // Normalize an empty link name to null so "has a link" checks are simple.
      String linkName = te.getLinkName();
      if (linkName != null) {
        if (linkName.trim().equals("")) linkName = null;
      }
      if (fileCatcher != null) {
        // Catcher mode: nothing is written to disk; hand matching regular files to the
        // catcher, and stop entirely once it signals it has what it wants.
        if (linkName == null && !te.isDirectory() && fileCatcher.accept(name)) {
          if (fileCatcher.catchData(name, in))
            // We're done here
            return;
        }
        continue;
      }
      // NOTE(review): name is used unvalidated below — a crafted entry like "../x"
      // could escape targetFolder (zip/tar-slip); consider a canonical-path check.
      File outFile = new File(targetFolder, name);
      if (linkName != null) {
        if (!OsUtil.link(targetFolder, name, te.getLinkName()))
          throw new IOException(
              "Archive contains links but they are not supported on this platform");
      } else {
        if (te.isDirectory()) {
          // NOTE(review): mkdirs() result ignored here and below — TODO confirm intended.
          outFile.mkdirs();
        } else {
          outFile.getParentFile().mkdirs();
          OutputStream target = new FileOutputStream(outFile);
          StreamUtil.copy(in, target);
          target.close();
          // Preserve the entry's modification time on the extracted file.
          outFile.setLastModified(te.getModTime().getTime());
        }
        registerChmodFile(chmodMap, targetFolder, Integer.valueOf(te.getMode()), name);
      }
    } while ((te = in.getNextTarEntry()) != null);
  } finally {
    StreamUtil.close(in);
  }
  // Apply all recorded permissions in one batch.
  chmod(chmodMap);
}
/**
 * Builds a Debian APT repository from the .deb files found in {@code repoDir}: for each
 * package it records size and digests, extracts the "control" file from the embedded
 * control.tar.gz, and finally writes a gzipped Packages index plus a Release file.
 *
 * @throws RuntimeException if attributes are missing, a package cannot be processed, or
 *     the index files cannot be written
 */
public void execute() {
  if (repoDir == null) {
    log("repoDir attribute is empty !", LogLevel.ERR.getLevel());
    throw new RuntimeException("Bad attributes for apt-repo task");
  }
  log("repo dir: " + repoDir);
  File repoFolder = new File(repoDir);
  if (!repoFolder.exists()) {
    repoFolder.mkdirs();
  }
  // Collect all .deb files in the repository folder.
  File[] files =
      repoFolder.listFiles(
          new FileFilter() {
            public boolean accept(File pathname) {
              if (pathname.getName().endsWith(FILE_DEB_EXT)) {
                return true;
              }
              return false;
            }
          });
  Packages packages = new Packages();
  for (int i = 0; i < files.length; i++) {
    File file = files[i];
    // One index entry per package: size, digests, and file name.
    PackageEntry packageEntry = new PackageEntry();
    packageEntry.setSize(file.length());
    packageEntry.setSha1(Utils.getDigest("SHA-1", file));
    packageEntry.setSha256(Utils.getDigest("SHA-256", file));
    packageEntry.setMd5sum(Utils.getDigest("MD5", file));
    String fileName = file.getName();
    packageEntry.setFilename(fileName);
    log("found deb: " + fileName);
    try {
      ArchiveInputStream control_tgz;
      ArArchiveEntry entry;
      TarArchiveEntry control_entry;
      // A .deb is an "ar" archive; scan it for the control.tar.gz member.
      ArchiveInputStream debStream =
          new ArchiveStreamFactory().createArchiveInputStream("ar", new FileInputStream(file));
      while ((entry = (ArArchiveEntry) debStream.getNextEntry()) != null) {
        if (entry.getName().equals("control.tar.gz")) {
          ControlHandler controlHandler = new ControlHandler();
          // control.tar.gz is a gzipped tar nested inside the ar stream.
          GZIPInputStream gzipInputStream = new GZIPInputStream(debStream);
          control_tgz =
              new ArchiveStreamFactory().createArchiveInputStream("tar", gzipInputStream);
          while ((control_entry = (TarArchiveEntry) control_tgz.getNextEntry()) != null) {
            log("control entry: " + control_entry.getName(), LogLevel.DEBUG.getLevel());
            if (control_entry.getName().trim().equals(CONTROL_FILE_NAME)) {
              // Read the control file content into memory for the handler.
              ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
              IOUtils.copy(control_tgz, outputStream);
              String content_string = outputStream.toString("UTF-8");
              // close() on ByteArrayOutputStream is a no-op, so toString below still works.
              outputStream.close();
              controlHandler.setControlContent(content_string);
              log("control cont: " + outputStream.toString("utf-8"), LogLevel.DEBUG.getLevel());
              break;
            }
          }
          control_tgz.close();
          if (controlHandler.hasControlContent()) {
            // Parse the control content into the package entry (name, version, etc.).
            controlHandler.handle(packageEntry);
          } else {
            throw new RuntimeException("no control content found for: " + file.getName());
          }
          break;
        }
      }
      // NOTE(review): debStream is not closed if an exception is thrown above;
      // try-with-resources would make this leak-proof.
      debStream.close();
      packages.addPackageEntry(packageEntry);
    } catch (Exception e) {
      String msg = FAILED_TO_CREATE_APT_REPO + " " + file.getName();
      log(msg, e, LogLevel.ERR.getLevel());
      throw new RuntimeException(msg, e);
    }
  }
  try {
    // Write the gzipped Packages index, then a Release file referencing its hashes.
    File packagesFile = new File(repoDir, PACKAGES_GZ);
    packagesWriter =
        new BufferedWriter(
            new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(packagesFile))));
    packagesWriter.write(packages.toString());
    DefaultHashes hashes = Utils.getDefaultDigests(packagesFile);
    ReleaseInfo pinfo = new ReleaseInfo(PACKAGES_GZ, packagesFile.length(), hashes);
    Release release = new Release();
    release.addInfo(pinfo);
    final File releaseFile = new File(repoDir, RELEASE);
    FileUtils.fileWrite(releaseFile, release.toString());
  } catch (IOException e) {
    throw new RuntimeException("writing files failed", e);
  } finally {
    if (packagesWriter != null) {
      try {
        packagesWriter.close();
      } catch (IOException e) {
        throw new RuntimeException("writing files failed", e);
      }
    }
  }
}
/**
 * Untar an input file into an output file.
 *
 * <p>The output file is created in the output folder, having the same name as the input file,
 * minus the '.tar' extension.
 *
 * <p>Protocol note: the first entry of the archive is expected to be index.json; every later
 * entry is only extracted if it appears in that index. After extraction, temp folders are
 * atomically moved over the live collection/media folders and cache indices are refreshed.
 *
 * @param archiveFile input TAR file; deleted when processing finishes (success or failure)
 * @param lastModified when collection has been last modified
 * @throws FileNotFoundException
 * @throws ArchiveException
 */
private static List<FileWithMeta> performUnTar(
    File archiveFile,
    IonConfig config,
    Collection collection,
    String lastModified,
    Context context)
    throws FileNotFoundException, IOException, ArchiveException {
  File collectionFolder = FilePaths.getCollectionFolderPath(config, context);
  File collectionFolderTemp = FilePaths.getTempFilePath(collectionFolder);
  final List<FileWithMeta> untaredFiles = new LinkedList<>();
  InputStream is = null;
  TarArchiveInputStream debInputStream = null;
  try {
    IonLog.d(
        TAG,
        String.format(
            "Untaring %s to dir %s.", archiveFile.getPath(), collectionFolder.getPath()));
    is = new FileInputStream(archiveFile);
    debInputStream =
        (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
    TarArchiveEntry entry;
    List<ArchiveIndex> index = null;
    boolean indexHasBeenRead = false;
    while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
      if (!indexHasBeenRead) {
        // get index.json — assumed to be the archive's first entry; it lists every
        // file the archive may legitimately contain.
        InputStreamReader inputStreamReader = new InputStreamReader(debInputStream, "UTF-8");
        index =
            Arrays.asList(
                GsonHolder.getInstance().fromJson(inputStreamReader, ArchiveIndex[].class));
        indexHasBeenRead = true;
        continue;
      }
      // write the "content" files
      if (!entry.isDirectory()) {
        String archiveFileName = entry.getName();
        // Only extract entries that are declared in index.json.
        ArchiveIndex fileInfo = ArchiveIndex.getByName(archiveFileName, index);
        if (fileInfo == null) {
          IonLog.w(
              TAG, "Skipping " + entry.getName() + " because it was not found in index.json.");
          continue;
        }
        IonLog.i(TAG, fileInfo.url);
        FileWithMeta fileWithMeta = getFilePath(fileInfo, collectionFolderTemp, config, context);
        File targetFile = fileWithMeta.file;
        FileUtils.createDir(targetFile.getParentFile());
        targetFile = FileUtils.writeToFile(debInputStream, targetFile);
        // writeToFile returns null on failure; only record successful extractions.
        if (targetFile != null) {
          untaredFiles.add(fileWithMeta);
        }
      }
    }
  } finally {
    // finished reading TAR archive — close streams and always delete the archive file.
    if (is != null) {
      is.close();
    }
    if (debInputStream != null) {
      debInputStream.close();
    }
    if (archiveFile != null && archiveFile.exists()) {
      archiveFile.delete();
    }
  }
  // if lastModified date was not passed, look if cache index entry exists for collection and
  // retrieve it from there
  if (collection != null && lastModified == null) {
    CollectionCacheIndex collectionCacheIndex = CollectionCacheIndex.retrieve(config, context);
    lastModified = collectionCacheIndex == null ? null : collectionCacheIndex.getLastModified();
    IonLog.d(TAG, "Restoring last_modified from cache index: " + lastModified);
  }
  // delete old cache index entries of the collection in shared preferences and in memory cache
  CacheIndexStore.clearCollection(config, context);
  MemoryCache.clear();
  // replace collection folder (containing json files) - deletes old file cache
  boolean jsonWriteSuccess = FileUtils.move(collectionFolderTemp, collectionFolder, true);
  if (!jsonWriteSuccess) {
    throw new IOException("JSON files could not be moved to final path.");
  }
  // replace media files in collection
  File mediaFolderTemp = FilePaths.getMediaFolderPath(config, context, true);
  File mediaFolder = FilePaths.getMediaFolderPath(config, context, false);
  if (mediaFolderTemp.exists()) {
    boolean mediaWriteSuccess = FileUtils.move(mediaFolderTemp, mediaFolder, true);
    if (!mediaWriteSuccess) {
      throw new IOException("Media files could not be moved to final path.");
    }
  } else {
    IonLog.w(TAG, "No media files were contained in archive.");
  }
  // add collection to file cache again
  if (collection != null) {
    MemoryCache.saveCollection(collection, config, context);
    try {
      saveCollectionToFileCache(config, collection, context);
      CollectionCacheIndex.save(config, context, lastModified);
    } catch (IOException e) {
      // Cache write failure is non-fatal: the extracted files are already in place.
      IonLog.e("ION Archive", "Collection could not be saved.");
      IonLog.ex(e);
    }
  }
  // cache index entries are not written yet at this point
  return untaredFiles;
}
/** Returns this tar entry's path, split on '/', as a file-relative path. */
public RelativePath getRelativePath() {
  final String[] segments = entry.getName().split("/");
  return new RelativePath(true, segments);
}
/** Human-readable identifier of this entry: "tar entry &lt;archive&gt;!&lt;entry name&gt;". */
public String getDisplayName() {
  return "tar entry " + tarFile + "!" + entry.getName();
}
/** * Extracts files to the specified destination * * @param file the file to extract to * @param dest the destination directory * @throws IOException */ public static void unzipFileTo(String file, String dest) throws IOException { File target = new File(file); if (!target.exists()) throw new IllegalArgumentException("Archive doesnt exist"); FileInputStream fin = new FileInputStream(target); int BUFFER = 2048; byte data[] = new byte[BUFFER]; if (file.endsWith(".zip")) { // get the zip file content ZipInputStream zis = new ZipInputStream(fin); // get the zipped file list entry ZipEntry ze = zis.getNextEntry(); while (ze != null) { String fileName = ze.getName(); File newFile = new File(dest + File.separator + fileName); log.info("file unzip : " + newFile.getAbsoluteFile()); // create all non exists folders // else you will hit FileNotFoundException for compressed folder new File(newFile.getParent()).mkdirs(); FileOutputStream fos = new FileOutputStream(newFile); int len; while ((len = zis.read(data)) > 0) { fos.write(data, 0, len); } fos.close(); ze = zis.getNextEntry(); } zis.closeEntry(); zis.close(); } else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) { BufferedInputStream in = new BufferedInputStream(fin); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry = null; /** Read the tar entries using the getNextEntry method * */ while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { log.info("Extracting: " + entry.getName()); /** If the entry is a directory, create the directory. * */ if (entry.isDirectory()) { File f = new File(dest + File.separator + entry.getName()); f.mkdirs(); } /** * If the entry is a file,write the decompressed file to the disk and close destination * stream. 
*/ else { int count; FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER); while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } destStream.flush(); ; IOUtils.closeQuietly(destStream); } } /** Close the input stream * */ tarIn.close(); } else if (file.endsWith(".gz")) { GZIPInputStream is2 = new GZIPInputStream(fin); File extracted = new File(target.getParent(), target.getName().replace(".gz", "")); if (extracted.exists()) extracted.delete(); extracted.createNewFile(); OutputStream fos = FileUtils.openOutputStream(extracted); IOUtils.copyLarge(is2, fos); is2.close(); fos.flush(); fos.close(); } target.delete(); }