@Override
public void unarchive(InputStream source, File target) {
  // Extracts every regular-file entry of the TAR stream below {@code target}.
  // Directory entries are skipped; directories are created implicitly as the
  // parents of the files they contain.
  try (TarArchiveInputStream tIn = new TarArchiveInputStream(source)) {
    TarArchiveEntry entry;
    while ((entry = tIn.getNextTarEntry()) != null) {
      if (entry.isDirectory()) {
        continue;
      }
      File curfile = new File(target, entry.getName());
      // Guard against "zip slip": refuse entries whose resolved path escapes
      // the target directory (e.g. "../../etc/passwd").
      if (!curfile.getCanonicalPath().startsWith(target.getCanonicalPath() + File.separator)) {
        throw new IOException("Tar entry is outside of the target dir: " + entry.getName());
      }
      File parent = curfile.getParentFile();
      if (parent != null && !parent.exists() && !parent.mkdirs()) {
        throw new IOException("Could not create directory: " + parent);
      }
      try (OutputStream out = new FileOutputStream(curfile)) {
        IOUtils.copy(tIn, out);
      }
    }
  } catch (IOException e) {
    // Preserve the original contract: callers see an unchecked exception.
    throw new IllegalStateException(e);
  }
}
public FileTree visit(FileVisitor visitor) {
  // A missing archive file is treated as an empty tree: nothing to visit.
  if (!tarFile.exists()) {
    return this;
  }
  if (!tarFile.isFile()) {
    throw new InvalidUserDataException(
        String.format("Cannot expand %s as it is not a file.", this));
  }
  // Shared flag through which the visitor can request early termination.
  AtomicBoolean stopRequested = new AtomicBoolean();
  try {
    FileInputStream fileIn = new FileInputStream(tarFile);
    try {
      NoCloseTarInputStream tarStream = new NoCloseTarInputStream(fileIn);
      TarArchiveEntry current;
      // Stop flag is consulted before each read so a visitor-initiated stop
      // prevents any further entries from being consumed.
      while (!stopRequested.get() && (current = tarStream.getNextTarEntry()) != null) {
        DetailsImpl details = new DetailsImpl(current, tarStream, stopRequested);
        if (current.isDirectory()) {
          visitor.visitDir(details);
        } else {
          visitor.visitFile(details);
        }
      }
    } finally {
      fileIn.close();
    }
  } catch (Exception e) {
    throw new GradleException(String.format("Could not expand %s.", this), e);
  }
  return this;
}
/** * Move to the next tar entry. * * @param rootDir an entry path * @return true if a next tar entry can be read, or if this entry name is a sub-folder of rootDir */ protected boolean hasNext(final String rootDir) { try { /* * if reader.available() is not equal to 0, then it means that this entry * has been loaded, but not read. */ while (reader.available() == 0 && (tarEntry = reader.getNextTarEntry()) == null) { // Next tar entry if (++inputPos >= input.length) { reader.close(); return false; } // Next archive file reader.close(); logger.info("Reading dump: {}", this.input[inputPos]); reader = getTarInputStream(input[inputPos]); } } catch (IOException e) { logger.error("Error while reading the input: {}\n{}", input[inputPos], e); } /* * When returning from this method, the inputstream is positionned at a regular file, * i.e., metadata, outgoing-triples.nt or incoming-triples.nt. */ if (tarEntry.isDirectory()) { return hasNext(rootDir); } return rootDir == null || tarEntry.getName().startsWith(rootDir) ? true : false; }
/**
 * Scans a gzipped TAR artifact for the first non-directory entry whose name ends
 * with {@code VERSION_FILE} and returns its trimmed UTF-8 content.
 *
 * <p>NOTE(review): {@code artifact.array()} assumes a heap-backed buffer and ignores
 * the buffer's position/limit — confirm callers always pass a freshly wrapped,
 * array-backed buffer.
 *
 * @param artifact the artifact bytes (gzip-compressed tar)
 * @return the version string, or {@code null} when no version file entry exists
 * @throws IOException on decompression or read failure
 */
public static String getVersionFromArtifact(ByteBuffer artifact) throws IOException {
  // Both streams participate in try-with-resources so the tar wrapper is
  // closed as well, not only the underlying gzip stream.
  try (CompressorInputStream uncompressedInput =
          new GzipCompressorInputStream(new ByteArrayInputStream(artifact.array()));
      ArchiveInputStream input = new TarArchiveInputStream(uncompressedInput)) {
    TarArchiveEntry entry;
    while ((entry = (TarArchiveEntry) input.getNextEntry()) != null) {
      if (!entry.isDirectory() && entry.getName().endsWith(VERSION_FILE)) {
        return IOUtils.toString(input, Charsets.UTF_8).trim();
      }
    }
  }
  return null;
}
public TarArchiveEntry map(final TarArchiveEntry entry) { final String name = entry.getName(); final TarArchiveEntry newEntry = new TarArchiveEntry(prefix + '/' + Utils.stripPath(strip, name), true); // Set ownership if (uid > -1) { newEntry.setUserId(uid); } else { newEntry.setUserId(entry.getUserId()); } if (gid > -1) { newEntry.setGroupId(gid); } else { newEntry.setGroupId(entry.getGroupId()); } if (user != null) { newEntry.setUserName(user); } else { newEntry.setUserName(entry.getUserName()); } if (group != null) { newEntry.setGroupName(group); } else { newEntry.setGroupName(entry.getGroupName()); } // Set permissions if (newEntry.isDirectory()) { if (dirMode > -1) { newEntry.setMode(dirMode); } else { newEntry.setMode(entry.getMode()); } } else { if (fileMode > -1) { newEntry.setMode(fileMode); } else { newEntry.setMode(entry.getMode()); } } newEntry.setSize(entry.getSize()); return newEntry; }
/** * Unpacks a workflow into specified directory. To be more precise: workflow package should * contain only a single directory with the workflow. This method will extract its contents into * target directory. * * @param target new workflow root directory */ public void extract(File target) throws IOException { BufferedInputStream in = new BufferedInputStream(new FileInputStream(file)); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry; while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { // The archive should contain only a single directory: we want to unpack its content. final File outputFile = new File(target, entry.getName().substring(entry.getName().indexOf('/') + 1)); if (entry.isDirectory()) { FileUtils.forceMkdir(outputFile); } else { OutputStream outputFileStream = null; try { outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarIn, outputFileStream); } finally { IOUtils.closeQuietly(outputFileStream); } } } }
/**
 * Unpack the content read from <i>source</i> into <i>targetFolder</i>. If the
 * <i>skipTopFolder</i> is set, then don't assume that the archive contains one single folder and
 * unpack the content of that folder, not including the folder itself.
 *
 * @param source The input source. Must be in <i>TAR</i> format.
 * @param targetFolder The destination folder for the unpack. Not used when a <tt>fileCatcher</tt>
 *     is provided
 * @param skipTopFolder Set to <code>true</code> to unpack beneath the top folder of the archive.
 *     The archive must consist of one single folder and nothing else in order for this to work.
 * @param fileCatcher Used when specific files should be picked from the archive without writing
 *     them to disk. Can be <tt>null</tt>.
 * @throws IOException
 */
// NOTE(review): entry names are joined onto targetFolder without a path-traversal
// ("zip slip") check — confirm archives come from a trusted source.
public static void unpack(
    InputStream source, File targetFolder, boolean skipTopFolder, FileCatcher fileCatcher)
    throws IOException {
  String topFolderName = null;
  // Collected mode bits per file; applied in one batch by chmod() after extraction.
  Map<File, Map<Integer, List<String>>> chmodMap = new HashMap<File, Map<Integer, List<String>>>();
  TarArchiveInputStream in = new TarArchiveInputStream(source);
  try {
    TarArchiveEntry te = in.getNextTarEntry();
    if (te == null) {
      throw new IOException("No entry in the tar file");
    }
    do {
      // PAX global headers carry archive metadata, not file content.
      if (te.isGlobalPaxHeader()) continue;
      String name = te.getName();
      if (name.startsWith("./._")) // MacOS specific extended attributes addition. Just skip it
        continue;
      if (skipTopFolder) {
        // Every entry must live under the same single top-level folder,
        // which is stripped from the extracted path.
        int firstSlash = name.indexOf('/');
        if (firstSlash < 0) throw new IOException("Archive doesn't contain one single folder");
        String tfName = name.substring(0, firstSlash);
        if (topFolderName == null) topFolderName = tfName;
        else if (!tfName.equals(topFolderName))
          throw new IOException("Archive doesn't contain one single folder");
        name = name.substring(firstSlash + 1);
      }
      if (name.length() == 0) continue;
      // Normalize a blank link name to null so it reads as "not a link".
      String linkName = te.getLinkName();
      if (linkName != null) {
        if (linkName.trim().equals("")) linkName = null;
      }
      if (fileCatcher != null) {
        // Catcher mode: stream matching regular files to the catcher,
        // write nothing to disk.
        if (linkName == null && !te.isDirectory() && fileCatcher.accept(name)) {
          if (fileCatcher.catchData(name, in))
            // We're done here
            return;
        }
        continue;
      }
      File outFile = new File(targetFolder, name);
      if (linkName != null) {
        if (!OsUtil.link(targetFolder, name, te.getLinkName()))
          throw new IOException(
              "Archive contains links but they are not supported on this platform");
      } else {
        if (te.isDirectory()) {
          outFile.mkdirs();
        } else {
          outFile.getParentFile().mkdirs();
          OutputStream target = new FileOutputStream(outFile);
          StreamUtil.copy(in, target);
          target.close();
          // Preserve the archived modification time on the extracted file.
          outFile.setLastModified(te.getModTime().getTime());
        }
        registerChmodFile(chmodMap, targetFolder, Integer.valueOf(te.getMode()), name);
      }
    } while ((te = in.getNextTarEntry()) != null);
  } finally {
    StreamUtil.close(in);
  }
  // Apply all recorded permissions in one pass.
  chmod(chmodMap);
}
/**
 * Untar an input file into an output file.
 *
 * <p>The output file is created in the output folder, having the same name as the input file,
 * minus the '.tar' extension.
 *
 * @param archiveFile input TAR file
 * @param lastModified when collection has been last modified
 * @throws FileNotFoundException
 * @throws ArchiveException
 */
// Contract (from the code below): the FIRST tar entry must be index.json; all later
// entries are content files that must appear in that index, otherwise they are skipped.
// The archive file itself is deleted afterwards, and the collection/media folders are
// atomically swapped in via FileUtils.move.
private static List<FileWithMeta> performUnTar(
    File archiveFile, IonConfig config, Collection collection, String lastModified, Context context)
    throws FileNotFoundException, IOException, ArchiveException {
  File collectionFolder = FilePaths.getCollectionFolderPath(config, context);
  File collectionFolderTemp = FilePaths.getTempFilePath(collectionFolder);
  final List<FileWithMeta> untaredFiles = new LinkedList<>();
  InputStream is = null;
  TarArchiveInputStream debInputStream = null;
  try {
    IonLog.d(
        TAG,
        String.format(
            "Untaring %s to dir %s.", archiveFile.getPath(), collectionFolder.getPath()));
    is = new FileInputStream(archiveFile);
    debInputStream =
        (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
    TarArchiveEntry entry;
    List<ArchiveIndex> index = null;
    boolean indexHasBeenRead = false;
    while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
      if (!indexHasBeenRead) {
        // get index.json — assumed to be the first entry of the archive.
        InputStreamReader inputStreamReader = new InputStreamReader(debInputStream, "UTF-8");
        index =
            Arrays.asList(
                GsonHolder.getInstance().fromJson(inputStreamReader, ArchiveIndex[].class));
        indexHasBeenRead = true;
        continue;
      }
      // write the "content" files
      if (!entry.isDirectory()) {
        String archiveFileName = entry.getName();
        ArchiveIndex fileInfo = ArchiveIndex.getByName(archiveFileName, index);
        if (fileInfo == null) {
          // Entries not listed in index.json are ignored, not treated as errors.
          IonLog.w(
              TAG, "Skipping " + entry.getName() + " because it was not found in index.json.");
          continue;
        }
        IonLog.i(TAG, fileInfo.url);
        FileWithMeta fileWithMeta = getFilePath(fileInfo, collectionFolderTemp, config, context);
        File targetFile = fileWithMeta.file;
        FileUtils.createDir(targetFile.getParentFile());
        // writeToFile returns null on failure; only successful writes are recorded.
        targetFile = FileUtils.writeToFile(debInputStream, targetFile);
        if (targetFile != null) {
          untaredFiles.add(fileWithMeta);
        }
      }
    }
  } finally {
    // finished reading TAR archive
    if (is != null) {
      is.close();
    }
    if (debInputStream != null) {
      debInputStream.close();
    }
    // The downloaded archive is no longer needed once unpacked.
    if (archiveFile != null && archiveFile.exists()) {
      archiveFile.delete();
    }
  }
  // if lastModified date was not passed, look if cache index entry exists for collection and
  // retrieve it from there
  if (collection != null && lastModified == null) {
    CollectionCacheIndex collectionCacheIndex = CollectionCacheIndex.retrieve(config, context);
    lastModified = collectionCacheIndex == null ? null : collectionCacheIndex.getLastModified();
    IonLog.d(TAG, "Restoring last_modified from cache index: " + lastModified);
  }
  // delete old cache index entries of the collection in shared preferences and in memory cache
  CacheIndexStore.clearCollection(config, context);
  MemoryCache.clear();
  // replace collection folder (containing json files) - deletes old file cache
  boolean jsonWriteSuccess = FileUtils.move(collectionFolderTemp, collectionFolder, true);
  if (!jsonWriteSuccess) {
    throw new IOException("JSON files could not be moved to final path.");
  }
  // replace media files in collection
  File mediaFolderTemp = FilePaths.getMediaFolderPath(config, context, true);
  File mediaFolder = FilePaths.getMediaFolderPath(config, context, false);
  if (mediaFolderTemp.exists()) {
    boolean mediaWriteSuccess = FileUtils.move(mediaFolderTemp, mediaFolder, true);
    if (!mediaWriteSuccess) {
      throw new IOException("Media files could not be moved to final path.");
    }
  } else {
    IonLog.w(TAG, "No media files were contained in archive.");
  }
  // add collection to file cache again
  if (collection != null) {
    MemoryCache.saveCollection(collection, config, context);
    try {
      saveCollectionToFileCache(config, collection, context);
      CollectionCacheIndex.save(config, context, lastModified);
    } catch (IOException e) {
      // Best effort: a failed file-cache save is logged but does not fail the untar.
      IonLog.e("ION Archive", "Collection could not be saved.");
      IonLog.ex(e);
    }
  }
  // cache index entries are not written yet at this point
  return untaredFiles;
}
/** Returns {@code true} when the wrapped tar entry represents a directory. */
public boolean isDirectory() {
  return entry.isDirectory();
}
/** * Extracts files to the specified destination * * @param file the file to extract to * @param dest the destination directory * @throws IOException */ public static void unzipFileTo(String file, String dest) throws IOException { File target = new File(file); if (!target.exists()) throw new IllegalArgumentException("Archive doesnt exist"); FileInputStream fin = new FileInputStream(target); int BUFFER = 2048; byte data[] = new byte[BUFFER]; if (file.endsWith(".zip")) { // get the zip file content ZipInputStream zis = new ZipInputStream(fin); // get the zipped file list entry ZipEntry ze = zis.getNextEntry(); while (ze != null) { String fileName = ze.getName(); File newFile = new File(dest + File.separator + fileName); log.info("file unzip : " + newFile.getAbsoluteFile()); // create all non exists folders // else you will hit FileNotFoundException for compressed folder new File(newFile.getParent()).mkdirs(); FileOutputStream fos = new FileOutputStream(newFile); int len; while ((len = zis.read(data)) > 0) { fos.write(data, 0, len); } fos.close(); ze = zis.getNextEntry(); } zis.closeEntry(); zis.close(); } else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) { BufferedInputStream in = new BufferedInputStream(fin); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry = null; /** Read the tar entries using the getNextEntry method * */ while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { log.info("Extracting: " + entry.getName()); /** If the entry is a directory, create the directory. * */ if (entry.isDirectory()) { File f = new File(dest + File.separator + entry.getName()); f.mkdirs(); } /** * If the entry is a file,write the decompressed file to the disk and close destination * stream. 
*/ else { int count; FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER); while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } destStream.flush(); ; IOUtils.closeQuietly(destStream); } } /** Close the input stream * */ tarIn.close(); } else if (file.endsWith(".gz")) { GZIPInputStream is2 = new GZIPInputStream(fin); File extracted = new File(target.getParent(), target.getName().replace(".gz", "")); if (extracted.exists()) extracted.delete(); extracted.createNewFile(); OutputStream fos = FileUtils.openOutputStream(extracted); IOUtils.copyLarge(is2, fos); is2.close(); fos.flush(); fos.close(); } target.delete(); }