/**
 * Walks every entry of the backing tar file, notifying {@code visitor} for each
 * directory and file entry until the visitor raises the stop flag. Returns this
 * tree for chaining. A missing file is a no-op; a non-regular file is an error.
 */
public FileTree visit(FileVisitor visitor) {
  if (!tarFile.exists()) {
    return this;
  }
  if (!tarFile.isFile()) {
    throw new InvalidUserDataException(
        String.format("Cannot expand %s as it is not a file.", this));
  }
  AtomicBoolean stop = new AtomicBoolean();
  try {
    FileInputStream in = new FileInputStream(tarFile);
    try {
      NoCloseTarInputStream archive = new NoCloseTarInputStream(in);
      TarArchiveEntry current;
      // Check the stop flag first so no extra entry is read once a visitor aborts.
      while (!stop.get() && (current = archive.getNextTarEntry()) != null) {
        DetailsImpl details = new DetailsImpl(current, archive, stop);
        if (current.isDirectory()) {
          visitor.visitDir(details);
        } else {
          visitor.visitFile(details);
        }
      }
    } finally {
      in.close();
    }
  } catch (Exception e) {
    throw new GradleException(String.format("Could not expand %s.", this), e);
  }
  return this;
}
/**
 * Extracts a tar stream into {@code target}, creating parent directories as needed.
 * Directory entries are skipped; only regular-file entries are written. Entries whose
 * name would resolve outside {@code target} ("zip slip" path traversal) are rejected.
 *
 * @param source raw tar byte stream; closed by this method
 * @param target destination root directory
 * @throws IllegalStateException wrapping any I/O failure, including rejected entries
 */
@Override
public void unarchive(InputStream source, File target) {
  try (TarArchiveInputStream tIn = new TarArchiveInputStream(source)) {
    String targetRoot = target.getCanonicalPath() + File.separator;
    for (TarArchiveEntry entry = tIn.getNextTarEntry();
        entry != null;
        entry = tIn.getNextTarEntry()) {
      if (entry.isDirectory()) {
        continue;
      }
      File curfile = new File(target, entry.getName());
      // Security fix: entry names like "../../etc/passwd" must not escape target.
      if (!curfile.getCanonicalPath().startsWith(targetRoot)) {
        throw new IOException("Tar entry outside of target directory: " + entry.getName());
      }
      File parent = curfile.getParentFile();
      // Fix: a failed mkdirs() was silently ignored before.
      if (!parent.exists() && !parent.mkdirs()) {
        throw new IOException("Could not create directory: " + parent);
      }
      try (OutputStream out = new FileOutputStream(curfile)) {
        IOUtils.copy(tIn, out);
      }
    }
  } catch (IOException e) {
    throw new IllegalStateException(e);
  }
}
/** * Move to the next tar entry. * * @param rootDir an entry path * @return true if a next tar entry can be read, or if this entry name is a sub-folder of rootDir */ protected boolean hasNext(final String rootDir) { try { /* * if reader.available() is not equal to 0, then it means that this entry * has been loaded, but not read. */ while (reader.available() == 0 && (tarEntry = reader.getNextTarEntry()) == null) { // Next tar entry if (++inputPos >= input.length) { reader.close(); return false; } // Next archive file reader.close(); logger.info("Reading dump: {}", this.input[inputPos]); reader = getTarInputStream(input[inputPos]); } } catch (IOException e) { logger.error("Error while reading the input: {}\n{}", input[inputPos], e); } /* * When returning from this method, the inputstream is positionned at a regular file, * i.e., metadata, outgoing-triples.nt or incoming-triples.nt. */ if (tarEntry.isDirectory()) { return hasNext(rootDir); } return rootDir == null || tarEntry.getName().startsWith(rootDir) ? true : false; }
/**
 * Extracts the version string from a gzipped tar artifact by locating the first
 * non-directory entry whose name ends with {@code VERSION_FILE} and reading its
 * content as UTF-8.
 *
 * @param artifact gzipped tar contents; assumes an array-backed buffer whose backing
 *     array covers the full payload — TODO confirm callers never pass a sliced buffer
 * @return the trimmed version string, or {@code null} when no version file is found
 * @throws IOException on malformed gzip/tar data
 */
public static String getVersionFromArtifact(ByteBuffer artifact) throws IOException {
  String versionNumber = null;
  // Fix: make the tar stream the managed resource so it (and the gzip stream
  // beneath it) is closed even on error; the tar stream was previously leaked.
  try (TarArchiveInputStream input =
      new TarArchiveInputStream(
          new GzipCompressorInputStream(new ByteArrayInputStream(artifact.array())))) {
    TarArchiveEntry entry;
    while ((entry = input.getNextTarEntry()) != null) {
      if (!entry.isDirectory() && entry.getName().endsWith(VERSION_FILE)) {
        versionNumber = IOUtils.toString(input, Charsets.UTF_8).trim();
        break;
      }
    }
  }
  return versionNumber;
}
/**
 * Lazily extracts this entry into the temp directory on first access and caches
 * the resulting file for subsequent calls.
 */
public File getFile() {
  if (file != null) {
    return file;
  }
  // First access: materialize the entry's content under tmpDir.
  file = new File(tmpDir, entry.getName());
  copyTo(file);
  return file;
}
public static List<File> untar(final File inputtar, final File outputDir) throws IOException, ArchiveException { final List<File> unpackedFiles = new ArrayList<File>(); if (OMWPP.DEBUG) { Log.i("OMWPPtar", String.format("Unzipping tar file %s.", inputtar.getAbsoluteFile())); } final InputStream is = new FileInputStream(inputtar); final TarArchiveInputStream tarInputStream = (TarArchiveInputStream) new TarArchiveInputStream(is); TarArchiveEntry entry = null; while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Read entry: " + entry.getName()); String filename = entry.getName().substring(entry.getName().lastIndexOf("/") + 1); if (filename.toLowerCase().endsWith(".png") || filename.toLowerCase().endsWith(".jpg")) { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Is background, extracting."); final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); unpackedFiles.add(outputFile); } else if (filename.toLowerCase().endsWith(".gz")) { // RECURSIVE CALL final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); gunzip(outputFile, SDROOT); outputFile.delete(); } else if (filename.toLowerCase().endsWith(".tar")) { // RECURSIVE CALL final File outputFile = new File(outputDir, filename); final OutputStream outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarInputStream, outputFileStream); outputFileStream.close(); untar(outputFile, SDROOT); outputFile.delete(); } else { if (OMWPP.DEBUG) Log.i("OMWPPtar", "Is not background, skipping."); } } tarInputStream.close(); return unpackedFiles; }
/* * Given a zip stream, unzips it and returns an input stream to the desired data file. * * @param importDataRecord ImportDataRecord * @param is InputStream * @return InputStream */ private InputStream readContent(ImportDataRecord importDataRecord, InputStream is) throws Exception { InputStream toReturn = null; try { // decompress .gz file if (LOG.isInfoEnabled()) { LOG.info("readContent(), decompressing: " + importDataRecord.getCanonicalPathToData()); } InputStream unzippedContent = new GzipCompressorInputStream(is); // if tarball, untar if (importDataRecord.getCanonicalPathToData().toLowerCase().endsWith("tar.gz")) { if (LOG.isInfoEnabled()) { LOG.info("readContent(), gzip file is a tarball, untarring"); } TarArchiveInputStream tis = new TarArchiveInputStream(unzippedContent); TarArchiveEntry entry = null; while ((entry = tis.getNextTarEntry()) != null) { String entryName = entry.getName(); String dataFile = importDataRecord.getDataFilename(); if (dataFile.contains(DatatypeMetadata.TUMOR_TYPE_TAG)) { dataFile = dataFile.replaceAll( DatatypeMetadata.TUMOR_TYPE_TAG, importDataRecord.getTumorType().toUpperCase()); } if (entryName.contains(dataFile)) { if (LOG.isInfoEnabled()) { LOG.info("Processing tar-archive: " + importDataRecord.getDataFilename()); } toReturn = tis; break; } } } else { toReturn = unzippedContent; } } catch (Exception e) { throw e; } // outta here return toReturn; }
private static void append( File file, FileFilter filter, int baseNameLen, String addedTopFolder, TarArchiveOutputStream tarOut) throws IOException { String name = file.getAbsolutePath(); if (name.length() <= baseNameLen) name = ""; else name = name.substring(baseNameLen); if (File.separatorChar == '\\') name = name.replace('\\', '/'); if (addedTopFolder != null) name = addedTopFolder + '/' + name; if (FileUtils.isSymlink(file)) { String linkTarget = FileUtils.readSymbolicLink(file); if (linkTarget != null) { TarArchiveEntry entry = new TarArchiveEntry(name, TarConstants.LF_SYMLINK); entry.setName(name); entry.setLinkName(linkTarget); tarOut.putArchiveEntry(entry); } return; } ArchiveEntry entry = tarOut.createArchiveEntry(file, name); tarOut.putArchiveEntry(entry); File[] children = file.listFiles(filter); if (children != null) { tarOut.closeArchiveEntry(); // This is a directory. Append its children for (File child : children) append(child, filter, baseNameLen, addedTopFolder, tarOut); return; } // Append the content of the file InputStream input = new FileInputStream(file); try { StreamUtil.copy(input, tarOut); tarOut.closeArchiveEntry(); } finally { StreamUtil.close(input); } }
/**
 * Writes a Debian control-file entry named {@code ./pName} to the tar output,
 * owned by root:root, mode 755 for maintainer scripts and 644 otherwise.
 */
private static void addControlEntry(
    final String pName, final String pContent, final TarArchiveOutputStream pOutput)
    throws IOException {
  final byte[] payload = pContent.getBytes("UTF-8");
  final TarArchiveEntry controlEntry = new TarArchiveEntry("./" + pName, true);
  controlEntry.setSize(payload.length);
  controlEntry.setNames("root", "root");
  // Maintainer scripts must be executable; everything else is plain data.
  final String perms = MAINTAINER_SCRIPTS.contains(pName) ? "755" : "644";
  controlEntry.setMode(PermMapper.toMode(perms));
  pOutput.putArchiveEntry(controlEntry);
  pOutput.write(payload);
  pOutput.closeArchiveEntry();
}
/**
 * Integration test: uploads and starts an application, downloads its droplet, and
 * asserts the gzipped tar payload contains both a Staticfile and an index.html.
 */
@Test
public void downloadDroplet() {
  this.applicationId
      .then(this::uploadAndStartApplication)
      .flatMap(
          applicationId ->
              this.cloudFoundryClient
                  .applicationsV2()
                  .downloadDroplet(
                      DownloadApplicationDropletRequest.builder()
                          .applicationId(applicationId)
                          .build()))
      .as(Stream::from)
      // Accumulate the streamed droplet chunks into one in-memory buffer.
      .reduceWith(ByteArrayOutputStream::new, ApplicationsTest::collectIntoByteArrayInputStream)
      .map(
          bytes -> {
            boolean staticFile = false;
            boolean indexFile = false;
            try {
              // The droplet is delivered as a gzipped tar stream; scan its entry names.
              TarArchiveInputStream tis =
                  new TarArchiveInputStream(
                      new GZIPInputStream(new ByteArrayInputStream(bytes.toByteArray())));
              for (TarArchiveEntry entry = tis.getNextTarEntry();
                  entry != null;
                  entry = tis.getNextTarEntry()) {
                if (entry.getName().contains("Staticfile")) {
                  staticFile = true;
                }
                if (entry.getName().contains("index.html")) {
                  indexFile = true;
                }
              }
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
            // Both files must be present for the test to pass.
            return staticFile && indexFile;
          })
      .subscribe(testSubscriber().assertEquals(true));
}
/**
 * Test case for PLXCOMP-220: negative uid/gid values written into a tar header
 * must read back as 0 rather than failing or producing garbage.
 */
public void testInvalidUidGid() {
  // Write an entry whose ownership ids are deliberately invalid.
  final TarArchiveEntry outEntry = new TarArchiveEntry("test.java");
  outEntry.setUserId(-1);
  outEntry.setGroupId(-1);
  final byte[] headerBlock = new byte[defaultRcdsize()];
  outEntry.writeEntryHeader(headerBlock);
  // Round-trip through the raw header and verify the ids were normalized to 0.
  final TarArchiveEntry roundTripped = new TarArchiveEntry(headerBlock);
  assertEquals(0, roundTripped.getUserId());
  assertEquals(0, roundTripped.getGroupId());
}
/** * Unpacks a workflow into specified directory. To be more precise: workflow package should * contain only a single directory with the workflow. This method will extract its contents into * target directory. * * @param target new workflow root directory */ public void extract(File target) throws IOException { BufferedInputStream in = new BufferedInputStream(new FileInputStream(file)); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry; while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { // The archive should contain only a single directory: we want to unpack its content. final File outputFile = new File(target, entry.getName().substring(entry.getName().indexOf('/') + 1)); if (entry.isDirectory()) { FileUtils.forceMkdir(outputFile); } else { OutputStream outputFileStream = null; try { outputFileStream = new FileOutputStream(outputFile); IOUtils.copy(tarIn, outputFileStream); } finally { IOUtils.closeQuietly(outputFileStream); } } } }
/**
 * Builds a gzipped tar archive of the project at {@code output}: every non-directory
 * project resource plus a generated archive-metadata entry, then prints the workflow
 * names found in the metadata.
 *
 * @param projectPath root of the project to archive
 * @param output path of the .tar.gz file to create
 * @param overrideParams parameters overriding the project's own configuration
 */
void createArchive(Path projectPath, Path output, Config overrideParams) throws IOException {
  out.println("Creating " + output + "...");
  ProjectArchive project =
      projectLoader.load(projectPath, WorkflowResourceMatcher.defaultMatcher(), overrideParams);
  ArchiveMetadata meta = project.getArchiveMetadata();
  try (TarArchiveOutputStream tar =
      new TarArchiveOutputStream(new GzipCompressorOutputStream(Files.newOutputStream(output)))) {
    // default mode for file names longer than 100 bytes is throwing an exception (LONGFILE_ERROR)
    tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    project.listFiles(
        (resourceName, absPath) -> {
          if (!Files.isDirectory(absPath)) {
            out.println(" Archiving " + resourceName);
            TarArchiveEntry e = buildTarArchiveEntry(project, absPath, resourceName);
            tar.putArchiveEntry(e);
            if (e.isSymbolicLink()) {
              // Link entries carry no data; the entry header alone records the target.
              out.println(" symlink -> " + e.getLinkName());
            } else {
              try (InputStream in = Files.newInputStream(absPath)) {
                ByteStreams.copy(in, tar);
              }
              tar.closeArchiveEntry();
            }
          }
        });
    // create .digdag.dig
    // TODO set default time zone if not set?
    byte[] metaBody = yamlMapper.toYaml(meta).getBytes(StandardCharsets.UTF_8);
    TarArchiveEntry metaEntry = new TarArchiveEntry(ArchiveMetadata.FILE_NAME);
    metaEntry.setSize(metaBody.length);
    metaEntry.setModTime(new Date());
    tar.putArchiveEntry(metaEntry);
    tar.write(metaBody);
    tar.closeArchiveEntry();
  }
  out.println("Workflows:");
  for (WorkflowDefinition workflow : meta.getWorkflowList().get()) {
    out.println(" " + workflow.getName());
  }
  out.println("");
}
/** Returns the underlying tar entry's modification time in epoch milliseconds. */
public long getLastModified() {
  final long modTimeMillis = entry.getModTime().getTime();
  return modTimeMillis;
}
/** Returns whether the underlying tar entry denotes a directory. */
public boolean isDirectory() {
  final boolean directory = entry.isDirectory();
  return directory;
}
public TarArchiveEntry map(final TarArchiveEntry entry) { final String name = entry.getName(); final TarArchiveEntry newEntry = new TarArchiveEntry(prefix + '/' + Utils.stripPath(strip, name), true); // Set ownership if (uid > -1) { newEntry.setUserId(uid); } else { newEntry.setUserId(entry.getUserId()); } if (gid > -1) { newEntry.setGroupId(gid); } else { newEntry.setGroupId(entry.getGroupId()); } if (user != null) { newEntry.setUserName(user); } else { newEntry.setUserName(entry.getUserName()); } if (group != null) { newEntry.setGroupName(group); } else { newEntry.setGroupName(entry.getGroupName()); } // Set permissions if (newEntry.isDirectory()) { if (dirMode > -1) { newEntry.setMode(dirMode); } else { newEntry.setMode(entry.getMode()); } } else { if (fileMode > -1) { newEntry.setMode(fileMode); } else { newEntry.setMode(entry.getMode()); } } newEntry.setSize(entry.getSize()); return newEntry; }
/**
 * Scans {@code repoDir} for .deb files, extracts each package's control metadata
 * (the control file inside control.tar.gz inside the .deb "ar" archive), and writes
 * a Packages.gz index plus a Release file describing the repository.
 */
public void execute() {
  if (repoDir == null) {
    log("repoDir attribute is empty !", LogLevel.ERR.getLevel());
    throw new RuntimeException("Bad attributes for apt-repo task");
  }
  log("repo dir: " + repoDir);
  File repoFolder = new File(repoDir);
  if (!repoFolder.exists()) {
    repoFolder.mkdirs();
  }
  // Collect every *.deb directly inside the repo folder.
  File[] files =
      repoFolder.listFiles(
          new FileFilter() {
            public boolean accept(File pathname) {
              if (pathname.getName().endsWith(FILE_DEB_EXT)) {
                return true;
              }
              return false;
            }
          });
  Packages packages = new Packages();
  // NOTE(review): listFiles() can return null if repoDir is not a directory or an
  // I/O error occurs — the loop below would NPE in that case; confirm callers.
  for (int i = 0; i < files.length; i++) {
    File file = files[i];
    PackageEntry packageEntry = new PackageEntry();
    packageEntry.setSize(file.length());
    packageEntry.setSha1(Utils.getDigest("SHA-1", file));
    packageEntry.setSha256(Utils.getDigest("SHA-256", file));
    packageEntry.setMd5sum(Utils.getDigest("MD5", file));
    String fileName = file.getName();
    packageEntry.setFilename(fileName);
    log("found deb: " + fileName);
    try {
      ArchiveInputStream control_tgz;
      ArArchiveEntry entry;
      TarArchiveEntry control_entry;
      // A .deb is an "ar" archive; walk it looking for the control.tar.gz member.
      ArchiveInputStream debStream =
          new ArchiveStreamFactory().createArchiveInputStream("ar", new FileInputStream(file));
      while ((entry = (ArArchiveEntry) debStream.getNextEntry()) != null) {
        if (entry.getName().equals("control.tar.gz")) {
          ControlHandler controlHandler = new ControlHandler();
          // control.tar.gz is itself a gzipped tar nested inside the ar stream.
          GZIPInputStream gzipInputStream = new GZIPInputStream(debStream);
          control_tgz =
              new ArchiveStreamFactory().createArchiveInputStream("tar", gzipInputStream);
          while ((control_entry = (TarArchiveEntry) control_tgz.getNextEntry()) != null) {
            log("control entry: " + control_entry.getName(), LogLevel.DEBUG.getLevel());
            if (control_entry.getName().trim().equals(CONTROL_FILE_NAME)) {
              // Read the control file into memory and hand it to the handler.
              ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
              IOUtils.copy(control_tgz, outputStream);
              String content_string = outputStream.toString("UTF-8");
              outputStream.close();
              controlHandler.setControlContent(content_string);
              log("control cont: " + outputStream.toString("utf-8"), LogLevel.DEBUG.getLevel());
              break;
            }
          }
          control_tgz.close();
          if (controlHandler.hasControlContent()) {
            controlHandler.handle(packageEntry);
          } else {
            throw new RuntimeException("no control content found for: " + file.getName());
          }
          break;
        }
      }
      debStream.close();
      packages.addPackageEntry(packageEntry);
    } catch (Exception e) {
      String msg = FAILED_TO_CREATE_APT_REPO + " " + file.getName();
      log(msg, e, LogLevel.ERR.getLevel());
      throw new RuntimeException(msg, e);
    }
  }
  try {
    // Write the gzipped Packages index, then a Release file with its hashes.
    File packagesFile = new File(repoDir, PACKAGES_GZ);
    packagesWriter =
        new BufferedWriter(
            new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(packagesFile))));
    packagesWriter.write(packages.toString());
    DefaultHashes hashes = Utils.getDefaultDigests(packagesFile);
    ReleaseInfo pinfo = new ReleaseInfo(PACKAGES_GZ, packagesFile.length(), hashes);
    Release release = new Release();
    release.addInfo(pinfo);
    final File releaseFile = new File(repoDir, RELEASE);
    FileUtils.fileWrite(releaseFile, release.toString());
  } catch (IOException e) {
    throw new RuntimeException("writing files failed", e);
  } finally {
    if (packagesWriter != null) {
      try {
        packagesWriter.close();
      } catch (IOException e) {
        throw new RuntimeException("writing files failed", e);
      }
    }
  }
}
/**
 * Unpack the content read from <i>source</i> into <i>targetFolder</i>. If the
 * <i>skipTopFolder</i> is set, then don't assume that the archive contains one single folder and
 * unpack the content of that folder, not including the folder itself.
 *
 * @param source The input source. Must be in <i>TAR</i> format.
 * @param targetFolder The destination folder for the unpack. Not used when a <tt>fileCatcher</tt>
 *     is provided
 * @param skipTopFolder Set to <code>true</code> to unpack beneath the top folder of the archive.
 *     The archive must consist of one single folder and nothing else in order for this to work.
 * @param fileCatcher Used when specific files should be picked from the archive without writing
 *     them to disk. Can be <tt>null</tt>.
 * @throws IOException
 */
public static void unpack(
    InputStream source, File targetFolder, boolean skipTopFolder, FileCatcher fileCatcher)
    throws IOException {
  String topFolderName = null;
  // Collected chmod operations, applied in a single pass after extraction.
  Map<File, Map<Integer, List<String>>> chmodMap = new HashMap<File, Map<Integer, List<String>>>();
  TarArchiveInputStream in = new TarArchiveInputStream(source);
  try {
    TarArchiveEntry te = in.getNextTarEntry();
    if (te == null) {
      throw new IOException("No entry in the tar file");
    }
    do {
      if (te.isGlobalPaxHeader()) continue;
      String name = te.getName();
      if (name.startsWith("./._"))
        // MacOS specific extended attributes addition. Just skip it
        continue;
      if (skipTopFolder) {
        // Validate that every entry lives under one common top folder, then strip it.
        int firstSlash = name.indexOf('/');
        if (firstSlash < 0) throw new IOException("Archive doesn't contain one single folder");
        String tfName = name.substring(0, firstSlash);
        if (topFolderName == null) topFolderName = tfName;
        else if (!tfName.equals(topFolderName))
          throw new IOException("Archive doesn't contain one single folder");
        name = name.substring(firstSlash + 1);
      }
      if (name.length() == 0) continue;
      String linkName = te.getLinkName();
      if (linkName != null) {
        // Treat a blank link name as "not a link".
        if (linkName.trim().equals("")) linkName = null;
      }
      if (fileCatcher != null) {
        // Catcher mode: hand matching regular files to the catcher; nothing touches disk.
        if (linkName == null && !te.isDirectory() && fileCatcher.accept(name)) {
          if (fileCatcher.catchData(name, in))
            // We're done here
            return;
        }
        continue;
      }
      File outFile = new File(targetFolder, name);
      if (linkName != null) {
        if (!OsUtil.link(targetFolder, name, te.getLinkName()))
          throw new IOException(
              "Archive contains links but they are not supported on this platform");
      } else {
        if (te.isDirectory()) {
          outFile.mkdirs();
        } else {
          outFile.getParentFile().mkdirs();
          OutputStream target = new FileOutputStream(outFile);
          StreamUtil.copy(in, target);
          target.close();
          outFile.setLastModified(te.getModTime().getTime());
        }
        // Remember the entry's mode; chmod is applied in bulk at the end.
        registerChmodFile(chmodMap, targetFolder, Integer.valueOf(te.getMode()), name);
      }
    } while ((te = in.getNextTarEntry()) != null);
  } finally {
    StreamUtil.close(in);
  }
  chmod(chmodMap);
}
/**
 * tar a file
 *
 * <p>Builds a tar entry for {@code entry} under the archive path {@code vPath},
 * applying the configured long-file-name policy, ownership, mode, and timestamps,
 * then streams the file's content (symlinks and directories carry no content).
 *
 * @param entry the file to tar
 * @param tOut the output stream
 * @param vPath the path name of the file to tar
 * @throws IOException on error
 */
protected void tarFile(ArchiveEntry entry, TarArchiveOutputStream tOut, String vPath)
    throws ArchiverException, IOException {
  // don't add "" to the archive
  if (vPath.length() <= 0) {
    return;
  }
  // Directory entry names conventionally end with '/'.
  if (entry.getResource().isDirectory() && !vPath.endsWith("/")) {
    vPath += "/";
  }
  if (vPath.startsWith("/") && !options.getPreserveLeadingSlashes()) {
    int l = vPath.length();
    if (l <= 1) {
      // we would end up adding "" to the archive
      return;
    }
    vPath = vPath.substring(1, l);
  }
  int pathLength = vPath.length();
  InputStream fIn = null;
  try {
    TarArchiveEntry te;
    // Long-name handling: only relevant when not in GNU mode and the name exceeds
    // the classic 100-character tar name field.
    if (!longFileMode.isGnuMode()
        && pathLength >= org.apache.commons.compress.archivers.tar.TarConstants.NAMELEN) {
      int maxPosixPathLen =
          org.apache.commons.compress.archivers.tar.TarConstants.NAMELEN
              + org.apache.commons.compress.archivers.tar.TarConstants.PREFIXLEN;
      if (longFileMode.isPosixMode()) {
        // POSIX mode: nothing to do here; long names are handled downstream.
      } else if (longFileMode.isPosixWarnMode()) {
        if (pathLength > maxPosixPathLen) {
          getLogger()
              .warn("Entry: " + vPath + " longer than " + maxPosixPathLen + " characters.");
          if (!longWarningGiven) {
            getLogger()
                .warn(
                    "Resulting tar file can only be processed "
                        + "successfully by GNU compatible tar commands");
            longWarningGiven = true;
          }
        }
      } else if (longFileMode.isOmitMode()) {
        getLogger().info("Omitting: " + vPath);
        return;
      } else if (longFileMode.isWarnMode()) {
        getLogger()
            .warn(
                "Entry: "
                    + vPath
                    + " longer than "
                    + org.apache.commons.compress.archivers.tar.TarConstants.NAMELEN
                    + " characters.");
        if (!longWarningGiven) {
          getLogger()
              .warn(
                  "Resulting tar file can only be processed "
                      + "successfully by GNU compatible tar commands");
          longWarningGiven = true;
        }
      } else if (longFileMode.isFailMode()) {
        throw new ArchiverException(
            "Entry: "
                + vPath
                + " longer than "
                + org.apache.commons.compress.archivers.tar.TarConstants.NAMELEN
                + " characters.");
      } else {
        throw new IllegalStateException("Non gnu mode should never get here?");
      }
    }
    if (entry.getType() == ArchiveEntry.SYMLINK) {
      // Symlinks are stored as link entries pointing at their destination.
      final SymlinkDestinationSupplier plexusIoSymlinkResource =
          (SymlinkDestinationSupplier) entry.getResource();
      te = new TarArchiveEntry(vPath, TarArchiveEntry.LF_SYMLINK);
      te.setLinkName(plexusIoSymlinkResource.getSymlinkDestination());
    } else {
      te = new TarArchiveEntry(vPath);
    }
    long teLastModified = entry.getResource().getLastModified();
    // Fall back to "now" when the resource has no known modification date.
    te.setModTime(
        teLastModified == PlexusIoResource.UNKNOWN_MODIFICATION_DATE
            ? System.currentTimeMillis()
            : teLastModified);
    if (entry.getType() == ArchiveEntry.SYMLINK) {
      te.setSize(0);
    } else if (!entry.getResource().isDirectory()) {
      final long size = entry.getResource().getSize();
      te.setSize(size == PlexusIoResource.UNKNOWN_RESOURCE_SIZE ? 0 : size);
    }
    te.setMode(entry.getMode());
    // Ownership: resource attributes win; fall back to the archiver options.
    PlexusIoResourceAttributes attributes = entry.getResourceAttributes();
    te.setUserName(
        (attributes != null && attributes.getUserName() != null)
            ? attributes.getUserName()
            : options.getUserName());
    te.setGroupName(
        (attributes != null && attributes.getGroupName() != null)
            ? attributes.getGroupName()
            : options.getGroup());
    final int userId =
        (attributes != null && attributes.getUserId() != null)
            ? attributes.getUserId()
            : options.getUid();
    if (userId >= 0) {
      te.setUserId(userId);
    }
    final int groupId =
        (attributes != null && attributes.getGroupId() != null)
            ? attributes.getGroupId()
            : options.getGid();
    if (groupId >= 0) {
      te.setGroupId(groupId);
    }
    tOut.putArchiveEntry(te);
    try {
      // Only regular files carry content; copy failures are logged, not fatal,
      // and the entry is always closed.
      if (entry.getResource().isFile() && !(entry.getType() == ArchiveEntry.SYMLINK)) {
        fIn = entry.getInputStream();
        Streams.copyFullyDontCloseOutput(fIn, tOut, "xAR");
      }
    } catch (Throwable e) {
      getLogger().warn("When creating tar entry", e);
    } finally {
      tOut.closeArchiveEntry();
    }
  } finally {
    IOUtil.close(fIn);
  }
}
/** Builds this entry's relative path by splitting its tar name on '/'. */
public RelativePath getRelativePath() {
  final String[] segments = entry.getName().split("/");
  return new RelativePath(true, segments);
}
/**
 * Streams the fileset's directories and files to {@code pReceiver} as tar entries.
 * When the fileset is a Tar.TarFileSet, its ownership, permission, and prefix
 * overrides are applied; otherwise root ownership and default modes are used.
 */
public void produce(final DataConsumer pReceiver) throws IOException {
  // Defaults: root-owned entries with the standard tar file/dir modes.
  String user = Producers.ROOT_NAME;
  int uid = Producers.ROOT_UID;
  String group = Producers.ROOT_NAME;
  int gid = Producers.ROOT_UID;
  int filemode = TarEntry.DEFAULT_FILE_MODE;
  int dirmode = TarEntry.DEFAULT_DIR_MODE;
  String prefix = "";
  if (fileset instanceof Tar.TarFileSet) {
    // Tar filesets can override ownership, permissions, and the entry-name prefix.
    Tar.TarFileSet tarfileset = (Tar.TarFileSet) fileset;
    user = tarfileset.getUserName();
    uid = tarfileset.getUid();
    group = tarfileset.getGroup();
    gid = tarfileset.getGid();
    filemode = tarfileset.getMode();
    dirmode = tarfileset.getDirMode(tarfileset.getProject());
    prefix = tarfileset.getPrefix(tarfileset.getProject());
  }
  final DirectoryScanner scanner = fileset.getDirectoryScanner(fileset.getProject());
  scanner.scan();
  final File basedir = scanner.getBasedir();
  for (String directory : scanner.getIncludedDirectories()) {
    // Normalize Windows separators so tar entry names always use '/'.
    String name = directory.replace('\\', '/');
    final TarArchiveEntry entry = new TarArchiveEntry(prefix + "/" + name);
    entry.setUserName(user);
    entry.setUserId(uid);
    entry.setGroupName(group);
    entry.setGroupId(gid);
    entry.setMode(dirmode);
    pReceiver.onEachDir(entry);
  }
  for (String filename : scanner.getIncludedFiles()) {
    final String name = filename.replace('\\', '/');
    final File file = new File(basedir, name);
    final InputStream inputStream = new FileInputStream(file);
    try {
      final String entryName = prefix + "/" + name;
      final File entryPath = new File(entryName);
      final boolean symbolicLink = SymlinkUtils.isSymbolicLink(entryPath);
      final TarArchiveEntry e;
      if (symbolicLink) {
        // Preserve the link itself rather than the target's content.
        e = new TarArchiveEntry(entryName, TarConstants.LF_SYMLINK);
        e.setLinkName(SymlinkUtils.readSymbolicLink(entryPath));
      } else {
        e = new TarArchiveEntry(entryName, true);
      }
      e.setUserId(uid);
      e.setGroupId(gid);
      e.setUserName(user);
      e.setGroupName(group);
      e.setMode(filemode);
      e.setSize(file.length());
      pReceiver.onEachFile(inputStream, e);
    } finally {
      inputStream.close();
    }
  }
}
/**
 * Creates a tar entry for {@code absPath} under the archive name {@code name}.
 * Symbolic links are stored as link entries with a relative, project-validated target;
 * regular files carry their POSIX permission bits when the filesystem exposes them.
 */
private TarArchiveEntry buildTarArchiveEntry(ProjectArchive project, Path absPath, String name)
    throws IOException {
  TarArchiveEntry e;
  if (Files.isSymbolicLink(absPath)) {
    e = new TarArchiveEntry(name, TarConstants.LF_SYMLINK);
    Path rawDest = Files.readSymbolicLink(absPath);
    Path normalizedAbsDest = absPath.getParent().resolve(rawDest).normalize();
    // Reject links whose resolved target lies outside the project
    // (pathToResourceName throws for such paths).
    try {
      project.pathToResourceName(normalizedAbsDest);
    } catch (IllegalArgumentException ex) {
      throw new IllegalArgumentException("Invalid symbolic link: " + ex.getMessage());
    }
    // absolute path will be invalid on a server. convert it to a relative path
    Path normalizedRelativeDest = absPath.getParent().relativize(normalizedAbsDest);
    String linkName = normalizedRelativeDest.toString();
    // TarArchiveEntry(File) does this normalization but setLinkName doesn't. So do it here:
    linkName = linkName.replace(File.separatorChar, '/');
    e.setLinkName(linkName);
  } else {
    e = new TarArchiveEntry(absPath.toFile(), name);
    try {
      // Translate POSIX permissions into the octal tar mode bits.
      int mode = 0;
      for (PosixFilePermission perm : Files.getPosixFilePermissions(absPath)) {
        switch (perm) {
          case OWNER_READ:
            mode |= 0400;
            break;
          case OWNER_WRITE:
            mode |= 0200;
            break;
          case OWNER_EXECUTE:
            mode |= 0100;
            break;
          case GROUP_READ:
            mode |= 0040;
            break;
          case GROUP_WRITE:
            mode |= 0020;
            break;
          case GROUP_EXECUTE:
            mode |= 0010;
            break;
          case OTHERS_READ:
            mode |= 0004;
            break;
          case OTHERS_WRITE:
            mode |= 0002;
            break;
          case OTHERS_EXECUTE:
            mode |= 0001;
            break;
          default:
            // ignore
        }
      }
      e.setMode(mode);
    } catch (UnsupportedOperationException ex) {
      // ignore custom mode
    }
  }
  return e;
}
/** * Extracts files to the specified destination * * @param file the file to extract to * @param dest the destination directory * @throws IOException */ public static void unzipFileTo(String file, String dest) throws IOException { File target = new File(file); if (!target.exists()) throw new IllegalArgumentException("Archive doesnt exist"); FileInputStream fin = new FileInputStream(target); int BUFFER = 2048; byte data[] = new byte[BUFFER]; if (file.endsWith(".zip")) { // get the zip file content ZipInputStream zis = new ZipInputStream(fin); // get the zipped file list entry ZipEntry ze = zis.getNextEntry(); while (ze != null) { String fileName = ze.getName(); File newFile = new File(dest + File.separator + fileName); log.info("file unzip : " + newFile.getAbsoluteFile()); // create all non exists folders // else you will hit FileNotFoundException for compressed folder new File(newFile.getParent()).mkdirs(); FileOutputStream fos = new FileOutputStream(newFile); int len; while ((len = zis.read(data)) > 0) { fos.write(data, 0, len); } fos.close(); ze = zis.getNextEntry(); } zis.closeEntry(); zis.close(); } else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) { BufferedInputStream in = new BufferedInputStream(fin); GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn); TarArchiveEntry entry = null; /** Read the tar entries using the getNextEntry method * */ while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) { log.info("Extracting: " + entry.getName()); /** If the entry is a directory, create the directory. * */ if (entry.isDirectory()) { File f = new File(dest + File.separator + entry.getName()); f.mkdirs(); } /** * If the entry is a file,write the decompressed file to the disk and close destination * stream. 
*/ else { int count; FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER); while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } destStream.flush(); ; IOUtils.closeQuietly(destStream); } } /** Close the input stream * */ tarIn.close(); } else if (file.endsWith(".gz")) { GZIPInputStream is2 = new GZIPInputStream(fin); File extracted = new File(target.getParent(), target.getName().replace(".gz", "")); if (extracted.exists()) extracted.delete(); extracted.createNewFile(); OutputStream fos = FileUtils.openOutputStream(extracted); IOUtils.copyLarge(is2, fos); is2.close(); fos.flush(); fos.close(); } target.delete(); }
/** Human-readable identifier of the form "tar entry &lt;archive&gt;!&lt;entry name&gt;". */
public String getDisplayName() {
  final String entryName = entry.getName();
  return String.format("tar entry %s!%s", tarFile, entryName);
}
/**
 * Untar an input file into an output file.
 *
 * <p>The output file is created in the output folder, having the same name as the input file,
 * minus the '.tar' extension.
 *
 * <p>The archive's first entry is expected to be an index (index.json) describing the
 * remaining entries; content files absent from that index are skipped. After extraction,
 * temp folders atomically replace the collection and media caches and the archive file
 * is deleted.
 *
 * @param archiveFile input TAR file
 * @param lastModified when collection has been last modified
 * @throws FileNotFoundException
 * @throws ArchiveException
 */
private static List<FileWithMeta> performUnTar(
    File archiveFile, IonConfig config, Collection collection, String lastModified,
    Context context)
    throws FileNotFoundException, IOException, ArchiveException {
  File collectionFolder = FilePaths.getCollectionFolderPath(config, context);
  File collectionFolderTemp = FilePaths.getTempFilePath(collectionFolder);
  final List<FileWithMeta> untaredFiles = new LinkedList<>();
  InputStream is = null;
  TarArchiveInputStream debInputStream = null;
  try {
    IonLog.d(
        TAG,
        String.format(
            "Untaring %s to dir %s.", archiveFile.getPath(), collectionFolder.getPath()));
    is = new FileInputStream(archiveFile);
    debInputStream =
        (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
    TarArchiveEntry entry;
    List<ArchiveIndex> index = null;
    boolean indexHasBeenRead = false;
    while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
      if (!indexHasBeenRead) {
        // get index.json
        // The first entry is parsed as the archive index describing all other entries.
        InputStreamReader inputStreamReader = new InputStreamReader(debInputStream, "UTF-8");
        index =
            Arrays.asList(
                GsonHolder.getInstance().fromJson(inputStreamReader, ArchiveIndex[].class));
        indexHasBeenRead = true;
        continue;
      }
      // write the "content" files
      if (!entry.isDirectory()) {
        String archiveFileName = entry.getName();
        ArchiveIndex fileInfo = ArchiveIndex.getByName(archiveFileName, index);
        if (fileInfo == null) {
          IonLog.w(
              TAG, "Skipping " + entry.getName() + " because it was not found in index.json.");
          continue;
        }
        IonLog.i(TAG, fileInfo.url);
        FileWithMeta fileWithMeta = getFilePath(fileInfo, collectionFolderTemp, config, context);
        File targetFile = fileWithMeta.file;
        FileUtils.createDir(targetFile.getParentFile());
        targetFile = FileUtils.writeToFile(debInputStream, targetFile);
        if (targetFile != null) {
          untaredFiles.add(fileWithMeta);
        }
      }
    }
  } finally {
    // finished reading TAR archive
    if (is != null) {
      is.close();
    }
    if (debInputStream != null) {
      debInputStream.close();
    }
    // The archive is consumed; remove it regardless of success.
    if (archiveFile != null && archiveFile.exists()) {
      archiveFile.delete();
    }
  }
  // if lastModified date was not passed, look if cache index entry exists for collection and
  // retrieve it from there
  if (collection != null && lastModified == null) {
    CollectionCacheIndex collectionCacheIndex = CollectionCacheIndex.retrieve(config, context);
    lastModified = collectionCacheIndex == null ? null : collectionCacheIndex.getLastModified();
    IonLog.d(TAG, "Restoring last_modified from cache index: " + lastModified);
  }
  // delete old cache index entries of the collection in shared preferences and in memory cache
  CacheIndexStore.clearCollection(config, context);
  MemoryCache.clear();
  // replace collection folder (containing json files) - deletes old file cache
  boolean jsonWriteSuccess = FileUtils.move(collectionFolderTemp, collectionFolder, true);
  if (!jsonWriteSuccess) {
    throw new IOException("JSON files could not be moved to final path.");
  }
  // replace media files in collection
  File mediaFolderTemp = FilePaths.getMediaFolderPath(config, context, true);
  File mediaFolder = FilePaths.getMediaFolderPath(config, context, false);
  if (mediaFolderTemp.exists()) {
    boolean mediaWriteSuccess = FileUtils.move(mediaFolderTemp, mediaFolder, true);
    if (!mediaWriteSuccess) {
      throw new IOException("Media files could not be moved to final path.");
    }
  } else {
    IonLog.w(TAG, "No media files were contained in archive.");
  }
  // add collection to file cache again
  if (collection != null) {
    MemoryCache.saveCollection(collection, config, context);
    try {
      saveCollectionToFileCache(config, collection, context);
      CollectionCacheIndex.save(config, context, lastModified);
    } catch (IOException e) {
      IonLog.e("ION Archive", "Collection could not be saved.");
      IonLog.ex(e);
    }
  }
  // cache index entries are not written yet at this point
  return untaredFiles;
}
/**
 * Converts the RDF file {@code f} to HDT, packages the HDT into a .hdt.tar.bz2
 * archive next to the input, and appends size/timing statistics comparing HDT
 * against the NT+BZIP2 baseline to the log file.
 */
private void run(File f) throws IOException, ParserException {
  writeLogFile(logFile, "Start HDT on file " + f.getAbsolutePath(), false);
  long start = System.currentTimeMillis();
  OutputStream os = new FileOutputStream(f.getAbsolutePath() + ".hdt.tar.bz2", false);
  OutputStream bzos = new BZip2CompressorOutputStream(os);
  TarArchiveOutputStream aos = new TarArchiveOutputStream(bzos);
  HDT hdt =
      HDTManager.generateHDT(
          f.getAbsolutePath(),
          "urn:rdfcomp",
          RDFNotation.parse("ntriples"),
          new HDTSpecification(),
          null);
  hdt.saveToHDT(tmpDir + "/" + name + "_data.hdt", null);
  long saveToHDT = System.currentTimeMillis() - start;
  File filePrefix = new File(tmpDir + "/" + name + "_data.hdt");
  TarArchiveEntry entry = new TarArchiveEntry(filePrefix, "mappings.hdt");
  entry.setSize(filePrefix.length());
  aos.putArchiveEntry(entry);
  // Fix: the FileInputStream was previously leaked after the copy.
  InputStream hdtIn = new FileInputStream(filePrefix);
  try {
    IOUtils.copy(hdtIn, aos);
  } finally {
    hdtIn.close();
  }
  aos.closeArchiveEntry();
  aos.finish();
  aos.close();
  bzos.close();
  os.close();
  // Fix: was "currentTimeMillis() - saveToHDT", which subtracts a duration from a
  // timestamp and yields a meaningless value; measure elapsed tar+bzip2 time instead.
  long saveToTarBzip2 = System.currentTimeMillis() - start - saveToHDT;
  long overall = System.currentTimeMillis() - start;
  String log =
      "Original size: "
          + f.length()
          + "B = "
          + f.length() / 1024
          + " KB"
          + " ="
          + f.length() / (1024 * 1024)
          + " MB";
  writeLogFile(logFile, log, true);
  Model model = ModelLoader.getModel(file.getAbsolutePath());
  long ntBzip2 = computeOrginalNTriple(model, file);
  log =
      "NT+BZIP size: "
          + ntBzip2
          + " B= "
          + ntBzip2 / 1024
          + " KB= "
          + ntBzip2 / (1024 * 1024)
          + " MB";
  writeLogFile(logFile, log, true);
  log =
      "HDT size: "
          + filePrefix.length()
          + " B= "
          + filePrefix.length() / 1024
          + " KB= "
          + filePrefix.length() / (1024 * 1024)
          + " MB";
  writeLogFile(logFile, log, true);
  long hdtBzip2Size = new File(f.getAbsolutePath() + ".hdt.tar.bz2").length();
  log =
      "HDT+BZIP2 size: "
          + hdtBzip2Size
          + " B= "
          + hdtBzip2Size / 1024
          + " KB= "
          + hdtBzip2Size / (1024 * 1024)
          + " MB";
  writeLogFile(logFile, log, true);
  // Fix: replaced deprecated "new Double(...)" boxing with plain double division.
  double ratio = (double) filePrefix.length() / (double) ntBzip2;
  log = "HDT / NTBZip2 ratio= " + ratio;
  writeLogFile(logFile, log, true);
  ratio = (double) hdtBzip2Size / (double) ntBzip2;
  log = "HDT+BZIP2 / NTBZip2 ratio= " + ratio + " \n\n";
  writeLogFile(logFile, log, true);
  log = "Time HDT: " + saveToHDT + "ms = " + saveToHDT / 1000 + " s ";
  writeLogFile(logFile, log, true);
  // Fix: the next two lines used "+=", duplicating earlier text in each log record.
  log = "Time HDT+BZIP: " + saveToTarBzip2 + "ms =" + saveToTarBzip2 / 1000 + "s";
  writeLogFile(logFile, log, true);
  log = "Time overall: " + overall + "ms = " + overall / 1000 + " s ";
  writeLogFile(logFile, log, true);
}
/** Returns the byte size recorded for the underlying tar entry. */
public long getSize() {
  final long byteCount = entry.getSize();
  return byteCount;
}