/**
 * Decompresses a zip archive into the given destination directory.
 *
 * <p>Does nothing if {@code file} does not exist.
 *
 * @param file the zip archive to decompress
 * @param destDir the directory entries are extracted into
 * @param encoding the charset name used to decode zip entry names
 * @throws CommonsException if reading the archive or extracting an entry fails
 */
public static void decompress(File file, File destDir, String encoding) {
  if (!file.exists()) {
    return;
  }
  // try-with-resources replaces the manual finally/closeQuietly bookkeeping.
  // The original also opened the file TWICE (an unused FileInputStream plus
  // the one wrapped by ZipArchiveInputStream), wasting a file descriptor.
  try (ZipArchiveInputStream zis =
      new ZipArchiveInputStream(new FileInputStream(file), encoding)) {
    ArchiveEntry archiveEntry;
    while ((archiveEntry = zis.getNextEntry()) != null) {
      CompressUtilsHelper.decompress(zis, archiveEntry, destDir);
    }
  } catch (Exception e) {
    throw new CommonsException(
        "Decompress " + file.getPath() + " to " + destDir.getPath() + " failed!", e);
  }
}
private void readDataFromZip() { FileInputStream fis = null; ZipArchiveInputStream zis = null; try { // Create input stream fis = new FileInputStream(this.filePath); zis = new ZipArchiveInputStream(new BufferedInputStream(fis)); ArchiveEntry entry; // Extract files while ((entry = zis.getNextEntry()) != null) { if (!entry.isDirectory() && this.innerFileName.equals(entry.getName())) { System.out.println( "LOCATED INNER FILE THAT SHOULD BE PROCESSED <" + entry.getName() + ">"); extractTo(zis, entry); } } } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { IOUtils.closeQuietly(zis); IOUtils.closeQuietly(fis); } }
private void extract(final String inputFile, final String outputDir) throws IOException { FileInputStream fileInputStream = null; ZipArchiveInputStream zipArchiveInputStream = null; FileOutputStream fileOutputStream = null; try { Log.d(this.getClass().getName(), "Will extract " + inputFile + " to " + outputDir); byte[] buffer = new byte[8192]; fileInputStream = new FileInputStream(inputFile); // We use null as encoding. zipArchiveInputStream = new ZipArchiveInputStream(fileInputStream, null, true); ArchiveEntry entry; while ((entry = zipArchiveInputStream.getNextEntry()) != null) { Log.d(this.getClass().getName(), "Extracting entry " + entry.getName()); File file = new File(outputDir, entry.getName()); if (entry.isDirectory()) { file.mkdirs(); } else { file.getParentFile().mkdirs(); fileOutputStream = new FileOutputStream(file); int bytesRead; while ((bytesRead = zipArchiveInputStream.read(buffer, 0, buffer.length)) != -1) fileOutputStream.write(buffer, 0, bytesRead); fileOutputStream.close(); fileOutputStream = null; } } // Delete the zip file File zipFile = new File(inputFile); zipFile.delete(); } catch (Exception e) { Log.e("UnzipperTask", "Error unzipping file: " + inputFile + ", " + e); } finally { try { zipArchiveInputStream.close(); fileInputStream.close(); if (fileOutputStream != null) { fileOutputStream.close(); } } catch (NullPointerException ex) { Log.e(this.getClass().getName(), "Error closing the file streams.", ex); } catch (IOException ex) { Log.e(this.getClass().getName(), "Error closing the file streams.", ex); } } }
/**
 * Load this package in memory from an InputStream. It may be installed later using {@link
 * #install(XWikiContext)}.
 *
 * @param file an InputStream of a zipped package file
 * @param context current XWikiContext
 * @return an empty string, useless.
 * @throws IOException while reading the ZipFile
 * @throws XWikiException when package content is broken
 * @since 2.3M2
 */
public String Import(InputStream file, XWikiContext context) throws IOException, XWikiException {
  ZipArchiveInputStream zis;
  ArchiveEntry entry;
  Document description = null;

  try {
    // NOTE(review): zis wraps the caller-supplied stream and is never closed
    // here — presumably the caller owns and closes `file`; confirm.
    zis = new ZipArchiveInputStream(file, XAR_FILENAME_ENCODING, false);

    List<XWikiDocument> docsToLoad = new LinkedList<XWikiDocument>();
    /*
     * Loop 1: Cycle through the zip input stream and load out all of the documents, when we find the
     * package.xml file we put it aside to so that we only include documents which are in the file.
     */
    while ((entry = zis.getNextEntry()) != null) {
      if (entry.isDirectory() || (entry.getName().indexOf("META-INF") != -1)) {
        // The entry is either a directory or is something inside of the META-INF dir.
        // (we use that directory to put meta data such as LICENSE/NOTICE files.)
        continue;
      } else if (entry.getName().compareTo(DefaultPackageFileName) == 0) {
        // The entry is the manifest (package.xml). Read this differently.
        // CloseShieldInputStream stops fromXml from closing zis underneath us.
        description = fromXml(new CloseShieldInputStream(zis));
      } else {
        XWikiDocument doc = null;
        try {
          // Same close-shield trick: the XML reader must not close the
          // archive stream, or the remaining entries would be lost.
          doc = readFromXML(new CloseShieldInputStream(zis));
        } catch (Throwable ex) {
          // A single unparseable document must not abort the whole import;
          // record it and move on to the next entry.
          LOGGER.warn(
              "Failed to parse document ["
                  + entry.getName()
                  + "] from XML during import, thus it will not be installed. "
                  + "The error was: "
                  + ex.getMessage());
          // It will be listed in the "failed documents" section after the import.
          addToErrors(entry.getName().replaceAll("/", "."), context);
          continue;
        }

        // Run all of the registered DocumentFilters on this document and
        // if no filters throw exceptions, add it to the list to import.
        try {
          this.filter(doc, context);
          docsToLoad.add(doc);
        } catch (ExcludeDocumentException e) {
          LOGGER.info("Skip the document '" + doc.getDocumentReference() + "'");
        }
      }
    }

    // Make sure a manifest was included in the package...
    if (description == null) {
      throw new PackageException(
          XWikiException.ERROR_XWIKI_UNKNOWN, "Could not find the package definition");
    }

    /*
     * Loop 2: Cycle through the list of documents and if they are in the manifest then add them, otherwise log
     * a warning and add them to the skipped list.
     */
    for (XWikiDocument doc : docsToLoad) {
      if (documentExistInPackageFile(doc.getFullName(), doc.getLanguage(), description)) {
        this.add(doc, context);
      } else {
        LOGGER.warn(
            "document "
                + doc.getDocumentReference()
                + " does not exist in package definition."
                + " It will not be installed.");
        // It will be listed in the "skipped documents" section after the
        // import.
        addToSkipped(doc.getFullName(), context);
      }
    }

    updateFileInfos(description);
  } catch (DocumentException e) {
    // NOTE(review): the original DocumentException cause is dropped here,
    // losing the underlying parse error details.
    throw new PackageException(XWikiException.ERROR_XWIKI_UNKNOWN, "Error when reading the XML");
  }

  return "";
}
/**
 * Imports every document entry of a XAR input stream into the given wiki,
 * merging against the previous XAR file and accumulating per-document merge
 * results.
 *
 * @param previousXarFile the previously installed XAR, used for 3-way merges
 * @param xarInputStream the XAR content to import; not closed by this method —
 *     presumably the caller owns it (TODO confirm)
 * @param wiki the target wiki database; the context database is switched to it
 *     for the duration of the import and restored afterwards
 * @param configuration import settings (logging, merge behaviour)
 * @return the aggregated merge result for all imported documents
 * @throws IOException while reading the XAR stream
 */
private XarMergeResult importXARToWiki(
    XarFile previousXarFile,
    InputStream xarInputStream,
    String wiki,
    PackageConfiguration configuration)
    throws IOException {
  XarMergeResult mergeResult = new XarMergeResult();

  ZipArchiveInputStream zis = new ZipArchiveInputStream(xarInputStream);

  XWikiContext xcontext = getXWikiContext();

  // Remember the current database so it can be restored in the finally block.
  String currentWiki = xcontext.getDatabase();
  try {
    xcontext.setDatabase(wiki);

    // Bracket the whole import with importing/imported events (see finally).
    this.observation.notify(new XARImportingEvent(), null, xcontext);

    for (ArchiveEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
      if (!entry.isDirectory()) {
        // A fresh handler per entry; it parses the entry and performs the merge.
        DocumentImporterHandler documentHandler =
            new DocumentImporterHandler(this, this.componentManager, wiki);

        try {
          documentHandler.setPreviousXarFile(previousXarFile);
          documentHandler.setConfiguration(configuration);
          parseDocument(zis, documentHandler);

          if (documentHandler.getMergeResult() != null) {
            mergeResult.addMergeResult(documentHandler.getMergeResult());
          }

          if (configuration.isLogEnabled()) {
            this.logger.info(
                "Successfully imported document [{}] in language [{}]",
                documentHandler.getDocument().getDocumentReference(),
                documentHandler.getDocument().getRealLanguage());
          }
        } catch (NotADocumentException e) {
          // Impossible to know that before parsing
          this.logger.debug("Entry [" + entry + "] is not a document", e);
        } catch (Exception e) {
          // One broken entry must not abort the rest of the import.
          this.logger.error("Failed to parse document [" + entry.getName() + "]", e);

          if (configuration.isLogEnabled()) {
            this.logger.info(
                "Failed to import document [{}] in language [{}]",
                documentHandler.getDocument().getDocumentReference(),
                documentHandler.getDocument().getRealLanguage());
          }
        }
      }
    }
  } finally {
    // Always emit the "imported" event and restore the original database,
    // even if iteration threw.
    this.observation.notify(new XARImportedEvent(), null, xcontext);

    xcontext.setDatabase(currentWiki);
  }

  return mergeResult;
}