Example #1
  /**
   * Extracts the given archive file into the given directory
   *
   * @param sourceArchive Archive to extract
   * @param destinationDirectory Directory to extract archive to
   */
  private static void extractFiles(File sourceArchive, File destinationDirectory) {
    ArchiveInputStream archiveInputStream = null;
    try {
      archiveInputStream = createArchiveInputStream(sourceArchive);
      ArchiveEntry zipEntry;
      while ((zipEntry = archiveInputStream.getNextEntry()) != null) {
        // Validate entry name before extracting
        String validatedEntryName = validateEntryName(zipEntry.getName());

        if (StringUtils.isNotBlank(validatedEntryName)) {
          extractFile(
              sourceArchive,
              destinationDirectory,
              archiveInputStream,
              validatedEntryName,
              zipEntry.getLastModifiedDate(),
              zipEntry.isDirectory());
        }
      }

    } catch (IOException ioe) {
      throw new RuntimeException("Error while extracting " + sourceArchive.getPath(), ioe);
    } finally {
      IOUtils.closeQuietly(archiveInputStream);
    }
  }
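
The validateEntryName helper called above is not shown in this snippet. Below is a minimal, hypothetical sketch of such a guard, assuming it returns null for entries that should be skipped; the traversal checks are an assumption, not the original implementation.

  /**
   * Hypothetical sketch of the entry-name guard used by extractFiles: rejects
   * absolute paths and ".." traversal (Zip Slip), returning null so the caller
   * skips the entry.
   */
  private static String validateEntryName(String entryName) {
    if (entryName == null || entryName.isEmpty()) {
      return null;
    }
    // Normalize separators, then reject absolute paths and parent-directory escapes.
    String normalized = entryName.replace('\\', '/');
    if (normalized.startsWith("/") || normalized.contains("..")) {
      return null;
    }
    return normalized;
  }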
  /** Extracts the archive resource and then runs the batch-import process on it. */
  protected void importDataArchive(
      final Resource resource,
      final ArchiveInputStream resourceStream,
      BatchImportOptions options) {

    final File tempDir = Files.createTempDir();
    try {
      ArchiveEntry archiveEntry;
      while ((archiveEntry = resourceStream.getNextEntry()) != null) {
        final File entryFile = new File(tempDir, archiveEntry.getName());
        if (archiveEntry.isDirectory()) {
          entryFile.mkdirs();
        } else {
          entryFile.getParentFile().mkdirs();

          Files.copy(
              new InputSupplier<InputStream>() {
                @Override
                public InputStream getInput() throws IOException {
                  return new CloseShieldInputStream(resourceStream);
                }
              },
              entryFile);
        }
      }

      importDataDirectory(tempDir, null, options);
    } catch (IOException e) {
      throw new RuntimeException(
          "Failed to extract data from '" + resource + "' to '" + tempDir + "' for batch import.",
          e);
    } finally {
      FileUtils.deleteQuietly(tempDir);
    }
  }
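
The anonymous InputSupplier above exists so that Guava's Files.copy can open and close a fresh stream per entry without closing the shared archive stream: CloseShieldInputStream swallows the close() call. On Java 7+ the same per-entry copy can be written without the supplier; a sketch under that assumption, not the original code:

          // Per-entry copy with java.nio: the close shield keeps Files.copy
          // from closing the shared resourceStream between entries.
          java.nio.file.Files.copy(
              new CloseShieldInputStream(resourceStream),
              entryFile.toPath(),
              java.nio.file.StandardCopyOption.REPLACE_EXISTING);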
  private void readDataFromZip() {
    FileInputStream fis = null;
    ZipArchiveInputStream zis = null;

    try {
      // Create input stream
      fis = new FileInputStream(this.filePath);
      zis = new ZipArchiveInputStream(new BufferedInputStream(fis));

      ArchiveEntry entry;

      // Extract files
      while ((entry = zis.getNextEntry()) != null) {

        if (!entry.isDirectory() && this.innerFileName.equals(entry.getName())) {
          System.out.println(
              "LOCATED INNER FILE THAT SHOULD BE PROCESSED <" + entry.getName() + ">");
          extractTo(zis, entry);
        }
      }

    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      IOUtils.closeQuietly(zis);
      IOUtils.closeQuietly(fis);
    }
  }
Example #4
  private void unpackSphinx() throws MavenReportException {
    if (!sphinxSourceDirectory.exists() && !sphinxSourceDirectory.mkdirs()) {
      throw new MavenReportException(
          "Could not generate the temporary directory "
              + sphinxSourceDirectory.getAbsolutePath()
              + " for the sphinx sources");
    }

    if (verbose) {
      getLog().info("Unpacking sphinx to " + sphinxSourceDirectory.getAbsolutePath());
    }
    ArchiveInputStream input = null;
    try {
      input =
          new ArchiveStreamFactory()
              .createArchiveInputStream("jar", SphinxMojo.class.getResourceAsStream("/sphinx.jar"));
      ArchiveEntry entry = input.getNextEntry();

      while (entry != null) {
        File archiveEntry = new File(sphinxSourceDirectory, entry.getName());
        archiveEntry.getParentFile().mkdirs();
        if (entry.isDirectory()) {
          archiveEntry.mkdir();
          entry = input.getNextEntry();
          continue;
        }
        // Close the per-entry output stream even when the copy fails.
        OutputStream out = new FileOutputStream(archiveEntry);
        try {
          IOUtils.copy(input, out);
        } finally {
          out.close();
        }
        entry = input.getNextEntry();
      }
    } catch (Exception ex) {
      throw new MavenReportException("Could not unpack the sphinx source", ex);
    } finally {
      // Close the archive stream even when an exception interrupts the loop.
      IOUtils.closeQuietly(input);
    }
  }
  private void extract(final String inputFile, final String outputDir) throws IOException {
    FileInputStream fileInputStream = null;
    ZipArchiveInputStream zipArchiveInputStream = null;
    FileOutputStream fileOutputStream = null;
    try {

      Log.d(this.getClass().getName(), "Will extract " + inputFile + " to " + outputDir);

      byte[] buffer = new byte[8192];
      fileInputStream = new FileInputStream(inputFile);

      // A null encoding selects the platform default; 'true' enables Unicode extra fields.
      zipArchiveInputStream = new ZipArchiveInputStream(fileInputStream, null, true);
      ArchiveEntry entry;
      while ((entry = zipArchiveInputStream.getNextEntry()) != null) {
        Log.d(this.getClass().getName(), "Extracting entry " + entry.getName());
        File file = new File(outputDir, entry.getName());
        if (entry.isDirectory()) {
          file.mkdirs();
        } else {
          file.getParentFile().mkdirs();
          fileOutputStream = new FileOutputStream(file);
          int bytesRead;
          while ((bytesRead = zipArchiveInputStream.read(buffer, 0, buffer.length)) != -1)
            fileOutputStream.write(buffer, 0, bytesRead);
          fileOutputStream.close();
          fileOutputStream = null;
        }
      }
      // Delete the zip file
      File zipFile = new File(inputFile);
      zipFile.delete();
    } catch (Exception e) {
      Log.e("UnzipperTask", "Error unzipping file: " + inputFile + ", " + e);
    } finally {
      try {
        // Null checks instead of catching NullPointerException: a stream is
        // null only if its construction failed earlier in the try block.
        if (zipArchiveInputStream != null) {
          zipArchiveInputStream.close();
        }
        if (fileInputStream != null) {
          fileInputStream.close();
        }
        if (fileOutputStream != null) {
          fileOutputStream.close();
        }
      } catch (IOException ex) {
        Log.e(this.getClass().getName(), "Error closing the file streams.", ex);
      }
    }
  }
  /**
   * Extracts a single archive entry.
   *
   * @param zis the archive input stream
   * @param archiveEntry the entry to extract
   * @param destDir the destination directory
   */
  public static void decompress(
      ZipArchiveInputStream zis, ArchiveEntry archiveEntry, File destDir) {

    String entryName = archiveEntry.getName();
    try {
      File file = new File(destDir, entryName);
      if (archiveEntry.isDirectory()) {
        FileUtilsHelper.mkdir(file);
      } else {
        FileUtilsHelper.copyFile(zis, file);
      }
    } catch (Exception e) {
      throw new CommonsException(
          "Decompress " + entryName + " to " + destDir.getPath() + " failed!", e);
    }
  }
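
A typical caller drives decompress from the usual getNextEntry() loop. A minimal usage sketch, in which the wrapper method and try-with-resources wiring are assumptions:

  // Usage sketch: walk the archive and delegate every entry to decompress.
  public static void decompressAll(File zipFile, File destDir) throws IOException {
    try (ZipArchiveInputStream zis =
        new ZipArchiveInputStream(new BufferedInputStream(new FileInputStream(zipFile)))) {
      ArchiveEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        decompress(zis, entry, destDir);
      }
    }
  }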
  /**
   * Fills the file and directory maps with resources read from the archive.
   *
   * @param src the archive to scan.
   * @param encoding encoding used to encode file names inside the archive.
   * @param fileEntries Map (name to resource) of non-directory resources found inside the archive.
   * @param matchFileEntries Map (name to resource) of non-directory resources found inside the
   *     archive that matched all include patterns and didn't match any exclude patterns.
   * @param dirEntries Map (name to resource) of directory resources found inside the archive.
   * @param matchDirEntries Map (name to resource) of directory resources found inside the archive
   *     that matched all include patterns and didn't match any exclude patterns.
   */
  protected void fillMapsFromArchive(
      Resource src,
      String encoding,
      Map fileEntries,
      Map matchFileEntries,
      Map dirEntries,
      Map matchDirEntries) {
    ArchiveEntry entry = null;
    ArchiveInputStream ai = null;

    try {
      try {
        ai = StreamHelper.getInputStream(factory, src, encoding);
        if (ai == null) {
          ai = factory.getArchiveStream(new BufferedInputStream(src.getInputStream()), encoding);
        }
      } catch (IOException ex) {
        throw new BuildException("problem opening " + src, ex);
      }
      while ((entry = ai.getNextEntry()) != null) {
        if (skipUnreadable && !ai.canReadEntryData(entry)) {
          log(Messages.skippedIsUnreadable(entry));
          continue;
        }
        Resource r = builder.buildResource(src, encoding, entry);
        String name = entry.getName();
        if (entry.isDirectory()) {
          name = trimSeparator(name);
          dirEntries.put(name, r);
          if (match(name)) {
            matchDirEntries.put(name, r);
          }
        } else {
          fileEntries.put(name, r);
          if (match(name)) {
            matchFileEntries.put(name, r);
          }
        }
      }
    } catch (IOException ex) {
      throw new BuildException("problem reading " + src, ex);
    } finally {
      FileUtils.close(ai);
    }
  }
  public void extractTo(ZipArchiveInputStream zis, ArchiveEntry entry) throws IOException {
    int count = 0;
    int curCount = 0;
    byte[] data = new byte[BUFFER];

    while ((count = zis.read(data, 0, BUFFER)) != -1) {
      curCount += count;
      processInputData(entry.getName(), curCount);
    }
  }
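
extractTo drains the current entry and only reports a running byte count; processInputData itself is not shown. A hypothetical stand-in, with name and signature inferred from the call site:

  // Hypothetical progress hook matching the call above; the real
  // implementation is not part of this snippet.
  private void processInputData(String entryName, int bytesReadSoFar) {
    System.out.println("Read " + bytesReadSoFar + " bytes of <" + entryName + ">");
  }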
Example #9
 /**
  * Searches for an entry inside the zip stream by entry path. If there are alternative extensions,
  * will also look for entry with alternative extension. The search stops reading the stream when
  * the entry is found, so calling read on the stream will read the returned entry.
  *
  * <p>The zip input stream doesn't support mark/reset so once this method is used you cannot go
  * back - either the stream was fully read (when entry is not found) or the stream was read until
  * the current entry.
  *
  * @param zis The archive input stream
  * @param entryPath The entry path to search for
  * @param alternativeExtensions List of alternative file extensions to try if the main entry path
  *     is not found.
  * @return The entry if found, null otherwise
  * @throws IOException On failure to read the stream
  */
 public static ArchiveEntry locateArchiveEntry(
     ArchiveInputStream zis, String entryPath, List<String> alternativeExtensions)
     throws IOException {
   ArchiveEntry archiveEntry;
   while ((archiveEntry = zis.getNextEntry()) != null) {
     String zipEntryName = archiveEntry.getName();
     if (zipEntryName.equals(entryPath)) {
       return archiveEntry;
     } else if (alternativeExtensions != null) {
       String basePath = PathUtils.stripExtension(entryPath);
       for (String alternativeExtension : alternativeExtensions) {
         String alternativeSourcePath = basePath + "." + alternativeExtension;
         if (zipEntryName.equals(alternativeSourcePath)) {
           return archiveEntry;
         }
       }
     }
   }
   return null;
 }
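
Because the stream is left positioned at the located entry, the caller can read that entry's bytes immediately. A short usage sketch; the paths and alternative extension are illustrative:

   // Usage sketch: find "docs/readme.md" (or "docs/readme.txt") and buffer it.
   ArchiveEntry found = locateArchiveEntry(zis, "docs/readme.md", Arrays.asList("txt"));
   if (found != null) {
     // ArchiveInputStream.read() stops at the end of the current entry,
     // so this reads exactly the located entry's contents.
     byte[] content = IOUtils.toByteArray(zis);
   }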
  private void handleTARArchive(
      ArchiveStoreContext ctx,
      FreenetURI key,
      InputStream data,
      String element,
      ArchiveExtractCallback callback,
      MutableBoolean gotElement,
      boolean throwAtExit,
      ClientContext context)
      throws ArchiveFailureException, ArchiveRestartException {
    if (logMINOR) Logger.minor(this, "Handling a TAR Archive");
    TarArchiveInputStream tarIS = null;
    try {
      tarIS = new TarArchiveInputStream(data);

      // MINOR: Assumes the first entry in the tarball is a directory.
      ArchiveEntry entry;

      byte[] buf = new byte[32768];
      HashSet<String> names = new HashSet<String>();
      boolean gotMetadata = false;

      outerTAR:
      while (true) {
        try {
          entry = tarIS.getNextEntry();
        } catch (IllegalArgumentException e) {
          // Annoyingly, it can throw this on some corruptions...
          throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
        }
        if (entry == null) break;
        if (entry.isDirectory()) continue;
        String name = stripLeadingSlashes(entry.getName());
        if (names.contains(name)) {
          Logger.error(this, "Duplicate key " + name + " in archive " + key);
          continue;
        }
        long size = entry.getSize();
        if (name.equals(".metadata")) gotMetadata = true;
        if (size > maxArchivedFileSize && !name.equals(element)) {
          addErrorElement(
              ctx,
              key,
              name,
              "File too big: "
                  + size
                  + " greater than current archived file size limit "
                  + maxArchivedFileSize,
              true);
        } else {
          // Read the element
          long realLen = 0;
          Bucket output = tempBucketFactory.makeBucket(size);
          OutputStream out = output.getOutputStream();

          try {
            int readBytes;
            while ((readBytes = tarIS.read(buf)) > 0) {
              out.write(buf, 0, readBytes);
              // Track the running total of bytes written for this entry.
              realLen += readBytes;
              if (realLen > maxArchivedFileSize) {
                addErrorElement(
                    ctx,
                    key,
                    name,
                    "File too big: "
                        + realLen
                        + " greater than current archived file size limit "
                        + maxArchivedFileSize,
                    true);
                out.close();
                out = null;
                output.free();
                continue outerTAR;
              }
            }

          } finally {
            if (out != null) out.close();
          }
          if (size <= maxArchivedFileSize) {
            addStoreElement(ctx, key, name, output, gotElement, element, callback, context);
            names.add(name);
            trimStoredData();
          } else {
            // We are here because they asked for this file.
            callback.gotBucket(output, context);
            gotElement.value = true;
            addErrorElement(
                ctx,
                key,
                name,
                "File too big: "
                    + size
                    + " greater than current archived file size limit "
                    + maxArchivedFileSize,
                true);
          }
        }
      }

      // If no metadata, generate some
      if (!gotMetadata) {
        generateMetadata(ctx, key, names, gotElement, element, callback, context);
        trimStoredData();
      }
      if (throwAtExit) throw new ArchiveRestartException("Archive changed on re-fetch");

      if ((!gotElement.value) && element != null) callback.notInArchive(context);

    } catch (IOException e) {
      throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
    } finally {
      Closer.close(tarIS);
    }
  }
    public boolean accept(Path file) {
      try {
        FileSystem fs = file.getFileSystem(conf);
        boolean unpack = conf.getBoolean(unpackParamName, true);

        if (defaultIgnores.accept(file) && fs.getFileStatus(file).isDir() == false) {
          String URI = file.toUri().toString();

          // detect whether a file is likely to be an archive
          // TODO extend to other known types
          if (unpack && URI.toLowerCase().endsWith(".zip")) {
            FSDataInputStream fis = null;
            try {
              fis = fs.open(file);
              ArchiveInputStream input =
                  new ArchiveStreamFactory().createArchiveInputStream(new BufferedInputStream(fis));
              ArchiveEntry entry = null;
              while ((entry = input.getNextEntry()) != null) {
                String name = entry.getName();
                long size = entry.getSize();
                // getSize() may be -1 when the size is unknown; this assumes
                // the archive records entry sizes up front.
                byte[] content = new byte[(int) size];
                // A single read() may return fewer bytes than requested, so
                // loop until the whole entry is buffered.
                int offset = 0;
                while (offset < content.length) {
                  int read = input.read(content, offset, content.length - offset);
                  if (read == -1) {
                    break;
                  }
                  offset += read;
                }
                key.set(name);
                // fill the values for the content object
                value.setUrl(name);
                value.setContent(content);

                writer.append(key, value);
                counter++;
                if (reporter != null) {
                  reporter.incrCounter(Counters.DOC_COUNT, 1);
                }
              }

            } catch (ArchiveException e) {
              log.error("Failed to read archive", e);
            } finally {
              if (fis != null) {
                fis.close();
              }
            }

          } else {
            // Hmm, kind of dangerous to do this
            byte[] fileBArray = new byte[(int) fs.getFileStatus(file).getLen()];
            FSDataInputStream fis = null;
            try {
              fis = fs.open(file);
              fis.readFully(0, fileBArray);
              fis.close();
              key.set(URI);
              // fill the values for the content object
              value.setUrl(URI);
              value.setContent(fileBArray);

              writer.append(key, value);
              counter++;
              if (reporter != null) {
                reporter.incrCounter(Counters.DOC_COUNT, 1);
              }
            } catch (FileNotFoundException e) {
              throw new RuntimeException(e);
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
          }
        }
        // If it is a directory, accept it so we can possibly recurse on it;
        // otherwise we don't care about actually accepting the file, since
        // all the work is done in the accept method here.
        return fs.getFileStatus(file).isDir();
      } catch (IOException e) {
        log.error("Exception", e);
      }
      return false;
    }
Example #12
  public static int unpackFileToFolder(
      @Nonnull final Log logger,
      @Nullable final String folder,
      @Nonnull final File archiveFile,
      @Nonnull final File destinationFolder,
      final boolean makeAllExecutable)
      throws IOException {
    final String normalizedName = archiveFile.getName().toLowerCase(Locale.ENGLISH);

    final ArchEntryGetter entryGetter;

    boolean modeZipFile = false;

    final ZipFile theZipFile;
    final ArchiveInputStream archInputStream;
    if (normalizedName.endsWith(".zip")) {
      logger.debug("Detected ZIP archive");

      modeZipFile = true;

      theZipFile = new ZipFile(archiveFile);
      archInputStream = null;
      entryGetter =
          new ArchEntryGetter() {
            private final Enumeration<ZipArchiveEntry> iterator = theZipFile.getEntries();

            @Override
            @Nullable
            public ArchiveEntry getNextEntry() throws IOException {
              ArchiveEntry result = null;
              if (this.iterator.hasMoreElements()) {
                result = this.iterator.nextElement();
              }
              return result;
            }
          };
    } else {
      theZipFile = null;
      final InputStream in = new BufferedInputStream(new FileInputStream(archiveFile));
      try {
        if (normalizedName.endsWith(".tar.gz")) {
          logger.debug("Detected TAR.GZ archive");
          archInputStream = new TarArchiveInputStream(new GZIPInputStream(in));

          entryGetter =
              new ArchEntryGetter() {
                @Override
                @Nullable
                public ArchiveEntry getNextEntry() throws IOException {
                  return ((TarArchiveInputStream) archInputStream).getNextTarEntry();
                }
              };

        } else {
          logger.debug("Detected OTHER archive");
          archInputStream = ARCHIVE_STREAM_FACTORY.createArchiveInputStream(in);
          logger.debug("Created archive stream : " + archInputStream.getClass().getName());

          entryGetter =
              new ArchEntryGetter() {
                @Override
                @Nullable
                public ArchiveEntry getNextEntry() throws IOException {
                  return archInputStream.getNextEntry();
                }
              };
        }

      } catch (ArchiveException ex) {
        IOUtils.closeQuietly(in);
        throw new IOException("Can't recognize or read archive file : " + archiveFile, ex);
      } catch (CantReadArchiveEntryException ex) {
        IOUtils.closeQuietly(in);
        throw new IOException("Can't read entry from archive file : " + archiveFile, ex);
      }
    }

    try {

      final String normalizedFolder =
          folder == null ? null : FilenameUtils.normalize(folder, true) + '/';

      int unpackedFilesCounter = 0;
      while (true) {
        final ArchiveEntry entry = entryGetter.getNextEntry();
        if (entry == null) {
          break;
        }
        final String normalizedPath = FilenameUtils.normalize(entry.getName(), true);

        logger.debug("Detected archive entry : " + normalizedPath);

        if (normalizedFolder == null || normalizedPath.startsWith(normalizedFolder)) {
          final File targetFile =
              new File(
                  destinationFolder,
                  normalizedFolder == null
                      ? normalizedPath
                      : normalizedPath.substring(normalizedFolder.length()));
          if (entry.isDirectory()) {
            logger.debug("Folder : " + normalizedPath);
            if (!targetFile.exists() && !targetFile.mkdirs()) {
              throw new IOException("Can't create folder " + targetFile);
            }
          } else {
            final File parent = targetFile.getParentFile();

            if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
              throw new IOException("Can't create folder : " + parent);
            }

            final FileOutputStream fos = new FileOutputStream(targetFile);

            try {
              if (modeZipFile) {
                logger.debug("Unpacking ZIP entry : " + normalizedPath);

                final InputStream zipEntryInStream =
                    theZipFile.getInputStream((ZipArchiveEntry) entry);
                try {
                  if (IOUtils.copy(zipEntryInStream, fos) != entry.getSize()) {
                    throw new IOException(
                        "Can't unpack file, illegal unpacked length : " + entry.getName());
                  }
                } finally {
                  IOUtils.closeQuietly(zipEntryInStream);
                }
              } else {
                logger.debug("Unpacking archive entry : " + normalizedPath);

                if (!archInputStream.canReadEntryData(entry)) {
                  throw new IOException("Can't read archive entry data : " + normalizedPath);
                }
                if (IOUtils.copy(archInputStream, fos) != entry.getSize()) {
                  throw new IOException(
                      "Can't unpack file, illegal unpacked length : " + entry.getName());
                }
              }
            } finally {
              fos.close();
            }

            if (makeAllExecutable) {
              try {
                targetFile.setExecutable(true, true);
              } catch (SecurityException ex) {
                throw new IOException("Can't make file executable : " + targetFile, ex);
              }
            }
            unpackedFilesCounter++;
          }
        } else {
          logger.debug("Archive entry " + normalizedPath + " ignored");
        }
      }
      return unpackedFilesCounter;
    } finally {
      IOUtils.closeQuietly(theZipFile);
      IOUtils.closeQuietly(archInputStream);
    }
  }
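
A call site for unpackFileToFolder might look like the following; the logger, archive name, and folders are illustrative, not from the original source:

    // Usage sketch: unpack only entries under "bin/" from a tar.gz into ./tools,
    // marking each extracted file executable; returns the count of files written.
    final int unpacked =
        unpackFileToFolder(logger, "bin", new File("dist.tar.gz"), new File("tools"), true);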
Example #13
  /**
   * Load this package in memory from an InputStream. It may be installed later using {@link
   * #install(XWikiContext)}.
   *
   * @param file an InputStream of a zipped package file
   * @param context current XWikiContext
   * @return an empty string, useless.
   * @throws IOException while reading the ZipFile
   * @throws XWikiException when package content is broken
   * @since 2.3M2
   */
  public String Import(InputStream file, XWikiContext context) throws IOException, XWikiException {
    ZipArchiveInputStream zis;
    ArchiveEntry entry;
    Document description = null;

    try {
      zis = new ZipArchiveInputStream(file, XAR_FILENAME_ENCODING, false);

      List<XWikiDocument> docsToLoad = new LinkedList<XWikiDocument>();
      /*
       * Loop 1: Cycle through the zip input stream and load all of the documents; when we find the
       * package.xml file, we put it aside so that we only include documents which are in the file.
       */
      while ((entry = zis.getNextEntry()) != null) {
        if (entry.isDirectory() || (entry.getName().indexOf("META-INF") != -1)) {
          // The entry is either a directory or is something inside of the META-INF dir.
          // (we use that directory to put meta data such as LICENSE/NOTICE files.)
          continue;
        } else if (entry.getName().compareTo(DefaultPackageFileName) == 0) {
          // The entry is the manifest (package.xml). Read this differently.
          description = fromXml(new CloseShieldInputStream(zis));
        } else {
          XWikiDocument doc = null;
          try {
            doc = readFromXML(new CloseShieldInputStream(zis));
          } catch (Throwable ex) {
            LOGGER.warn(
                "Failed to parse document ["
                    + entry.getName()
                    + "] from XML during import, thus it will not be installed. "
                    + "The error was: "
                    + ex.getMessage());
            // It will be listed in the "failed documents" section after the import.
            addToErrors(entry.getName().replaceAll("/", "."), context);

            continue;
          }

          // Run all of the registered DocumentFilters on this document and
          // if no filters throw exceptions, add it to the list to import.
          try {
            this.filter(doc, context);
            docsToLoad.add(doc);
          } catch (ExcludeDocumentException e) {
            LOGGER.info("Skip the document '" + doc.getDocumentReference() + "'");
          }
        }
      }
      // Make sure a manifest was included in the package...
      if (description == null) {
        throw new PackageException(
            XWikiException.ERROR_XWIKI_UNKNOWN, "Could not find the package definition");
      }
      /*
       * Loop 2: Cycle through the list of documents and if they are in the manifest then add them, otherwise log
       * a warning and add them to the skipped list.
       */
      for (XWikiDocument doc : docsToLoad) {
        if (documentExistInPackageFile(doc.getFullName(), doc.getLanguage(), description)) {
          this.add(doc, context);
        } else {
          LOGGER.warn(
              "document "
                  + doc.getDocumentReference()
                  + " does not exist in package definition."
                  + " It will not be installed.");
          // It will be listed in the "skipped documents" section after the
          // import.
          addToSkipped(doc.getFullName(), context);
        }
      }

      updateFileInfos(description);
    } catch (DocumentException e) {
      throw new PackageException(XWikiException.ERROR_XWIKI_UNKNOWN, "Error when reading the XML");
    }

    return "";
  }
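
Per the Javadoc, Import only loads the package into memory; installation happens in a later call to install(XWikiContext). A usage sketch in which the pkg instance and stream are illustrative:

    // Usage sketch: load the XAR content, then install it in a separate step.
    pkg.Import(new FileInputStream("package.xar"), context);
    pkg.install(context);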
  private XarMergeResult importXARToWiki(
      XarFile previousXarFile,
      InputStream xarInputStream,
      String wiki,
      PackageConfiguration configuration)
      throws IOException {
    XarMergeResult mergeResult = new XarMergeResult();

    ZipArchiveInputStream zis = new ZipArchiveInputStream(xarInputStream);

    XWikiContext xcontext = getXWikiContext();

    String currentWiki = xcontext.getDatabase();
    try {
      xcontext.setDatabase(wiki);

      this.observation.notify(new XARImportingEvent(), null, xcontext);

      for (ArchiveEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
        if (!entry.isDirectory()) {
          DocumentImporterHandler documentHandler =
              new DocumentImporterHandler(this, this.componentManager, wiki);

          try {
            documentHandler.setPreviousXarFile(previousXarFile);
            documentHandler.setConfiguration(configuration);

            parseDocument(zis, documentHandler);

            if (documentHandler.getMergeResult() != null) {
              mergeResult.addMergeResult(documentHandler.getMergeResult());
            }

            if (configuration.isLogEnabled()) {
              this.logger.info(
                  "Successfully imported document [{}] in language [{}]",
                  documentHandler.getDocument().getDocumentReference(),
                  documentHandler.getDocument().getRealLanguage());
            }
          } catch (NotADocumentException e) {
            // Impossible to know that before parsing
            this.logger.debug("Entry [" + entry + "] is not a document", e);
          } catch (Exception e) {
            this.logger.error("Failed to parse document [" + entry.getName() + "]", e);

            if (configuration.isLogEnabled()) {
              this.logger.info(
                  "Failed to import document [{}] in language [{}]",
                  documentHandler.getDocument().getDocumentReference(),
                  documentHandler.getDocument().getRealLanguage());
            }
          }
        }
      }
    } finally {
      this.observation.notify(new XARImportedEvent(), null, xcontext);

      xcontext.setDatabase(currentWiki);
    }

    return mergeResult;
  }