/**
   * Import zipped documents
   *
   * @param path Where to import into the repository.
   * @param is The zip file to import.
   */
  private synchronized String importZip(String path, InputStream is)
      throws PathNotFoundException, ItemExistsException, AccessDeniedException, RepositoryException,
          IOException, DatabaseException, ExtensionException, AutomationException {
    log.debug("importZip({}, {})", path, is);
    java.io.File tmpIn = null;
    java.io.File tmpOut = null;
    String errorMsg = null;

    try {
      // Create temporary file and output directory
      tmpIn = File.createTempFile("okm", ".zip");
      tmpOut = FileUtils.createTempDir();

      try (FileOutputStream fos = new FileOutputStream(tmpIn)) {
        IOUtils.copy(is, fos);
      }

      // Unzip files. Note: here "File" refers to TrueZIP's de.schlichtherle.io.File
      // (not java.io.File), which exposes the zip archive as a virtual directory;
      // File.umount() flushes and releases the mounted archive.
      File fileTmpIn = new File(tmpIn);
      fileTmpIn.archiveCopyAllTo(tmpOut);
      File.umount();

      // Import files
      StringWriter out = new StringWriter();
      ImpExpStats stats =
          RepositoryImporter.importDocuments(
              null, tmpOut, path, false, false, false, out, new TextInfoDecorator(tmpOut));

      if (!stats.isOk()) {
        errorMsg = out.toString();
      }

      out.close();
    } catch (IOException e) {
      log.error("Error importing zip", e);
      throw e;
    } finally {
      IOUtils.closeQuietly(is);

      if (tmpIn != null) {
        org.apache.commons.io.FileUtils.deleteQuietly(tmpIn);
      }

      if (tmpOut != null) {
        org.apache.commons.io.FileUtils.deleteQuietly(tmpOut);
      }
    }

    log.debug("importZip: {}", errorMsg);
    return errorMsg;
  }
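A minimal usage sketch, not part of the original example: a hypothetical helper in the same class that hands an uploaded stream to importZip() and logs any error report. The name handleZipUpload, its parameters and the broad throws clause are illustrative assumptions.

  private void handleZipUpload(String dstPath, InputStream uploadedZip) throws Exception {
    // importZip() returns null on success, or the importer's error report otherwise
    String errorMsg = importZip(dstPath, uploadedZip);

    if (errorMsg != null && !errorMsg.isEmpty()) {
      log.warn("Zip import reported errors: {}", errorMsg);
    }
  }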
Example #2
  /** Get the singleton SessionFactory instance, building it on first use. */
  public static SessionFactory getSessionFactory(String hbm2ddl) {
    if (sessionFactory == null) {
      try {
        // Configure Hibernate
        Configuration cfg = getConfiguration().configure();
        cfg.setProperty("hibernate.dialect", Config.HIBERNATE_DIALECT);
        cfg.setProperty("hibernate.connection.datasource", Config.HIBERNATE_DATASOURCE);
        cfg.setProperty("hibernate.hbm2ddl.auto", hbm2ddl);
        cfg.setProperty("hibernate.show_sql", Config.HIBERNATE_SHOW_SQL);
        cfg.setProperty("hibernate.generate_statistics", Config.HIBERNATE_STATISTICS);
        cfg.setProperty("hibernate.search.analyzer", Config.HIBERNATE_SEARCH_ANALYZER);
        cfg.setProperty(
            "hibernate.search.default.directory_provider",
            "org.hibernate.search.store.FSDirectoryProvider");
        cfg.setProperty("hibernate.search.default.indexBase", Config.HIBERNATE_SEARCH_INDEX_HOME);
        cfg.setProperty("hibernate.search.default.optimizer.operation_limit.max", "500");
        cfg.setProperty("hibernate.search.default.optimizer.transaction_limit.max", "75");
        cfg.setProperty("hibernate.worker.execution", "async");

        // http://relation.to/Bloggers/PostgreSQLAndBLOBs
        // cfg.setProperty("hibernate.jdbc.use_streams_for_binary", "false");

        // Show configuration
        log.info("Hibernate 'hibernate.dialect' = {}", cfg.getProperty("hibernate.dialect"));
        log.info(
            "Hibernate 'hibernate.connection.datasource' = {}",
            cfg.getProperty("hibernate.connection.datasource"));
        log.info(
            "Hibernate 'hibernate.hbm2ddl.auto' = {}", cfg.getProperty("hibernate.hbm2ddl.auto"));
        log.info("Hibernate 'hibernate.show_sql' = {}", cfg.getProperty("hibernate.show_sql"));
        log.info(
            "Hibernate 'hibernate.generate_statistics' = {}",
            cfg.getProperty("hibernate.generate_statistics"));
        log.info(
            "Hibernate 'hibernate.search.default.directory_provider' = {}",
            cfg.getProperty("hibernate.search.default.directory_provider"));
        log.info(
            "Hibernate 'hibernate.search.default.indexBase' = {}",
            cfg.getProperty("hibernate.search.default.indexBase"));

        if (HBM2DDL_CREATE.equals(hbm2ddl)) {
          // When creating the database schema, also clean filesystem data:
          // the conversion cache, the file datastore and the Lucene indexes.
          log.info("Cleaning filesystem data from: {}", Config.REPOSITORY_HOME);
          FileUtils.deleteQuietly(new File(Config.REPOSITORY_HOME));
        }

        // Create database schema, if needed
        sessionFactory = cfg.buildSessionFactory();

        if (HBM2DDL_CREATE.equals(hbm2ddl)) {
          log.info("Executing specific import for: {}", Config.HIBERNATE_DIALECT);
          InputStream is = ConfigUtils.getResourceAsStream("default.sql");
          String adapted = DatabaseDialectAdapter.dialectAdapter(is, Config.HIBERNATE_DIALECT);
          executeImport(new StringReader(adapted));
          IOUtils.closeQuietly(is);
        }

        if (HBM2DDL_CREATE.equals(hbm2ddl) || HBM2DDL_UPDATE.equals(hbm2ddl)) {
          // Create or update translations
          for (String res : ConfigUtils.getResources("i18n")) {
            String oldTrans = null;
            String langId = null;

            // Preserve translation changes
            if (HBM2DDL_UPDATE.equals(hbm2ddl)) {
              langId = FileUtils.getFileName(res);
              log.info("Preserving translations for: {}", langId);
              oldTrans = preserveTranslations(langId);
            }

            InputStream isLang = ConfigUtils.getResourceAsStream("i18n/" + res);
            log.info("Importing translation: {}", res);
            executeImport(new InputStreamReader(isLang));
            IOUtils.closeQuietly(isLang);

            // Apply previous translation changes
            if (HBM2DDL_UPDATE.equals(hbm2ddl)) {
              if (oldTrans != null) {
                log.info("Restoring translations for: {}", langId);
                executeImport(new StringReader(oldTrans));
              }
            }
          }

          // Replace "create" or "update" by "none" to prevent repository reset on restart
          if (Boolean.parseBoolean(Config.HIBERNATE_CREATE_AUTOFIX)) {
            log.info("Executing Hibernate create autofix");
            hibernateCreateAutofix(Config.HOME_DIR + "/" + Config.OPENKM_CONFIG);
          } else {
            log.info(
                "Hibernate create autofix not executed because of {}={}",
                Config.PROPERTY_HIBERNATE_CREATE_AUTOFIX,
                Config.HIBERNATE_CREATE_AUTOFIX);
          }
        }
      } catch (HibernateException | URISyntaxException | IOException e) {
        log.error(e.getMessage(), e);
        throw new ExceptionInInitializerError(e);
      }
    }

    return sessionFactory;
  }
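A minimal usage sketch, not from the original source: obtaining the lazily-built SessionFactory and running a unit of work with explicit transaction handling. It assumes org.hibernate.Session and org.hibernate.Transaction are imported, passes "none" so no schema action is performed, and leaves the actual entity work as a placeholder.

  public static void exampleUnitOfWork() {
    // Build (or reuse) the singleton factory without touching the schema
    SessionFactory sf = getSessionFactory("none");
    Session session = sf.openSession();
    Transaction tx = null;

    try {
      tx = session.beginTransaction();
      // ... load, persist or query mapped entities here ...
      tx.commit();
    } catch (HibernateException e) {
      if (tx != null) {
        tx.rollback();
      }
      throw e;
    } finally {
      session.close();
    }
  }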