Example #1
  public void runSQLTemplate(String path, boolean failOnError)
      throws IOException, NamingException, SQLException {

    DB db = DBFactoryUtil.getDB();

    db.runSQLTemplate(path, failOnError);
  }
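A minimal usage sketch for the wrapper above; the template path is a placeholder, not taken from the original example, and passing failOnError = false presumably lets statements that fail (for instance, objects that already exist) be skipped instead of aborting the whole template.

  // Hypothetical call of the wrapper shown in Example #1.
  runSQLTemplate("custom-update.sql", false);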
Example #2
  protected void verifyOracleNewLine() throws Exception {
    DB db = DBFactoryUtil.getDB();

    String dbType = db.getType();

    if (!dbType.equals(DB.TYPE_ORACLE)) {
      return;
    }

    // This is a workaround for Oracle sqlldr's inability to insert new
    // line characters for long varchar columns. See
    // http://forums.liferay.com/index.php?showtopic=2761&hl=oracle for more
    // information. Check several articles because some articles may not
    // have new lines.

    boolean checkNewLine = false;

    List<JournalArticle> articles =
        JournalArticleLocalServiceUtil.getArticles(DEFAULT_GROUP_ID, 0, NUM_OF_ARTICLES);

    for (JournalArticle article : articles) {
      String content = article.getContent();

      if ((content != null) && content.contains("\\n")) {
        articles = JournalArticleLocalServiceUtil.getArticles(DEFAULT_GROUP_ID);

        for (int j = 0; j < articles.size(); j++) {
          article = articles.get(j);

          JournalArticleLocalServiceUtil.checkNewLine(
              article.getGroupId(), article.getArticleId(), article.getVersion());
        }

        checkNewLine = true;

        break;
      }
    }

    // Only process this once

    if (!checkNewLine) {
      if (_log.isInfoEnabled()) {
        _log.info("Do not fix oracle new line");
      }

      return;
    } else {
      if (_log.isInfoEnabled()) {
        _log.info("Fix oracle new line");
      }
    }
  }
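Note that content.contains("\\n") in Example #2 looks for a literal backslash-n sequence left behind by sqlldr, not for a real line break. A small illustration of the distinction (the sample strings are invented):

  // "\\n" in Java source is the two characters '\' and 'n', which is what
  // sqlldr stores in place of a real newline; "\n" is an actual line break.
  String imported = "line one\\nline two";
  String repaired = "line one\nline two";

  System.out.println(imported.contains("\\n")); // true, needs fixing
  System.out.println(repaired.contains("\\n")); // false, already clean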
Example #3
  private void _reloadSQLTransformer() {
    if (_transformedSqls == null) {
      _transformedSqls = new ConcurrentHashMap<String, String>();
    } else {
      _transformedSqls.clear();
    }

    _vendorDB2 = false;
    _vendorDerby = false;
    _vendorFirebird = false;
    // _vendorHypersonic = false;
    _vendorInformix = false;
    _vendorIngres = false;
    _vendorInterbase = false;
    _vendorMySQL = false;
    _vendorOracle = false;
    _vendorPostgreSQL = false;
    _vendorSQLServer = false;
    _vendorSybase = false;

    DB db = DBFactoryUtil.getDB();

    String dbType = db.getType();

    _db = db;

    if (dbType.equals(DB.TYPE_DB2)) {
      _vendorDB2 = true;
    } else if (dbType.equals(DB.TYPE_DERBY)) {
      _vendorDerby = true;
    } else if (dbType.equals(DB.TYPE_FIREBIRD)) {
      _vendorFirebird = true;
    } else if (dbType.equals(DB.TYPE_HYPERSONIC)) {
      // _vendorHypersonic = true;
    } else if (dbType.equals(DB.TYPE_INFORMIX)) {
      _vendorInformix = true;
    } else if (dbType.equals(DB.TYPE_INGRES)) {
      _vendorIngres = true;
    } else if (dbType.equals(DB.TYPE_INTERBASE)) {
      _vendorInterbase = true;
    } else if (dbType.equals(DB.TYPE_MYSQL)) {
      _vendorMySQL = true;
    } else if (dbType.equals(DB.TYPE_ORACLE)) {
      _vendorOracle = true;
    } else if (dbType.equals(DB.TYPE_POSTGRESQL)) {
      _vendorPostgreSQL = true;
    } else if (dbType.equals(DB.TYPE_SQLSERVER)) {
      _vendorSQLServer = true;
    } else if (dbType.equals(DB.TYPE_SYBASE)) {
      _vendorSybase = true;
    }
  }
Example #4
  /**
   * Performs a SQL query.
   *
   * @param sql the sql query
   */
  protected void runSQL(String sql) {
    try {
      DataSource dataSource = ratingsStatsPersistence.getDataSource();

      DB db = DBFactoryUtil.getDB();

      sql = db.buildSQL(sql);
      sql = PortalUtil.transformSQL(sql);

      SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dataSource, sql, new int[0]);

      sqlUpdate.update();
    } catch (Exception e) {
      throw new SystemException(e);
    }
  }
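A hedged usage sketch for the helper above; the table and column names are placeholders rather than anything from the original example.

  // Hypothetical invocation: buildSQL() and transformSQL() inside runSQL()
  // adapt the statement to the database vendor detected at runtime.
  runSQL("alter table RatingsStats add extraData VARCHAR(75) null");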
Example #5
  private String _transform(String sql) {
    if (sql == null) {
      return sql;
    }

    String newSQL = sql;

    newSQL = _replaceBitwiseCheck(newSQL);
    newSQL = _replaceBoolean(newSQL);
    newSQL = _replaceCastLong(newSQL);
    newSQL = _replaceCastText(newSQL);
    newSQL = _replaceIntegerDivision(newSQL);

    if (_vendorDB2) {
      newSQL = _replaceLike(newSQL);
    } else if (_vendorDerby) {
      newSQL = _replaceUnion(newSQL);
    } else if (_vendorMySQL) {
      DB db = DBFactoryUtil.getDB();

      if (!db.isSupportsStringCaseSensitiveQuery()) {
        newSQL = _removeLower(newSQL);
      }
    } else if (_vendorPostgreSQL) {
      newSQL = _replaceNegativeComparison(newSQL);
    } else if (_vendorSQLServer) {
      newSQL = _replaceMod(newSQL);
    } else if (_vendorSybase) {
      newSQL = _replaceMod(newSQL);
      newSQL = _replaceReplace(newSQL);
    }

    if (_log.isDebugEnabled()) {
      _log.debug("Original SQL " + sql);
      _log.debug("Modified SQL " + newSQL);
    }

    return newSQL;
  }
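The _transformedSqls map that Example #3 resets suggests that transformation results are cached per original SQL string. A hypothetical caching wrapper consistent with Examples #3 and #5 (the method name is invented for illustration):

  private String _transformCached(String sql) {
    if (sql == null) {
      return null;
    }

    // Reuse a previously transformed statement when available; the map is
    // cleared by _reloadSQLTransformer() whenever the vendor flags change.
    String newSQL = _transformedSqls.get(sql);

    if (newSQL == null) {
      newSQL = _transform(sql);

      _transformedSqls.put(sql, newSQL);
    }

    return newSQL;
  }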
Example #6
  protected Scheduler initializeScheduler(String propertiesPrefix, boolean useQuartzCluster)
      throws Exception {

    StdSchedulerFactory schedulerFactory = new StdSchedulerFactory();

    Properties properties = _props.getProperties(propertiesPrefix, true);

    if (useQuartzCluster) {
      DB db = DBFactoryUtil.getDB();

      String dbType = db.getType();

      if (dbType.equals(DB.TYPE_SQLSERVER)) {
        String lockHandlerClassName =
            properties.getProperty("org.quartz.jobStore.lockHandler.class");

        if (Validator.isNull(lockHandlerClassName)) {
          properties.setProperty(
              "org.quartz.jobStore.lockHandler.class", UpdateLockRowSemaphore.class.getName());
        }
      }

      if (GetterUtil.getBoolean(_props.get(PropsKeys.CLUSTER_LINK_ENABLED))) {

        if (dbType.equals(DB.TYPE_HYPERSONIC)) {
          _log.error("Unable to cluster scheduler on Hypersonic");
        } else {
          properties.put("org.quartz.jobStore.isClustered", Boolean.TRUE.toString());
        }
      }
    }

    schedulerFactory.initialize(properties);

    return schedulerFactory.getScheduler();
  }
Example #7
  public static synchronized void init() {
    if (_initialized) {
      return;
    }

    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    // Set the default locale used by Liferay. This locale is no longer set
    // at the VM level. See LEP-2584.

    String userLanguage = SystemProperties.get("user.language");
    String userCountry = SystemProperties.get("user.country");
    String userVariant = SystemProperties.get("user.variant");

    LocaleUtil.setDefault(userLanguage, userCountry, userVariant);

    // Set the default time zone used by Liferay. This time zone is no
    // longer set at the VM level. See LEP-2584.

    String userTimeZone = SystemProperties.get("user.timezone");

    TimeZoneUtil.setDefault(userTimeZone);

    // Shared class loader

    try {
      PortalClassLoaderUtil.setClassLoader(ClassLoaderUtil.getContextClassLoader());
    } catch (Exception e) {
      e.printStackTrace();
    }

    // Properties

    com.liferay.portal.kernel.util.PropsUtil.setProps(new PropsImpl());

    // Log4J

    if (GetterUtil.getBoolean(SystemProperties.get("log4j.configure.on.startup"), true)) {

      ClassLoader classLoader = InitUtil.class.getClassLoader();

      Log4JUtil.configureLog4J(classLoader);
    }

    // Shared log

    try {
      LogFactoryUtil.setLogFactory(new Log4jLogFactoryImpl());
    } catch (Exception e) {
      e.printStackTrace();
    }

    // Log sanitizer

    SanitizerLogWrapper.init();

    // Java properties

    JavaDetector.isJDK5();

    // Security manager

    SecurityManagerUtil.init();

    if (SecurityManagerUtil.ENABLED) {
      com.liferay.portal.kernel.util.PropsUtil.setProps(
          DoPrivilegedUtil.wrap(com.liferay.portal.kernel.util.PropsUtil.getProps()));

      LogFactoryUtil.setLogFactory(DoPrivilegedUtil.wrap(LogFactoryUtil.getLogFactory()));
    }

    // Cache registry

    CacheRegistryUtil.setCacheRegistry(DoPrivilegedUtil.wrap(new CacheRegistryImpl()));

    // Configuration factory

    ConfigurationFactoryUtil.setConfigurationFactory(
        DoPrivilegedUtil.wrap(new ConfigurationFactoryImpl()));

    // Data source factory

    DataSourceFactoryUtil.setDataSourceFactory(DoPrivilegedUtil.wrap(new DataSourceFactoryImpl()));

    // DB factory

    DBFactoryUtil.setDBFactory(DoPrivilegedUtil.wrap(new DBFactoryImpl()));

    // ROME

    XmlReader.setDefaultEncoding(StringPool.UTF8);

    if (_PRINT_TIME) {
      System.out.println("InitAction takes " + stopWatch.getTime() + " ms");
    }

    _initialized = true;
  }
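A minimal sketch of the ordering Example #7 implies, assuming a standalone tool: init() installs the DB factory (see the "DB factory" step above), so it must run before any DBFactoryUtil.getDB() call.

  // Hypothetical standalone usage, not part of the original example.
  public static void main(String[] args) {
    InitUtil.init();

    DB db = DBFactoryUtil.getDB();

    System.out.println("Detected database type: " + db.getType());
  }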
Example #8
  protected boolean isSupportsUpdateWithInnerJoin() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsUpdateWithInnerJoin();
  }
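A hedged sketch of how such a capability check is typically consumed, assuming a runSQL helper like the ones shown further below; both statements are placeholders.

  // Hypothetical: prefer a single joined UPDATE where the dialect allows
  // it, otherwise fall back to a correlated subquery.
  if (isSupportsUpdateWithInnerJoin()) {
    runSQL(
      "update MyEntry inner join MyGroup on MyEntry.groupId = " +
        "MyGroup.groupId set MyEntry.active_ = MyGroup.active_");
  }
  else {
    runSQL(
      "update MyEntry set active_ = (select active_ from MyGroup where " +
        "MyGroup.groupId = MyEntry.groupId)");
  }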
Example #9
  protected void doConvert() throws Exception {
    DataSource dataSource = getDataSource();

    Dialect dialect = DialectDetector.getDialect(dataSource);

    DB db = DBFactoryUtil.getDB(dialect);

    List<String> modelNames = ModelHintsUtil.getModels();

    List<Tuple> tableDetails = new ArrayList<Tuple>();

    Connection connection = dataSource.getConnection();

    try {
      MaintenanceUtil.appendStatus("Collecting information for database tables to migrate");

      for (String modelName : modelNames) {
        if (!modelName.contains(".model.")) {
          continue;
        }

        String implClassName =
            modelName.replaceFirst("(\\.model\\.)(\\p{Upper}.*)", "$1impl.$2Impl");

        if (_log.isDebugEnabled()) {
          _log.debug("Loading class " + implClassName);
        }

        Class<?> implClass = getImplClass(implClassName);

        if (implClass == null) {
          _log.error("Unable to load class " + implClassName);

          continue;
        }

        Field[] fields = implClass.getFields();

        for (Field field : fields) {
          Tuple tuple = null;

          String fieldName = field.getName();

          if (fieldName.equals("TABLE_NAME")) {
            tuple = getTableDetails(implClass, field, fieldName);
          } else if (fieldName.startsWith("MAPPING_TABLE_") && fieldName.endsWith("_NAME")) {

            tuple = getTableDetails(implClass, field, fieldName);
          }

          if (tuple != null) {
            tableDetails.add(tuple);
          }
        }
      }

      for (Tuple tuple : _UNMAPPED_TABLES) {
        tableDetails.add(tuple);
      }

      if (_log.isDebugEnabled()) {
        _log.debug("Migrating database tables");
      }

      for (int i = 0; i < tableDetails.size(); i++) {
        if ((i > 0) && (i % (tableDetails.size() / 4) == 0)) {
          MaintenanceUtil.appendStatus((i * 100 / tableDetails.size()) + "%");
        }

        Tuple tuple = tableDetails.get(i);

        String table = (String) tuple.getObject(0);
        Object[][] columns = (Object[][]) tuple.getObject(1);
        String sqlCreate = (String) tuple.getObject(2);

        migrateTable(db, connection, table, columns, sqlCreate);
      }
    } finally {
      DataAccess.cleanUp(connection);
    }

    MaintenanceUtil.appendStatus("Please change your JDBC settings before restarting server");

    ShutdownUtil.shutdown(0);
  }
Example #10
  protected boolean isSupportsAlterColumnType() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsAlterColumnType();
  }
Example #11
  protected boolean isSupportsStringCaseSensitiveQuery() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsStringCaseSensitiveQuery();
  }
Example #12
  protected long increment() {
    DB db = DBFactoryUtil.getDB();

    return db.increment();
  }
Example #13
  protected long increment(String name) {
    DB db = DBFactoryUtil.getDB();

    return db.increment(name);
  }
Example #14
  protected void runSQL(String[] sqls) throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(sqls);
  }
Example #15
  protected void runSQL(String template) throws IOException, SQLException {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(template);
  }
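The thin wrappers above are typically combined inside an upgrade step. A minimal sketch under that assumption; the table, column, and counter names are placeholders.

  // Hypothetical upgrade step, not taken from the original examples: the
  // capability check gates vendor-specific DDL, and increment() supplies a
  // new surrogate key for a seeded row.
  protected void seedMyEntryTable() throws Exception {
    if (isSupportsAlterColumnType()) {
      runSQL("alter table MyEntry alter column description VARCHAR(4000)");
    }

    long entryId = increment("com.example.MyEntry");

    runSQL("insert into MyEntry (entryId, description) values (" + entryId + ", 'seed')");
  }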
Example #16
  private static void _deleteTempImages() throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(_DELETE_TEMP_IMAGES_1);
    db.runSQL(_DELETE_TEMP_IMAGES_2);
  }
Example #17
  public SampleSQLBuilder(Map<String, String> arguments) throws Exception {
    String baseDir = arguments.get("sample.sql.base.dir");

    _dbType = arguments.get("sample.sql.db.type");
    _maxBlogsEntryCommentCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.blogs.entry.comment.count"));
    _maxBlogsEntryCount = GetterUtil.getInteger(arguments.get("sample.sql.max.blogs.entry.count"));
    _maxDDLRecordCount = GetterUtil.getInteger(arguments.get("sample.sql.max.ddl.record.count"));
    _maxDDLRecordSetCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.ddl.record.set.count"));
    _maxDLFileEntryCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.dl.file.entry.count"));
    _maxDLFileEntrySize = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.file.entry.size"));
    _maxDLFolderCount = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.folder.count"));
    _maxDLFolderDepth = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.folder.depth"));
    _maxGroupCount = GetterUtil.getInteger(arguments.get("sample.sql.max.group.count"));
    _maxJournalArticleCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.journal.article.count"));
    _maxJournalArticleSize =
        GetterUtil.getInteger(arguments.get("sample.sql.max.journal.article.size"));
    _maxMBCategoryCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.category.count"));
    _maxMBMessageCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.message.count"));
    _maxMBThreadCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.thread.count"));
    _maxUserCount = GetterUtil.getInteger(arguments.get("sample.sql.max.user.count"));
    _maxUserToGroupCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.user.to.group.count"));
    _maxWikiNodeCount = GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.node.count"));
    _maxWikiPageCommentCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.page.comment.count"));
    _maxWikiPageCount = GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.page.count"));
    _optimizeBufferSize = GetterUtil.getInteger(arguments.get("sample.sql.optimize.buffer.size"));
    _outputDir = arguments.get("sample.sql.output.dir");
    _outputMerge = GetterUtil.getBoolean(arguments.get("sample.sql.output.merge"));

    _dataFactory =
        new DataFactory(
            baseDir,
            _maxBlogsEntryCount,
            _maxGroupCount,
            _maxJournalArticleSize,
            _maxMBCategoryCount,
            _maxMBThreadCount,
            _maxMBMessageCount,
            _maxUserToGroupCount);

    _db = DBFactoryUtil.getDB(_dbType);

    if (_db instanceof MySQLDB) {
      _db = new SampleMySQLDB();
    }

    // Clean up previous output

    FileUtil.delete(_outputDir + "/sample-" + _dbType + ".sql");
    FileUtil.deltree(_outputDir + "/output");

    // Generic

    _tempDir = new File(_outputDir, "temp");

    _tempDir.mkdirs();

    final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);

    generateSQL(charPipe);

    try {

      // Specific

      compressSQL(charPipe.getReader());

      // Merge

      mergeSQL();
    } finally {
      FileUtil.deltree(_tempDir);
    }

    StringBundler sb = new StringBundler();

    List<String> keys = ListUtil.fromMapKeys(arguments);

    Collections.sort(keys);

    for (String key : keys) {
      if (!key.startsWith("sample.sql")) {
        continue;
      }

      String value = arguments.get(key);

      sb.append(key);
      sb.append(StringPool.EQUAL);
      sb.append(value);
      sb.append(StringPool.NEW_LINE);
    }

    FileUtil.write(new File(_outputDir, "benchmarks-actual.properties"), sb.toString());
  }
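A hedged sketch of how the builder above might be driven; the property values are placeholders matching the keys the constructor reads, and omitted keys simply default to 0 or false through GetterUtil.

  // Hypothetical driver, not part of the original example.
  public static void main(String[] args) throws Exception {
    Map<String, String> arguments = new HashMap<String, String>();

    arguments.put("sample.sql.base.dir", "./sql");
    arguments.put("sample.sql.db.type", "mysql");
    arguments.put("sample.sql.max.user.count", "100");
    arguments.put("sample.sql.optimize.buffer.size", "8192");
    arguments.put("sample.sql.output.dir", "./sample-output");
    arguments.put("sample.sql.output.merge", "true");

    new SampleSQLBuilder(arguments);
  }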
Example #18
  private static void _updateCompanyKey() throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL("update Company set key_ = null");
  }
Example #19
  public void runSQLTemplate(String path) throws IOException, NamingException, SQLException {

    DB db = DBFactoryUtil.getDB();

    db.runSQLTemplate(path);
  }
Example #20
  public void runSQL(String[] templates) throws IOException, SQLException {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(templates);
  }