public void runSQLTemplate(String path, boolean failOnError)
      throws IOException, NamingException, SQLException {

    DB db = DBFactoryUtil.getDB();

    db.runSQLTemplate(path, failOnError);
  }
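
A hedged usage sketch for the overload above: the template name is hypothetical, and failOnError is the only behavioral switch, letting the template keep running past statements that fail (for example, dropping an index that does not exist yet).

  protected void runIndexTemplate() throws Exception {
    // Hypothetical template name; resolution of the path is left to
    // DB.runSQLTemplate, exactly as in the method above.
    runSQLTemplate("indexes.sql", false);
  }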
Example #2
  protected void verifyOracleNewLine() throws Exception {
    DB db = DBFactoryUtil.getDB();

    String dbType = db.getType();

    if (!dbType.equals(DB.TYPE_ORACLE)) {
      return;
    }

    // This is a workaround for Oracle sqlldr's inability to insert new line
    // characters into long varchar columns. See
    // http://forums.liferay.com/index.php?showtopic=2761&hl=oracle for more
    // information. Check several articles because some articles may not
    // have new lines.

    boolean checkNewLine = false;

    List<JournalArticle> articles =
        JournalArticleLocalServiceUtil.getArticles(DEFAULT_GROUP_ID, 0, NUM_OF_ARTICLES);

    for (JournalArticle article : articles) {
      String content = article.getContent();

      if ((content != null) && content.contains("\\n")) {
        articles = JournalArticleLocalServiceUtil.getArticles(DEFAULT_GROUP_ID);

        for (int j = 0; j < articles.size(); j++) {
          article = articles.get(j);

          JournalArticleLocalServiceUtil.checkNewLine(
              article.getGroupId(), article.getArticleId(), article.getVersion());
        }

        checkNewLine = true;

        break;
      }
    }

    // Only process this once

    if (!checkNewLine) {
      if (_log.isInfoEnabled()) {
        _log.info("Do not fix oracle new line");
      }

      return;
    } else {
      if (_log.isInfoEnabled()) {
        _log.info("Fix oracle new line");
      }
    }
  }
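
The actual repair is delegated to JournalArticleLocalServiceUtil.checkNewLine. As a minimal, self-contained sketch of the kind of transformation that check is expected to apply (repairNewLines is a hypothetical helper, not Liferay API), literal "\n" escape sequences left behind by sqlldr are turned back into real newline characters:

public class NewLineRepairSketch {

  // Hypothetical helper, not the Liferay implementation; shows the repair
  // that checkNewLine is expected to perform on content imported via sqlldr.
  public static String repairNewLines(String content) {
    if (content == null) {
      return null;
    }

    // The stored content contains the two characters backslash and "n",
    // written in Java source as "\\n"; replace them with a real newline.
    return content.replace("\\n", "\n");
  }

  public static void main(String[] args) {
    System.out.println(repairNewLines("First line\\nSecond line"));
  }
}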
Example #3
  private void _reloadSQLTransformer() {
    if (_transformedSqls == null) {
      _transformedSqls = new ConcurrentHashMap<String, String>();
    } else {
      _transformedSqls.clear();
    }

    _vendorDB2 = false;
    _vendorDerby = false;
    _vendorFirebird = false;
    // _vendorHypersonic = false;
    _vendorInformix = false;
    _vendorIngres = false;
    _vendorInterbase = false;
    _vendorMySQL = false;
    _vendorOracle = false;
    _vendorPostgreSQL = false;
    _vendorSQLServer = false;
    _vendorSybase = false;

    DB db = DBFactoryUtil.getDB();

    String dbType = db.getType();

    _db = db;

    if (dbType.equals(DB.TYPE_DB2)) {
      _vendorDB2 = true;
    } else if (dbType.equals(DB.TYPE_DERBY)) {
      _vendorDerby = true;
    } else if (dbType.equals(DB.TYPE_FIREBIRD)) {
      _vendorFirebird = true;
    } else if (dbType.equals(DB.TYPE_HYPERSONIC)) {
      // _vendorHypersonic = true;
    } else if (dbType.equals(DB.TYPE_INFORMIX)) {
      _vendorInformix = true;
    } else if (dbType.equals(DB.TYPE_INGRES)) {
      _vendorIngres = true;
    } else if (dbType.equals(DB.TYPE_INTERBASE)) {
      _vendorInterbase = true;
    } else if (dbType.equals(DB.TYPE_MYSQL)) {
      _vendorMySQL = true;
    } else if (dbType.equals(DB.TYPE_ORACLE)) {
      _vendorOracle = true;
    } else if (dbType.equals(DB.TYPE_POSTGRESQL)) {
      _vendorPostgreSQL = true;
    } else if (dbType.equals(DB.TYPE_SQLSERVER)) {
      _vendorSQLServer = true;
    } else if (dbType.equals(DB.TYPE_SYBASE)) {
      _vendorSybase = true;
    }
  }
  /**
   * Performs a SQL query.
   *
   * @param sql the SQL query
   */
  protected void runSQL(String sql) {
    try {
      DataSource dataSource = ratingsStatsPersistence.getDataSource();

      DB db = DBFactoryUtil.getDB();

      sql = db.buildSQL(sql);
      sql = PortalUtil.transformSQL(sql);

      SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dataSource, sql, new int[0]);

      sqlUpdate.update();
    } catch (Exception e) {
      throw new SystemException(e);
    }
  }
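
A hedged usage sketch for runSQL: a service implementation that inherits the method above could issue an ad hoc, vendor-portable update. The statement and column names are illustrative only; buildSQL and transformSQL inside runSQL adapt the syntax to the active database.

  protected void resetNullTotalScores() {
    // Hypothetical statement for illustration; runSQL handles vendor-specific
    // rewriting and wraps any failure in a SystemException.
    runSQL("update RatingsStats set totalScore = 0 where totalScore is null");
  }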
Example #5
  private String _transform(String sql) {
    if (sql == null) {
      return sql;
    }

    String newSQL = sql;

    newSQL = _replaceBitwiseCheck(newSQL);
    newSQL = _replaceBoolean(newSQL);
    newSQL = _replaceCastLong(newSQL);
    newSQL = _replaceCastText(newSQL);
    newSQL = _replaceIntegerDivision(newSQL);

    if (_vendorDB2) {
      newSQL = _replaceLike(newSQL);
    } else if (_vendorDerby) {
      newSQL = _replaceUnion(newSQL);
    } else if (_vendorMySQL) {
      DB db = DBFactoryUtil.getDB();

      if (!db.isSupportsStringCaseSensitiveQuery()) {
        newSQL = _removeLower(newSQL);
      }
    } else if (_vendorPostgreSQL) {
      newSQL = _replaceNegativeComparison(newSQL);
    } else if (_vendorSQLServer) {
      newSQL = _replaceMod(newSQL);
    } else if (_vendorSybase) {
      newSQL = _replaceMod(newSQL);
      newSQL = _replaceReplace(newSQL);
    }

    if (_log.isDebugEnabled()) {
      _log.debug("Original SQL " + sql);
      _log.debug("Modified SQL " + newSQL);
    }

    return newSQL;
  }
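
The _transformedSqls map that _reloadSQLTransformer resets is presumably consulted before _transform runs, so each distinct statement is only rewritten once. A minimal caching sketch, assuming a transform(String) entry point that is not shown in these excerpts:

  private String transform(String sql) {
    if (sql == null) {
      return sql;
    }

    String newSQL = _transformedSqls.get(sql);

    if (newSQL == null) {
      newSQL = _transform(sql);

      _transformedSqls.put(sql, newSQL);
    }

    return newSQL;
  }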
  protected Scheduler initializeScheduler(String propertiesPrefix, boolean useQuartzCluster)
      throws Exception {

    StdSchedulerFactory schedulerFactory = new StdSchedulerFactory();

    Properties properties = _props.getProperties(propertiesPrefix, true);

    if (useQuartzCluster) {
      DB db = DBFactoryUtil.getDB();

      String dbType = db.getType();

      if (dbType.equals(DB.TYPE_SQLSERVER)) {
        String lockHandlerClassName =
            properties.getProperty("org.quartz.jobStore.lockHandler.class");

        if (Validator.isNull(lockHandlerClassName)) {
          properties.setProperty(
              "org.quartz.jobStore.lockHandler.class", UpdateLockRowSemaphore.class.getName());
        }
      }

      if (GetterUtil.getBoolean(_props.get(PropsKeys.CLUSTER_LINK_ENABLED))) {

        if (dbType.equals(DB.TYPE_HYPERSONIC)) {
          _log.error("Unable to cluster scheduler on Hypersonic");
        } else {
          properties.put("org.quartz.jobStore.isClustered", Boolean.TRUE.toString());
        }
      }
    }

    schedulerFactory.initialize(properties);

    return schedulerFactory.getScheduler();
  }
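
A hedged usage sketch for initializeScheduler: the properties prefix is hypothetical, and start() is the standard Quartz call that begins firing triggers once the scheduler has been built.

  protected void startClusteredScheduler() throws Exception {
    // "org.quartz." is an assumed prefix for the Quartz property block.
    Scheduler scheduler = initializeScheduler("org.quartz.", true);

    scheduler.start();
  }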
Example #7
  protected void doConvert() throws Exception {
    DataSource dataSource = getDataSource();

    Dialect dialect = DialectDetector.getDialect(dataSource);

    DB db = DBFactoryUtil.getDB(dialect);

    List<String> modelNames = ModelHintsUtil.getModels();

    List<Tuple> tableDetails = new ArrayList<Tuple>();

    Connection connection = dataSource.getConnection();

    try {
      MaintenanceUtil.appendStatus("Collecting information for database tables to migration");

      for (String modelName : modelNames) {
        if (!modelName.contains(".model.")) {
          continue;
        }

        String implClassName =
            modelName.replaceFirst("(\\.model\\.)(\\p{Upper}.*)", "$1impl.$2Impl");

        if (_log.isDebugEnabled()) {
          _log.debug("Loading class " + implClassName);
        }

        Class<?> implClass = getImplClass(implClassName);

        if (implClass == null) {
          _log.error("Unable to load class " + implClassName);

          continue;
        }

        Field[] fields = implClass.getFields();

        for (Field field : fields) {
          Tuple tuple = null;

          String fieldName = field.getName();

          if (fieldName.equals("TABLE_NAME")) {
            tuple = getTableDetails(implClass, field, fieldName);
          } else if (fieldName.startsWith("MAPPING_TABLE_") && fieldName.endsWith("_NAME")) {

            tuple = getTableDetails(implClass, field, fieldName);
          }

          if (tuple != null) {
            tableDetails.add(tuple);
          }
        }
      }

      for (Tuple tuple : _UNMAPPED_TABLES) {
        tableDetails.add(tuple);
      }

      if (_log.isDebugEnabled()) {
        _log.debug("Migrating database tables");
      }

      for (int i = 0; i < tableDetails.size(); i++) {
        if ((i > 0) && (i % (tableDetails.size() / 4) == 0)) {
          MaintenanceUtil.appendStatus((i * 100 / tableDetails.size()) + "%");
        }

        Tuple tuple = tableDetails.get(i);

        String table = (String) tuple.getObject(0);
        Object[][] columns = (Object[][]) tuple.getObject(1);
        String sqlCreate = (String) tuple.getObject(2);

        migrateTable(db, connection, table, columns, sqlCreate);
      }
    } finally {
      DataAccess.cleanUp(connection);
    }

    MaintenanceUtil.appendStatus("Please change your JDBC settings before restarting server");

    ShutdownUtil.shutdown(0);
  }
  public SampleSQLBuilder(Map<String, String> arguments) throws Exception {
    String baseDir = arguments.get("sample.sql.base.dir");

    _dbType = arguments.get("sample.sql.db.type");
    _maxBlogsEntryCommentCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.blogs.entry.comment.count"));
    _maxBlogsEntryCount = GetterUtil.getInteger(arguments.get("sample.sql.max.blogs.entry.count"));
    _maxDDLRecordCount = GetterUtil.getInteger(arguments.get("sample.sql.max.ddl.record.count"));
    _maxDDLRecordSetCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.ddl.record.set.count"));
    _maxDLFileEntryCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.dl.file.entry.count"));
    _maxDLFileEntrySize = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.file.entry.size"));
    _maxDLFolderCount = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.folder.count"));
    _maxDLFolderDepth = GetterUtil.getInteger(arguments.get("sample.sql.max.dl.folder.depth"));
    _maxGroupCount = GetterUtil.getInteger(arguments.get("sample.sql.max.group.count"));
    _maxJournalArticleCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.journal.article.count"));
    _maxJournalArticleSize =
        GetterUtil.getInteger(arguments.get("sample.sql.max.journal.article.size"));
    _maxMBCategoryCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.category.count"));
    _maxMBMessageCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.message.count"));
    _maxMBThreadCount = GetterUtil.getInteger(arguments.get("sample.sql.max.mb.thread.count"));
    _maxUserCount = GetterUtil.getInteger(arguments.get("sample.sql.max.user.count"));
    _maxUserToGroupCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.user.to.group.count"));
    _maxWikiNodeCount = GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.node.count"));
    _maxWikiPageCommentCount =
        GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.page.comment.count"));
    _maxWikiPageCount = GetterUtil.getInteger(arguments.get("sample.sql.max.wiki.page.count"));
    _optimizeBufferSize = GetterUtil.getInteger(arguments.get("sample.sql.optimize.buffer.size"));
    _outputDir = arguments.get("sample.sql.output.dir");
    _outputMerge = GetterUtil.getBoolean(arguments.get("sample.sql.output.merge"));

    _dataFactory =
        new DataFactory(
            baseDir,
            _maxBlogsEntryCount,
            _maxGroupCount,
            _maxJournalArticleSize,
            _maxMBCategoryCount,
            _maxMBThreadCount,
            _maxMBMessageCount,
            _maxUserToGroupCount);

    _db = DBFactoryUtil.getDB(_dbType);

    if (_db instanceof MySQLDB) {
      _db = new SampleMySQLDB();
    }

    // Clean up previous output

    FileUtil.delete(_outputDir + "/sample-" + _dbType + ".sql");
    FileUtil.deltree(_outputDir + "/output");

    // Generic

    _tempDir = new File(_outputDir, "temp");

    _tempDir.mkdirs();

    final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);

    generateSQL(charPipe);

    try {

      // Specific

      compressSQL(charPipe.getReader());

      // Merge

      mergeSQL();
    } finally {
      FileUtil.deltree(_tempDir);
    }

    StringBundler sb = new StringBundler();

    List<String> keys = ListUtil.fromMapKeys(arguments);

    Collections.sort(keys);

    for (String key : keys) {
      if (!key.startsWith("sample.sql")) {
        continue;
      }

      String value = arguments.get(key);

      sb.append(key);
      sb.append(StringPool.EQUAL);
      sb.append(value);
      sb.append(StringPool.NEW_LINE);
    }

    FileUtil.write(new File(_outputDir, "benchmarks-actual.properties"), sb.toString());
  }
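
A hedged sketch of driving SampleSQLBuilder from a map of arguments: the keys come from the constructor above, the values are illustrative only, and any sample.sql.* counters that are omitted fall back to GetterUtil's defaults.

  public static void main(String[] args) throws Exception {
    Map<String, String> arguments = new HashMap<String, String>();

    // Illustrative values; see the constructor above for the full key set.
    arguments.put("sample.sql.base.dir", "./sql");
    arguments.put("sample.sql.db.type", "mysql");
    arguments.put("sample.sql.output.dir", "./output");
    arguments.put("sample.sql.output.merge", "true");
    arguments.put("sample.sql.max.user.count", "100");

    new SampleSQLBuilder(arguments);
  }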
  protected boolean isSupportsStringCaseSensitiveQuery() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsStringCaseSensitiveQuery();
  }
  protected boolean isSupportsUpdateWithInnerJoin() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsUpdateWithInnerJoin();
  }
  protected long increment(String name) {
    DB db = DBFactoryUtil.getDB();

    return db.increment(name);
  }
  protected boolean isSupportsAlterColumnType() {
    DB db = DBFactoryUtil.getDB();

    return db.isSupportsAlterColumnType();
  }
  protected void runSQL(String template) throws IOException, SQLException {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(template);
  }
  protected long increment() {
    DB db = DBFactoryUtil.getDB();

    return db.increment();
  }
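
A hedged sketch of the two increment helpers above: the no-argument form presumably draws the next value from the portal-wide counter, while the named form keeps a separate sequence per counter name; the counter chosen here is only an example.

  protected long assignPrimaryKey() {
    // Portal-wide counter, as wrapped by increment() above.
    return increment();
  }

  protected long nextJournalArticleId() {
    // Named counter; JournalArticle is used here purely as an example name.
    return increment(JournalArticle.class.getName());
  }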
  protected void runSQL(String[] sqls) throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(sqls);
  }
Example #16
  private static void _deleteTempImages() throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(_DELETE_TEMP_IMAGES_1);
    db.runSQL(_DELETE_TEMP_IMAGES_2);
  }
Example #17
  private static void _updateCompanyKey() throws Exception {
    DB db = DBFactoryUtil.getDB();

    db.runSQL("update Company set key_ = null");
  }
Example #18
  public void runSQLTemplate(String path) throws IOException, NamingException, SQLException {

    DB db = DBFactoryUtil.getDB();

    db.runSQLTemplate(path);
  }
Example #19
  public void runSQL(String[] templates) throws IOException, SQLException {
    DB db = DBFactoryUtil.getDB();

    db.runSQL(templates);
  }