@Override
  public Result execute(Result prevResult, int k) throws KettleException {

    Result result = prevResult;

    ML_Classify direct = new ML_Classify();
    direct.setName(this.getRecordsetName());

    direct.setRecordsetName(this.getRecordsetName());
    direct.setModel(model);
    direct.setIndependentVar(independentVar);
    direct.setClassifyType(classifyType);
    direct.setDataType(dataType);

    direct.setRidge(ridge);
    direct.setEpsilon(epsilon);
    direct.setMaxIter(maxIter);
    direct.setPasses(passes);
    direct.setAlpha(alpha);

    // private Text algType; //NaiveBayes, Logistic
    // private Text dependentVar; // 1
    // private Text independentVar; // 2

    // ml.setIterations(this.getIterations());
    // ml.setThreshold(this.getThreshold());

    logBasic("{Iterate Job} Execute = " + direct.ecl());

    logBasic("{Iterate Job} Previous =" + result.getLogText());

    result.setResult(true);

    RowMetaAndData data = new RowMetaAndData();
    data.addValue("ecl", Value.VALUE_TYPE_STRING, direct.ecl());

    // Guard against a null row list before adding to it (the original
    // checked for null only after calling list.add()).
    List<RowMetaAndData> list = result.getRows();
    if (list == null) {
      list = new ArrayList<RowMetaAndData>();
    }
    list.add(data);

    String eclCode = "";
    for (int i = 0; i < list.size(); i++) {
      RowMetaAndData rowData = list.get(i);
      String code = rowData.getString("ecl", null);
      if (code != null) {
        eclCode += code;
      }
    }
    logBasic("{Iterate Job} ECL Code = " + eclCode);

    result.setRows(list);

    return result;
  }
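The accumulate-the-"ecl"-column logic above recurs in every execute() example below; the Rollup example later calls a parseEclFromRowData() helper for exactly this. A minimal sketch of such a helper (hypothetical; only the name is taken from the Rollup example, the body is an assumption):

  // Hypothetical helper: concatenates the "ecl" column of each result row.
  // Assumes rows were produced by the addValue("ecl", ...) calls above.
  private String parseEclFromRowData(List<RowMetaAndData> rows) throws KettleValueException {
    StringBuilder eclCode = new StringBuilder();
    for (RowMetaAndData rowData : rows) {
      String code = rowData.getString("ecl", null);
      if (code != null) {
        eclCode.append(code);
      }
    }
    return eclCode.toString();
  }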
  public ResultSet getTableTypes() throws SQLException {
    List<RowMetaAndData> rowAndDatas = new ArrayList<RowMetaAndData>();
    RowMetaAndData rd = new RowMetaAndData();
    rd.addValue("TABLE_TYPE", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
    rowAndDatas.add(rd);
    return new KettleJDBCResultSet(null, rowAndDatas, "*");
  }
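A hypothetical caller, assuming the surrounding class implements java.sql.DatabaseMetaData for this Kettle JDBC driver and that a connection is already open:

  // Hypothetical usage: list the table types the driver reports.
  DatabaseMetaData meta = connection.getMetaData();
  ResultSet rs = meta.getTableTypes();
  while (rs.next()) {
    System.out.println(rs.getString("TABLE_TYPE")); // e.g. "TABLE"
  }
  rs.close();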
Example #3
  @Override
  public Result execute(Result prevResult, int k) throws KettleException {

    Result result = modifyResults(prevResult);
    if (result.isStopped()) {
      return result;
    }
    Rollup rollup = new Rollup();
    rollup.setName(this.getRecordsetName());
    rollup.setRecordset(this.getRecordset());
    rollup.setRecordFormat(this.getRecordset());
    rollup.setRunLocal(this.getRunLocal());

    rollup.setCondition(this.getCondition());
    rollup.setFieldlist(this.getFieldlist());

    if (this.group.equalsIgnoreCase("yes")) {
      rollup.setGroup("GROUP");
    } else {
      rollup.setGroup("");
    }

    rollup.setTransformName(this.getTransformName());

    rollup.setTransform(generateEclForMapperGrid());

    logBasic("{rollup Job} Execute = " + rollup.ecl());

    logBasic("{rollup Job} Previous =" + result.getLogText());

    result.setResult(true);

    RowMetaAndData data = new RowMetaAndData();
    data.addValue("ecl", Value.VALUE_TYPE_STRING, rollup.ecl());

    // Guard against a null row list before adding to it.
    List<RowMetaAndData> list = result.getRows();
    if (list == null) {
      list = new ArrayList<RowMetaAndData>();
    }
    list.add(data);
    String eclCode = parseEclFromRowData(list);
    logBasic("{rollup Job} ECL Code = " + eclCode);
    result.setRows(list);

    return result;
  }
  private synchronized ObjectId insertDatabaseAttribute(
      ObjectId id_database, String code, String value_str) throws KettleException {
    ObjectId id = repository.connectionDelegate.getNextDatabaseAttributeID();

    RowMetaAndData table = new RowMetaAndData();

    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE,
            ValueMetaInterface.TYPE_INTEGER),
        id);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE,
            ValueMetaInterface.TYPE_INTEGER),
        id_database);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, ValueMetaInterface.TYPE_STRING),
        code);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR,
            ValueMetaInterface.TYPE_STRING),
        value_str);

    /*
     * If we have prepared the insert, we don't do it again. We assume that all the step insert statements come one
     * after the other.
     */
    repository
        .connectionDelegate
        .getDatabase()
        .prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    if (log.isDebug()) {
      log.logDebug("saved database attribute [" + code + "]");
    }

    return id;
  }
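A hypothetical caller, persisting one attribute row per extra connection option (databaseMeta, getExtraOptions(), and the loop are assumptions, not part of the original delegate):

  // Hypothetical usage: save each extra option as a database attribute row.
  for (Map.Entry<String, String> option : databaseMeta.getExtraOptions().entrySet()) {
    insertDatabaseAttribute(id_database, option.getKey(), option.getValue());
  }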
  private synchronized ObjectId insertCluster(ClusterSchema clusterSchema) throws KettleException {
    if (getClusterID(clusterSchema.getName()) != null) {
      // This cluster schema name is already in use. Throw an exception.
      throw new KettleObjectExistsException(
          "Failed to create object in repository. Object ["
              + clusterSchema.getName()
              + "] already exists.");
    }

    ObjectId id = repository.connectionDelegate.getNextClusterID();

    RowMetaAndData table = new RowMetaAndData();

    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER),
        id);
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_CLUSTER_NAME, ValueMetaInterface.TYPE_STRING),
        clusterSchema.getName());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, ValueMetaInterface.TYPE_STRING),
        clusterSchema.getBasePort());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE,
            ValueMetaInterface.TYPE_STRING),
        clusterSchema.getSocketsBufferSize());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL,
            ValueMetaInterface.TYPE_STRING),
        clusterSchema.getSocketsFlushInterval());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED,
            ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(clusterSchema.isSocketsCompressed()));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(clusterSchema.isDynamic()));

    repository
        .connectionDelegate
        .getDatabase()
        .prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_CLUSTER);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    return id;
  }
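A hypothetical internal caller inside the delegate's save path (the setObjectId() call is an assumption):

  // Hypothetical usage: insert the schema, then record the generated id on it.
  ObjectId clusterId = insertCluster(clusterSchema);
  clusterSchema.setObjectId(clusterId);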
  public ResultSet getCatalogs() throws SQLException {
    List<RowMetaAndData> rowAndDatas = new ArrayList<RowMetaAndData>();
    // if USE_TRANSNAME_AS_SCHEMA is true, then we use the filename or
    // transformation name as the schema
    if (!isDir) {

      RowMetaAndData rd = new RowMetaAndData();
      rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
      rowAndDatas.add(rd);
      return new KettleJDBCResultSet(null, rowAndDatas, "*");
    }

    for (String name : this.stepsMap.keySet()) {
      RowMetaAndData rd = new RowMetaAndData();
      rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, name);
      rowAndDatas.add(rd);
    }
    return new KettleJDBCResultSet(null, rowAndDatas, "*");
  }
Example #7
  public RowMetaAndData fillTableRow() {
    RowMetaAndData r = new RowMetaAndData();
    r.addValue(new ValueMeta("ID_USER", ValueMetaInterface.TYPE_INTEGER), new Long(getID()));
    r.addValue(new ValueMeta("LOGIN", ValueMetaInterface.TYPE_STRING), login);
    r.addValue(
        new ValueMeta("PASSWORD", ValueMetaInterface.TYPE_STRING), Encr.encryptPassword(password));
    r.addValue(new ValueMeta("NAME", ValueMetaInterface.TYPE_STRING), name);
    r.addValue(new ValueMeta("DESCRIPTION", ValueMetaInterface.TYPE_STRING), description);
    r.addValue(new ValueMeta("ENABLED", ValueMetaInterface.TYPE_BOOLEAN), Boolean.valueOf(enabled));
    r.addValue(
        new ValueMeta("ID_PROFILE", ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(profile.getID()));

    return r;
  }
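The returned row pairs naturally with the prepare/insert/close sequence used elsewhere in these examples. A minimal sketch (hypothetical; the user object, the Database handle db, and the R_USER table name are assumptions):

  // Hypothetical usage: persist the user row built above.
  RowMetaAndData r = user.fillTableRow();
  db.prepareInsert(r.getRowMeta(), "R_USER");
  db.setValuesInsert(r);
  db.insertRow();
  db.closeInsert();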
Example #8
  @Override
  public Result execute(Result prevResult, int k) throws KettleException {

    Result result = prevResult;

    logBasic("{Group job} Creating Group object");

    Group group = new Group();

    logBasic("{Group job} Group object created");

    group.setName(this.getRecordSetName());
    group.setRecordSet(this.getRecordSet());
    group.setBreakCriteria(this.getBreakCriteria());
    group.setIsAll(this.getIsAll());
    group.setRunLocal(this.getIsRunLocal());

    logBasic("{Group job} Execute = " + group.ecl());

    logBasic("{Group job} Previous = " + result.getLogText());

    result.setResult(true);

    RowMetaAndData data = new RowMetaAndData();
    data.addValue("ecl", Value.VALUE_TYPE_STRING, group.ecl());

    // Guard against a null row list before adding to it.
    List<RowMetaAndData> list = result.getRows();
    if (list == null) {
      list = new ArrayList<RowMetaAndData>();
    }
    list.add(data);

    String eclCode = "";
    for (int i = 0; i < list.size(); i++) {
      RowMetaAndData rowData = list.get(i);
      String code = rowData.getString("ecl", null);
      if (code != null) {
        eclCode += code;
      }
    }
    logBasic("{Group job} ECL Code = " + eclCode);
    result.setRows(list);

    return result;
  }
  public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException {
    log.debug("getPrimaryKeys........");

    List<RowMetaAndData> rowAndDatas = new ArrayList<RowMetaAndData>();

    RowMetaAndData rd = new RowMetaAndData();
    rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, catalog);
    rd.addValue("TABLE_SCHEM", ValueMetaInterface.TYPE_STRING, schema);
    rd.addValue("TABLE_NAME", ValueMetaInterface.TYPE_STRING, table);
    rd.addValue("COLUMN_NAME", ValueMetaInterface.TYPE_STRING, "");
    rd.addValue("KEY_SEQ", ValueMetaInterface.TYPE_INTEGER, "1");
    rd.addValue("PK_NAME", ValueMetaInterface.TYPE_STRING, "");
    rowAndDatas.add(rd);
    return new KettleJDBCResultSet(null, rowAndDatas, "*");
  }
Example #10
  @Override
  public Result execute(Result prevResult, int k) throws KettleException {

    Result result = prevResult;

    Sort sort = new Sort();
    sort.setFields(getFields());
    sort.setDatasetName(getDatasetName());
    sort.setName(getRecordsetName());
    logBasic("{Sort Job} Execute = " + sort.ecl());

    logBasic("{Sort Job} Previous =" + result.getLogText());

    result.setResult(true);

    RowMetaAndData data = new RowMetaAndData();
    data.addValue("ecl", Value.VALUE_TYPE_STRING, sort.ecl());

    // Guard against a null row list before adding to it.
    List<RowMetaAndData> list = result.getRows();
    if (list == null) {
      list = new ArrayList<RowMetaAndData>();
    }
    list.add(data);

    String eclCode = "";
    for (int i = 0; i < list.size(); i++) {
      RowMetaAndData rowData = list.get(i);
      String code = rowData.getString("ecl", null);
      if (code != null) {
        eclCode += code;
      }
    }
    logBasic("{Sort Job} ECL Code = " + eclCode);

    result.setRows(list);

    return result;
  }
  public synchronized void updateCluster(ClusterSchema clusterSchema) throws KettleException {
    RowMetaAndData table = new RowMetaAndData();

    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, ValueMetaInterface.TYPE_INTEGER),
        clusterSchema.getObjectId());
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_CLUSTER_NAME, ValueMetaInterface.TYPE_STRING),
        clusterSchema.getName());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, ValueMetaInterface.TYPE_STRING),
        clusterSchema.getBasePort());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE,
            ValueMetaInterface.TYPE_STRING),
        clusterSchema.getSocketsBufferSize());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL,
            ValueMetaInterface.TYPE_STRING),
        clusterSchema.getSocketsFlushInterval());
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED,
            ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(clusterSchema.isSocketsCompressed()));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(clusterSchema.isDynamic()));

    repository.connectionDelegate.updateTableRow(
        KettleDatabaseRepository.TABLE_R_CLUSTER,
        KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER,
        table,
        clusterSchema.getObjectId());
  }
Example #12
  public ResultSet getTables(
      String catalog, String schemaPattern, String tableNamePattern, String[] types)
      throws SQLException {
    List<RowMetaAndData> rowAndDatas = new ArrayList<RowMetaAndData>();
    // get the steps from *.ktr or *.job
    log.debug(
        "catalog:"
            + catalog
            + " , schemaPattern:"
            + schemaPattern
            + " , tableNamePattern:"
            + tableNamePattern);
    if (!isDir) {
      Set<Map.Entry<String, String[]>> tables = this.stepsMap.entrySet();
      log.debug("tables:" + tables);
      for (Map.Entry<String, String[]> o : tables) {
        String[] values = o.getValue();
        for (int i = 0; i < values.length; i++) {
          RowMetaAndData rd = new RowMetaAndData();
          rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, "jdbckettle");
          rd.addValue("TABLE_SCHEM", ValueMetaInterface.TYPE_STRING, "jdbckettle");
          rd.addValue("TABLE_NAME", ValueMetaInterface.TYPE_STRING, values[i]);

          rd.addValue("TABLE_TYPE", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("REMARKS", ValueMetaInterface.TYPE_STRING, "");
          rd.addValue("TYPE_CAT", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("TYPE_SCHEM", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("TYPE_NAME", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("SELF_REFERENCING_COL_NAME", ValueMetaInterface.TYPE_STRING, "");
          rd.addValue("REF_GENERATION", ValueMetaInterface.TYPE_STRING, "");
          rowAndDatas.add(rd);
        }
      }
    } else {
      Set<Map.Entry<String, String[]>> tables = this.stepsMap.entrySet();
      // for BIRT special schema

      boolean isBirtSchema = this.stepsMap.keySet().contains(schemaPattern);
      for (Map.Entry<String, String[]> o : tables) {
        String schema = o.getKey();

        if (!schema.equals(schemaPattern) && isBirtSchema) {
          continue;
        }
        String[] values = o.getValue();

        for (int i = 0; i < values.length; i++) {
          RowMetaAndData rd = new RowMetaAndData();
          rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, "jdbckettle");
          rd.addValue("TABLE_SCHEM", ValueMetaInterface.TYPE_STRING, "jdbckettle");
          rd.addValue("TABLE_NAME", ValueMetaInterface.TYPE_STRING, values[i]);

          rd.addValue("TABLE_TYPE", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("REMARKS", ValueMetaInterface.TYPE_STRING, "");
          rd.addValue("TYPE_CAT", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("TYPE_SCHEM", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("TYPE_NAME", ValueMetaInterface.TYPE_STRING, Constants.TABLE_TYPE_TABLE);
          rd.addValue("SELF_REFERENCING_COL_NAME", ValueMetaInterface.TYPE_STRING, "");
          rd.addValue("REF_GENERATION", ValueMetaInterface.TYPE_STRING, "");
          rowAndDatas.add(rd);
        }
      }
    }
    return new KettleJDBCResultSet(null, rowAndDatas, "*");
  }
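A hypothetical caller, assuming the surrounding class implements java.sql.DatabaseMetaData:

  // Hypothetical usage: list every step exposed as a table by the driver.
  ResultSet tables = meta.getTables(null, null, "%", new String[] {"TABLE"});
  while (tables.next()) {
    System.out.println(tables.getString("TABLE_SCHEM") + "." + tables.getString("TABLE_NAME"));
  }
  tables.close();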
Example #13
  public ResultSet getColumns(
      String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
      throws SQLException {

    log.debug(
        "catalog:"
            + catalog
            + " , schemaPattern:"
            + schemaPattern
            + " , tableNamePattern:"
            + tableNamePattern
            + " ,columnNamePattern:"
            + columnNamePattern);

    List<RowMetaAndData> rowAndDatas = new ArrayList<RowMetaAndData>();
    // if USE_TRANSNAME_AS_SCHEMA is true, then we use the filename or
    // transformation name as the schema
    if (!isDir) {

      // Fetch the row metadata once and reuse it (the original called
      // helper.getRowMeta() twice).
      RowMeta rm = helper.getRowMeta(tableNamePattern);
      log.debug(rm);
      ColInfo[] colInfo = KettleHelper.convert(rm);
      String[] columns = rm.getFieldNames();
      for (int i = 0; columns != null && i < columns.length; i++) {

        RowMetaAndData rd = new RowMetaAndData();
        rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, catalog);
        rd.addValue("TABLE_SCHEM", ValueMetaInterface.TYPE_STRING, schemaPattern);
        rd.addValue("TABLE_NAME", ValueMetaInterface.TYPE_STRING, tableNamePattern);
        rd.addValue("COLUMN_NAME", ValueMetaInterface.TYPE_STRING, columns[i]);
        rd.addValue("DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, colInfo[i].jdbcType);
        rd.addValue("TYPE_NAME", ValueMetaInterface.TYPE_STRING, "");
        rd.addValue("COLUMN_SIZE", ValueMetaInterface.TYPE_INTEGER, columns.length);
        rd.addValue("BUFFER_LENGTH", ValueMetaInterface.TYPE_INTEGER, "20");

        rd.addValue("DECIMAL_DIGITS", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("NUM_PREC_RADIX", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("NULLABLE", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("REMARKS", ValueMetaInterface.TYPE_STRING, "");
        rd.addValue("COLUMN_DEF", ValueMetaInterface.TYPE_STRING, "");
        rd.addValue("SQL_DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("SQL_DATETIME_SUB", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("CHAR_OCTET_LENGTH", ValueMetaInterface.TYPE_INTEGER, "1");
        rd.addValue("ORDINAL_POSITION", ValueMetaInterface.TYPE_INTEGER, "20");
        rd.addValue("IS_NULLABLE", ValueMetaInterface.TYPE_STRING, "0");
        rd.addValue("SCOPE_CATALOG", ValueMetaInterface.TYPE_STRING, "0");
        rd.addValue("SCOPE_SCHEMA", ValueMetaInterface.TYPE_STRING, "0");
        rd.addValue("SCOPE_TABLE", ValueMetaInterface.TYPE_STRING, "0");
        rd.addValue("SOURCE_DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, "1");
        rowAndDatas.add(rd);
      }
      return new KettleJDBCResultSet(null, rowAndDatas, "*");
    }

    //		log.debug("getRowMeta:" + helper.getRowMeta(tableNamePattern));
    RowMeta rm = helper.getRowMeta(tableNamePattern);
    ColInfo[] colInfo = KettleHelper.convert(rm);
    String[] columns = rm.getFieldNames();
    for (int i = 0; columns != null && i < columns.length; i++) {
      String name = columns[i];
      RowMetaAndData rd = new RowMetaAndData();
      rd.addValue("TABLE_CAT", ValueMetaInterface.TYPE_STRING, catalog);
      rd.addValue("TABLE_SCHEM", ValueMetaInterface.TYPE_STRING, schemaPattern);
      rd.addValue("TABLE_NAME", ValueMetaInterface.TYPE_STRING, tableNamePattern);
      rd.addValue("COLUMN_NAME", ValueMetaInterface.TYPE_STRING, name);
      rd.addValue("DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, colInfo[i].getJdbcType());
      rd.addValue("TYPE_NAME", ValueMetaInterface.TYPE_STRING, "");
      rd.addValue("COLUMN_SIZE", ValueMetaInterface.TYPE_INTEGER, columns.length);
      rd.addValue("BUFFER_LENGTH", ValueMetaInterface.TYPE_INTEGER, name);
      rd.addValue("DECIMAL_DIGITS", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("NUM_PREC_RADIX", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("NULLABLE", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("REMARKS", ValueMetaInterface.TYPE_STRING, name);
      rd.addValue("COLUMN_DEF", ValueMetaInterface.TYPE_STRING, name);
      rd.addValue("SQL_DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("SQL_DATETIME_SUB", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("CHAR_OCTET_LENGTH", ValueMetaInterface.TYPE_INTEGER, "1");
      rd.addValue("ORDINAL_POSITION", ValueMetaInterface.TYPE_INTEGER, "20");
      rd.addValue("IS_NULLABLE", ValueMetaInterface.TYPE_STRING, "0");
      rd.addValue("SCOPE_CATALOG", ValueMetaInterface.TYPE_STRING, "0");
      rd.addValue("SCOPE_SCHEMA", ValueMetaInterface.TYPE_STRING, "0");
      rd.addValue("SCOPE_TABLE", ValueMetaInterface.TYPE_STRING, "0");
      rd.addValue("SOURCE_DATA_TYPE", ValueMetaInterface.TYPE_INTEGER, "1");
      rowAndDatas.add(rd);
    }

    return new KettleJDBCResultSet(null, rowAndDatas, "*");
  }
  // CHECKSTYLE:LineLength:OFF
  public synchronized ObjectId insertStep(
      ObjectId id_transformation,
      String name,
      String description,
      String steptype,
      boolean distribute,
      long copies,
      long gui_location_x,
      long gui_location_y,
      boolean gui_draw,
      String copiesString)
      throws KettleException {
    ObjectId id = repository.connectionDelegate.getNextStepID();

    ObjectId id_step_type = getStepTypeID(steptype);

    RowMetaAndData table = new RowMetaAndData();

    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_STEP_ID_STEP, ValueMetaInterface.TYPE_INTEGER),
        id);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER),
        id_transformation);
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_STEP_NAME, ValueMetaInterface.TYPE_STRING),
        name);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, ValueMetaInterface.TYPE_STRING),
        description);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER),
        id_step_type);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(distribute));
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_STEP_COPIES, ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(copies));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(gui_location_x));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(gui_location_y));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN),
        Boolean.valueOf(gui_draw));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, ValueMetaInterface.TYPE_STRING),
        copiesString);

    repository
        .connectionDelegate
        .getDatabase()
        .prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_STEP);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    return id;
  }
  public synchronized void updateDatabase(
      ObjectId id_database,
      String name,
      String type,
      String access,
      String host,
      String dbname,
      String port,
      String user,
      String pass,
      String servername,
      String data_tablespace,
      String index_tablespace)
      throws KettleException {
    ObjectId id_database_type = getDatabaseTypeID(type);
    ObjectId id_database_contype = getDatabaseConTypeID(access);

    RowMetaAndData table = new RowMetaAndData();
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING),
        name);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE,
            ValueMetaInterface.TYPE_INTEGER),
        id_database_type);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE,
            ValueMetaInterface.TYPE_INTEGER),
        id_database_contype);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING),
        host);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING),
        dbname);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(Const.toInt(port, -1)));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING),
        user);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING),
        Encr.encryptPasswordIfNotUsingVariables(pass));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING),
        servername);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING),
        data_tablespace);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING),
        index_tablespace);

    repository.connectionDelegate.updateTableRow(
        KettleDatabaseRepository.TABLE_R_DATABASE,
        KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE,
        table,
        id_database);
  }
  public synchronized ObjectId insertDatabase(
      String name,
      String type,
      String access,
      String host,
      String dbname,
      String port,
      String user,
      String pass,
      String servername,
      String data_tablespace,
      String index_tablespace)
      throws KettleException {

    ObjectId id = repository.connectionDelegate.getNextDatabaseID();

    ObjectId id_database_type = getDatabaseTypeID(type);
    if (id_database_type == null) {
      // Database type not yet registered in the repository: add it first.

      id_database_type = repository.connectionDelegate.getNextDatabaseTypeID();

      String tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE;
      RowMetaInterface tableMeta = new RowMeta();

      tableMeta.addValueMeta(
          new ValueMeta(
              KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE,
              ValueMetaInterface.TYPE_INTEGER,
              5,
              0));
      tableMeta.addValueMeta(
          new ValueMeta(
              KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE,
              ValueMetaInterface.TYPE_STRING,
              KettleDatabaseRepository.REP_STRING_CODE_LENGTH,
              0));
      tableMeta.addValueMeta(
          new ValueMeta(
              KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION,
              ValueMetaInterface.TYPE_STRING,
              KettleDatabaseRepository.REP_STRING_LENGTH,
              0));

      repository.connectionDelegate.getDatabase().prepareInsert(tableMeta, tablename);

      Object[] tableData = new Object[3];
      int tableIndex = 0;

      tableData[tableIndex++] = new LongObjectId(id_database_type).longValue();
      tableData[tableIndex++] = type;
      tableData[tableIndex++] = type;

      repository.connectionDelegate.getDatabase().setValuesInsert(tableMeta, tableData);
      repository.connectionDelegate.getDatabase().insertRow();
      repository.connectionDelegate.getDatabase().closeInsert();
    }

    ObjectId id_database_contype = getDatabaseConTypeID(access);

    RowMetaAndData table = new RowMetaAndData();
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, ValueMetaInterface.TYPE_INTEGER),
        id);
    table.addValue(
        new ValueMeta(KettleDatabaseRepository.FIELD_DATABASE_NAME, ValueMetaInterface.TYPE_STRING),
        name);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE,
            ValueMetaInterface.TYPE_INTEGER),
        id_database_type);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE,
            ValueMetaInterface.TYPE_INTEGER),
        id_database_contype);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, ValueMetaInterface.TYPE_STRING),
        host);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, ValueMetaInterface.TYPE_STRING),
        dbname);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_PORT, ValueMetaInterface.TYPE_INTEGER),
        Long.valueOf(Const.toInt(port, -1)));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_USERNAME, ValueMetaInterface.TYPE_STRING),
        user);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, ValueMetaInterface.TYPE_STRING),
        Encr.encryptPasswordIfNotUsingVariables(pass));
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, ValueMetaInterface.TYPE_STRING),
        servername);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, ValueMetaInterface.TYPE_STRING),
        data_tablespace);
    table.addValue(
        new ValueMeta(
            KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, ValueMetaInterface.TYPE_STRING),
        index_tablespace);

    repository
        .connectionDelegate
        .getDatabase()
        .prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_DATABASE);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    return id;
  }
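A hypothetical call, assuming a connected repository delegate; all argument values below are made up for illustration:

  // Hypothetical usage: register a new connection; an unknown database type
  // is added to R_DATABASE_TYPE on the fly by the method above.
  ObjectId dbId = delegate.insertDatabase(
      "staging", "MYSQL", "Native", "localhost", "staging_db",
      "3306", "etl_user", "secret", null, null, null);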