Example #1
0
 /**
  * This method traverses the set of selections and renders those selections to the SQL string
  * buffer. This method determines the SQL column aliases. It also calls getBusinessColumnSQL()
  * which renders each individual business column in three different ways. Either as an MQL
  * Formula, an aggregate function, or as a standard SQL column.
  *
  * @param sql sql string buffer
  * @param model business model
  * @param databaseMeta database metadata
  * @param selections sql selections
  * @param disableDistinct if true, disable distinct rendering
  * @param group if true, disable distinct rendering
  * @param locale locale string
  * @param columnsMap map of column aliases to populate
  */
 protected void generateSelect(
     SQLQueryModel query,
     LogicalModel model,
     DatabaseMeta databaseMeta,
     List<Selection> selections,
     boolean disableDistinct,
     int limit,
     boolean group,
     String locale,
     Map<LogicalTable, String> tableAliases,
     Map<String, String> columnsMap,
     Map<String, Object> parameters,
     boolean genAsPreparedStatement) {
   query.setDistinct(!disableDistinct && !group);
   query.setLimit(limit);
   for (int i = 0; i < selections.size(); i++) {
     // in some database implementations, the "as" name has a finite length;
     // for instance, oracle cannot handle a name longer than 30 characters.
     // So, we map a short name here to the longer id, and replace the id
     // later in the resultset metadata.
     String alias = null;
     if (columnsMap != null) {
       alias = databaseMeta.generateColumnAlias(i, selections.get(i).getLogicalColumn().getId());
       columnsMap.put(alias, selections.get(i).getLogicalColumn().getId());
       alias = databaseMeta.quoteField(alias);
     } else {
       alias = databaseMeta.quoteField(selections.get(i).getLogicalColumn().getId());
     }
     SqlAndTables sqlAndTables =
         getBusinessColumnSQL(
             model,
             selections.get(i),
             tableAliases,
             parameters,
             genAsPreparedStatement,
             databaseMeta,
             locale);
     query.addSelection(sqlAndTables.getSql(), alias);
   }
 }
Example #2
0
 /**
  * this method adds the order by statements to the query model
  *
  * @param query sql query model
  * @param model business model
  * @param orderBy list of order bys
  * @param databaseMeta database info
  * @param locale locale string
  */
 protected void generateOrderBy(
     SQLQueryModel query,
     LogicalModel model,
     List<Order> orderBy,
     DatabaseMeta databaseMeta,
     String locale,
     Map<LogicalTable, String> tableAliases,
     Map<String, String> columnsMap,
     Map<String, Object> parameters,
     boolean genAsPreparedStatement) {
   if (orderBy != null) {
     for (Order orderItem : orderBy) {
       LogicalColumn businessColumn = orderItem.getSelection().getLogicalColumn();
       String alias = null;
       if (columnsMap != null) {
         // The column map is a unique mapping of Column alias to the column ID
         // Here we have the column ID and we need the alias.
         // We need to do the order by on the alias, not the column name itself.
         // For most databases, it can be both, but the alias is more standard.
         //
         // Using the column name and not the alias caused an issue on Apache Derby.
         //
         for (String key : columnsMap.keySet()) {
           String value = columnsMap.get(key);
           if (value.equals(businessColumn.getId())) {
             // Found it: the alias is the key
             alias = key;
             break;
           }
         }
       }
       SqlAndTables sqlAndTables =
           getBusinessColumnSQL(
               model,
               orderItem.getSelection(),
               tableAliases,
               parameters,
               genAsPreparedStatement,
               databaseMeta,
               locale);
       query.addOrderBy(
           sqlAndTables.getSql(),
           databaseMeta.quoteField(alias),
           orderItem.getType() != Type.ASC ? OrderType.DESCENDING : null);
     }
   }
 }
Example #3
0
  /**
   * Reads the whole lookup table (key fields plus return fields, optionally ordered) and stores
   * every row into the step's cache via storeRowInCache().
   *
   * @throws KettleException if the SQL construction or the database read fails
   */
  private void loadAllTableDataIntoTheCache() throws KettleException {
    DatabaseMeta dbMeta = meta.getDatabaseMeta();

    try {
      // We only want to get the used table fields...
      //
      StringBuilder sql = new StringBuilder("SELECT ");

      // NOTE(review): iterates over the stream key fields but reads the table key fields;
      // assumes both arrays are parallel and of equal length -- TODO confirm in the meta class.
      for (int i = 0; i < meta.getStreamKeyField1().length; i++) {
        if (i > 0) {
          sql.append(", ");
        }
        sql.append(dbMeta.quoteField(meta.getTableKeyField()[i]));
      }

      // Also grab the return field...
      //
      for (int i = 0; i < meta.getReturnValueField().length; i++) {
        sql.append(", ").append(dbMeta.quoteField(meta.getReturnValueField()[i]));
      }

      // The schema/table
      //
      sql.append(" FROM ")
          .append(
              dbMeta.getQuotedSchemaTableCombination(
                  environmentSubstitute(meta.getSchemaName()),
                  environmentSubstitute(meta.getTablename())));

      // order by?
      if (meta.getOrderByClause() != null && meta.getOrderByClause().length() != 0) {
        sql.append(" ORDER BY ").append(meta.getOrderByClause());
      }

      // Now that we have the SQL constructed, let's store the rows...
      //
      List<Object[]> rows = data.db.getRows(sql.toString(), 0);
      if (rows != null && !rows.isEmpty()) {
        RowMetaInterface returnRowMeta = data.db.getReturnRowMeta();
        // Copy the data into 2 parts: key and value...
        //
        for (Object[] row : rows) {
          int index = 0;
          RowMeta keyMeta = new RowMeta();
          Object[] keyData = new Object[meta.getStreamKeyField1().length];
          for (int i = 0; i < meta.getStreamKeyField1().length; i++) {
            keyData[i] = row[index];
            keyMeta.addValueMeta(returnRowMeta.getValueMeta(index++));
          }
          // The value part follows the key columns in the resultset.
          Object[] valueData = new Object[data.returnMeta.size()];
          for (int i = 0; i < data.returnMeta.size(); i++) {
            valueData[i] = row[index++];
          }
          // Store the data...
          //
          storeRowInCache(keyMeta, keyData, valueData);
          incrementLinesInput();
        }
      }
    } catch (Exception e) {
      throw new KettleException(e);
    }
  }
Example #4
0
  /**
   * This method first traverses the set of included business tables and renders those tables to
   * the SQL query model. Second, it traverses the list of joins and renders those as join
   * conditions. Finally, it traverses the constraints and adds them to the WHERE or HAVING
   * clauses.
   *
   * @param query sql query model
   * @param usedBusinessTables used business tables in query
   * @param model the current business model
   * @param path the join path
   * @param conditions the where conditions
   * @param tableAliases map of logical tables to SQL table aliases
   * @param constraintFormulaMap map of constraints to their pre-parsed SQL formulas
   * @param parameters named parameter values for prepared-statement generation
   * @param genAsPreparedStatement if true, generate the SQL as a prepared statement
   * @param databaseMeta database metadata
   * @param locale locale string
   * @throws PentahoMetadataException if a join or constraint formula cannot be generated
   */
  protected void generateFromAndWhere(
      SQLQueryModel query,
      List<LogicalTable> usedBusinessTables,
      LogicalModel model,
      Path path,
      List<Constraint> conditions,
      Map<LogicalTable, String> tableAliases,
      Map<Constraint, SqlOpenFormula> constraintFormulaMap,
      Map<String, Object> parameters,
      boolean genAsPreparedStatement,
      DatabaseMeta databaseMeta,
      String locale)
      throws PentahoMetadataException {

    // FROM TABLES
    for (int i = 0; i < usedBusinessTables.size(); i++) {
      LogicalTable businessTable = usedBusinessTables.get(i);
      String schemaName = null;
      if (businessTable.getProperty(SqlPhysicalTable.TARGET_SCHEMA) != null) {
        schemaName =
            databaseMeta.quoteField(
                (String) businessTable.getProperty(SqlPhysicalTable.TARGET_SCHEMA));
      }
      // ToDo: Allow table-level override of delaying conditions on outer joins
      // (e.g. via a "delay_table_outer_join_conditions" table property).

      // this code allows subselects to drive the physical model.
      // TODO: make this key off a metadata flag vs. the
      // beginning of the table name.
      String tableName = (String) businessTable.getProperty(SqlPhysicalTable.TARGET_TABLE);
      TargetTableType type =
          (TargetTableType) businessTable.getProperty(SqlPhysicalTable.TARGET_TABLE_TYPE);
      if (type == TargetTableType.INLINE_SQL) {
        // Inline SQL is wrapped as a subselect instead of being quoted as a table name.
        tableName = "(" + tableName + ")"; //$NON-NLS-1$ //$NON-NLS-2$
      } else {
        tableName = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
      }
      query.addTable(tableName, databaseMeta.quoteField(tableAliases.get(businessTable)));
    }

    // JOIN CONDITIONS
    if (path != null) {
      for (int i = 0; i < path.size(); i++) {
        LogicalRelationship relation = path.getRelationship(i);
        String joinFormula =
            getJoin(
                model,
                relation,
                tableAliases,
                parameters,
                genAsPreparedStatement,
                databaseMeta,
                locale);
        String joinOrderKey = relation.getJoinOrderKey();
        JoinType joinType;
        switch (RelationshipType.getJoinType(relation.getRelationshipType())) {
          case LEFT_OUTER:
            joinType = JoinType.LEFT_OUTER_JOIN;
            break;
          case RIGHT_OUTER:
            joinType = JoinType.RIGHT_OUTER_JOIN;
            break;
          case FULL_OUTER:
            joinType = JoinType.FULL_OUTER_JOIN;
            break;
          default:
            joinType = JoinType.INNER_JOIN;
            break;
        }

        String leftTableName =
            databaseMeta.getQuotedSchemaTableCombination(
                (String) relation.getFromTable().getProperty(SqlPhysicalTable.TARGET_SCHEMA),
                (String) relation.getFromTable().getProperty(SqlPhysicalTable.TARGET_TABLE));
        String leftTableAlias = tableAliases.get(relation.getFromTable());
        String rightTableName =
            databaseMeta.getQuotedSchemaTableCombination(
                (String) relation.getToTable().getProperty(SqlPhysicalTable.TARGET_SCHEMA),
                (String) relation.getToTable().getProperty(SqlPhysicalTable.TARGET_TABLE));
        String rightTableAlias = tableAliases.get(relation.getToTable());

        query.addJoin(
            leftTableName,
            leftTableAlias,
            rightTableName,
            rightTableAlias,
            joinType,
            joinFormula,
            joinOrderKey);
      }
    }

    // WHERE CONDITIONS
    if (conditions != null) {
      for (Constraint condition : conditions) {
        SqlOpenFormula formula = constraintFormulaMap.get(condition);

        // configure formula to use table aliases
        formula.setTableAliases(tableAliases);

        // The ones with aggregates in it are for the HAVING clause
        if (!formula.hasAggregate()) {
          String sqlFormula = formula.generateSQL(locale);
          String[] usedTables = formula.getLogicalTableIDs();
          query.addWhereFormula(sqlFormula, condition.getCombinationType().toString(), usedTables);
        } else {
          query.addHavingFormula(
              formula.generateSQL(locale), condition.getCombinationType().toString());
        }
      }
    }
  }
Example #5
0
  /**
   * Renders the SQL join condition between the two tables of a logical relationship.
   *
   * <p>A complex relationship is parsed as an open formula and converted to SQL; on a parse
   * failure the raw complex-join text is used for backward compatibility. A simple relationship
   * is rendered as {@code left.column = right.column}, using the supplied table aliases (or the
   * logical table ids when no alias map is given).
   *
   * @param businessModel the business model
   * @param relation the relationship to render
   * @param tableAliases map of logical tables to SQL aliases; may be null
   * @param parameters named parameter values for prepared-statement generation
   * @param genAsPreparedStatement if true, generate the SQL as a prepared statement
   * @param databaseMeta database metadata used for identifier quoting
   * @param locale locale string
   * @return the SQL join fragment
   * @throws PentahoMetadataException if the relationship is missing a table or column
   */
  protected String getJoin(
      LogicalModel businessModel,
      LogicalRelationship relation,
      Map<LogicalTable, String> tableAliases,
      Map<String, Object> parameters,
      boolean genAsPreparedStatement,
      DatabaseMeta databaseMeta,
      String locale)
      throws PentahoMetadataException {
    String join = ""; //$NON-NLS-1$
    if (relation.isComplex()) {
      try {
        // parse join as MQL
        SqlOpenFormula formula =
            new SqlOpenFormula(
                businessModel,
                databaseMeta,
                relation.getComplexJoin(),
                tableAliases,
                parameters,
                genAsPreparedStatement);
        formula.parseAndValidate();
        join = formula.generateSQL(locale);
      } catch (PentahoMetadataException e) {
        // backward compatibility, deprecate
        logger.warn(
            Messages.getErrorString(
                "SqlGenerator.ERROR_0017_FAILED_TO_PARSE_COMPLEX_JOIN", relation.getComplexJoin()),
            e); //$NON-NLS-1$
        join = relation.getComplexJoin();
      }
    } else if (relation.getFromTable() != null
        && relation.getToTable() != null
        && relation.getFromColumn() != null
        && relation.getToColumn() != null) {
      // Resolve the table alias for each side; fall back to the logical table id.
      String leftTableAlias =
          (tableAliases != null)
              ? tableAliases.get(relation.getFromColumn().getLogicalTable())
              : relation.getFromColumn().getLogicalTable().getId();
      String rightTableAlias =
          (tableAliases != null)
              ? tableAliases.get(relation.getToColumn().getLogicalTable())
              : relation.getToColumn().getLogicalTable().getId();

      // left.column = right.column, all identifiers quoted
      StringBuilder sb = new StringBuilder();
      sb.append(databaseMeta.quoteField(leftTableAlias))
          .append(".") //$NON-NLS-1$
          .append(
              databaseMeta.quoteField(
                  (String) relation.getFromColumn().getProperty(SqlPhysicalColumn.TARGET_COLUMN)))
          .append(" = ") //$NON-NLS-1$
          .append(databaseMeta.quoteField(rightTableAlias))
          .append(".") //$NON-NLS-1$
          .append(
              databaseMeta.quoteField(
                  (String) relation.getToColumn().getProperty(SqlPhysicalColumn.TARGET_COLUMN)));
      join = sb.toString();
    } else {
      throw new PentahoMetadataException(
          Messages.getErrorString(
              "SqlGenerator.ERROR_0003_INVALID_RELATION", relation.toString())); //$NON-NLS-1$
    }

    return join;
  }
Example #6
0
  /**
   * Renders one business column as SQL.
   *
   * <p>An open-formula column is converted via the libformula subsystem (falling back to the raw
   * target column text on parse failure, for backwards compatibility). Any other column is
   * rendered as a quoted {@code alias.column} reference. In both cases, a column carrying an
   * aggregate is wrapped in its aggregate function expression.
   *
   * @param businessModel the business model
   * @param column the selection to render
   * @param tableAliases map of logical tables to SQL aliases; may be null
   * @param parameters named parameter values for prepared-statement generation
   * @param genAsPreparedStatement if true, generate the SQL as a prepared statement
   * @param databaseMeta database metadata used for identifier quoting
   * @param locale locale string
   * @return the rendered SQL together with the tables/selections it references
   */
  public static SqlAndTables getBusinessColumnSQL(
      LogicalModel businessModel,
      Selection column,
      Map<LogicalTable, String> tableAliases,
      Map<String, Object> parameters,
      boolean genAsPreparedStatement,
      DatabaseMeta databaseMeta,
      String locale) {
    String targetColumn =
        (String) column.getLogicalColumn().getProperty(SqlPhysicalColumn.TARGET_COLUMN);
    LogicalTable logicalTable = column.getLogicalColumn().getLogicalTable();
    boolean isOpenFormula =
        column.getLogicalColumn().getProperty(SqlPhysicalColumn.TARGET_COLUMN_TYPE)
            == TargetColumnType.OPEN_FORMULA;

    if (!isOpenFormula) {
      // Plain column: render as "<alias>.<column>". The alias map may be absent because this
      // method is called in two contexts; the first call determines all the tables involved,
      // making it impossible to guarantee unique aliases, so we fall back to the table id.
      String tableAlias = (tableAliases != null) ? tableAliases.get(logicalTable) : logicalTable.getId();

      // TODO: WPG: instead of using formula, shouldn't we use the physical column's name?
      String tableColumn =
          databaseMeta.quoteField(tableAlias) + "." + databaseMeta.quoteField(targetColumn);

      // For the having clause, for example: HAVING sum(turnover) > 100
      String renderedSql =
          column.hasAggregate()
              ? getFunctionExpression(column, tableColumn, databaseMeta)
              : tableColumn;
      return new SqlAndTables(renderedSql, logicalTable, column);
    }

    // Open formula: convert to sql using libformula subsystem.
    try {
      // we'll need to pass in some context to PMSFormula so it can resolve aliases if necessary
      SqlOpenFormula formula =
          new SqlOpenFormula(
              businessModel,
              logicalTable,
              databaseMeta,
              targetColumn,
              tableAliases,
              parameters,
              genAsPreparedStatement);
      formula.parseAndValidate();

      String formulaSql = formula.generateSQL(locale);

      // check for old style, where function is hardcoded in the model.
      if (column.hasAggregate() && !hasAggregateDefinedAlready(formulaSql, databaseMeta)) {
        formulaSql = getFunctionExpression(column, formulaSql, databaseMeta);
      }

      return new SqlAndTables(formulaSql, formula.getLogicalTables(), formula.getSelections());
    } catch (PentahoMetadataException e) {
      // this is for backwards compatibility; eventually throw any errors
      logger.warn(
          Messages.getErrorString(
              "SqlGenerator.ERROR_0001_FAILED_TO_PARSE_FORMULA", targetColumn),
          e); //$NON-NLS-1$

      // Report just this table and column as being used along with the formula.
      return new SqlAndTables(targetColumn, logicalTable, column);
    }
  }
Example #7
0
  /**
   * Get the contents of the Oracle SQL*Loader control file as specified in the meta object.
   *
   * @param meta the meta object to model the control file after
   * @param rm row metadata used to resolve each stream field's position and type
   * @param r current row; not used by this method
   * @return a string containing the control file contents
   * @throws KettleException if no fields are defined, or a stream field cannot be found in rm
   */
  public String getControlFileContents(OraBulkLoaderMeta meta, RowMetaInterface rm, Object[] r)
      throws KettleException {
    DatabaseMeta dm = meta.getDatabaseMeta();
    // Quoted path of the data file to load.
    String inputName = "'" + environmentSubstitute(meta.getDataFile()) + "'";

    String loadAction = meta.getLoadAction();

    // OPTIONS( ERRORS=..., [ROWS=...], [BINDSIZE=...], [READSIZE=...] )
    StringBuilder contents = new StringBuilder(500);
    contents.append("OPTIONS(").append(Const.CR);
    contents.append("  ERRORS=\'").append(meta.getMaxErrors()).append("\'").append(Const.CR);

    if (meta.getCommitSizeAsInt(this) != 0
        && !(meta.isDirectPath() && getStepMeta().getCopies() > 1)) {
      // For the second part of the above expressions: ROWS is not supported
      // in parallel mode (by sqlldr).
      contents.append("  , ROWS=\'").append(meta.getCommitSize()).append("\'").append(Const.CR);
    }

    if (meta.getBindSizeAsInt(this) != 0) {
      contents.append("  , BINDSIZE=\'").append(meta.getBindSize()).append("\'").append(Const.CR);
    }

    if (meta.getReadSizeAsInt(this) != 0) {
      contents.append("  , READSIZE=\'").append(meta.getReadSize()).append("\'").append(Const.CR);
    }

    contents.append(")").append(Const.CR);

    contents.append("LOAD DATA").append(Const.CR);
    if (!Utils.isEmpty(meta.getCharacterSetName())) {
      contents.append("CHARACTERSET ").append(meta.getCharacterSetName()).append(Const.CR);
    }
    if (!OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals(meta.getLoadMethod())
        || !Utils.isEmpty(meta.getAltRecordTerm())) {
      String infile = inputName;

      if (OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals(meta.getLoadMethod())) {
        infile = "''";
      }

      // For concurrent input, data command line argument must be specified
      contents.append("INFILE ").append(infile);
      if (!Utils.isEmpty(meta.getAltRecordTerm())) {
        // Alternate record terminator, hex-encoded: "STR x'...'"
        contents
            .append(" \"STR x'")
            .append(encodeRecordTerminator(meta.getAltRecordTerm(), meta.getEncoding()))
            .append("'\"");
      }
      contents.append(Const.CR);
    }
    contents
        .append("INTO TABLE ")
        .append(
            dm.getQuotedSchemaTableCombination(
                environmentSubstitute(meta.getSchemaName()),
                environmentSubstitute(meta.getTableName())))
        .append(Const.CR)
        .append(loadAction)
        .append(Const.CR)
        .append("FIELDS TERMINATED BY ',' ENCLOSED BY '\"'")
        .append(Const.CR)
        .append("(");

    String[] streamFields = meta.getFieldStream();
    String[] tableFields = meta.getFieldTable();
    String[] dateMask = meta.getDateMask();

    if (streamFields == null || streamFields.length == 0) {
      throw new KettleException("No fields defined to load to database");
    }

    // One entry per field: "<table column> <sqlldr datatype clause>".
    for (int i = 0; i < streamFields.length; i++) {
      if (i != 0) {
        contents.append(", ").append(Const.CR);
      }
      contents.append(dm.quoteField(tableFields[i]));

      int pos = rm.indexOfValue(streamFields[i]);
      if (pos < 0) {
        throw new KettleException("Could not find field " + streamFields[i] + " in stream");
      }
      ValueMetaInterface v = rm.getValueMeta(pos);
      switch (v.getType()) {
        case ValueMetaInterface.TYPE_STRING:
          // sqlldr's CHAR defaults to a max length of 255; longer strings need it explicit.
          if (v.getLength() > 255) {
            contents.append(" CHAR(").append(v.getLength()).append(")");
          } else {
            contents.append(" CHAR");
          }
          break;
        case ValueMetaInterface.TYPE_INTEGER:
        case ValueMetaInterface.TYPE_NUMBER:
        case ValueMetaInterface.TYPE_BIGNUMBER:
          // Numeric types use sqlldr's default datatype (no clause emitted).
          break;
        case ValueMetaInterface.TYPE_DATE:
          if (OraBulkLoaderMeta.DATE_MASK_DATE.equals(dateMask[i])) {
            contents.append(" DATE 'yyyy-mm-dd'");
          } else if (OraBulkLoaderMeta.DATE_MASK_DATETIME.equals(dateMask[i])) {
            contents.append(" TIMESTAMP 'yyyy-mm-dd hh24:mi:ss.ff'");
          } else {
            // If not specified the default is date.
            contents.append(" DATE 'yyyy-mm-dd'");
          }
          break;
        case ValueMetaInterface.TYPE_BINARY:
          // LOB data is delimited with the sentinel markers written by the data-file writer.
          contents.append(" ENCLOSED BY '<startlob>' AND '<endlob>'");
          break;
        case ValueMetaInterface.TYPE_TIMESTAMP:
          contents.append(" TIMESTAMP 'yyyy-mm-dd hh24:mi:ss.ff'");
          break;
        default:
          break;
      }
    }
    contents.append(")");

    return contents.toString();
  }
  /**
   * Builds the Vertica COPY statement used to bulk-load the target table from STDIN, applying
   * the optional column list, exceptions/rejected-data files, and load modifiers from the meta.
   *
   * @return the COPY statement text
   */
  private String buildCopyStatementSqlString() {
    final DatabaseMeta databaseMeta = data.db.getDatabaseMeta();

    StringBuilder statement = new StringBuilder(150);
    statement
        .append("COPY ")
        .append(
            databaseMeta.getQuotedSchemaTableCombination(
                environmentSubstitute(meta.getSchemaName()),
                environmentSubstitute(meta.getTablename())));

    if (meta.specifyFields()) {
      // Explicit column list: (col1, col2, ...)
      final RowMetaInterface fields = data.insertRowMeta;
      statement.append(" (");
      for (int fieldIndex = 0; fieldIndex < fields.size(); fieldIndex++) {
        if (fieldIndex > 0) {
          statement.append(", ");
        }
        statement.append(databaseMeta.quoteField(fields.getValueMeta(fieldIndex).getName()));
      }
      statement.append(")");
    }

    statement.append(" FROM STDIN NATIVE ");

    if (!Const.isEmpty(meta.getExceptionsFileName())) {
      statement
          .append("EXCEPTIONS E'")
          .append(meta.getExceptionsFileName().replace("'", "\\'"))
          .append("' ");
    }

    if (!Const.isEmpty(meta.getRejectedDataFileName())) {
      statement
          .append("REJECTED DATA E'")
          .append(meta.getRejectedDataFileName().replace("'", "\\'"))
          .append("' ");
    }

    // TODO: Should eventually get a preference for this, but for now, be backward compatible.
    statement.append("ENFORCELENGTH ");

    if (meta.isAbortOnError()) {
      statement.append("ABORT ON ERROR ");
    }

    if (meta.isDirect()) {
      statement.append("DIRECT ");
    }

    if (!Const.isEmpty(meta.getStreamName())) {
      statement
          .append("STREAM NAME E'")
          .append(environmentSubstitute(meta.getStreamName()).replace("'", "\\'"))
          .append("' ");
    }

    // XXX: I believe the right thing to do here is always use NO COMMIT since we want Kettle's
    // configuration to drive. However, NO COMMIT does not seem to work even when the
    // transformation setting 'make the transformation database transactional' is on, so it is
    // intentionally not appended.

    return statement.toString();
  }
  /**
   * Inserts a new record into a junk dimension and returns its technical key.
   *
   * <p>On the first invocation the INSERT prepared statement is constructed and cached in
   * {@code data.prepStatementInsert}; subsequent calls only bind values and execute. When the
   * technical key is auto-increment, the generated key is read back from the statement.
   *
   * @param rowMeta row metadata of the incoming row
   * @param row the incoming row values
   * @param val_key technical key to insert; ignored (and replaced) when auto-increment
   * @param val_crc hash/CRC value to insert; only used when meta.useHash()
   * @return the technical key of the inserted row
   * @throws KettleDatabaseException if preparing, executing, or key retrieval fails
   */
  public Long combiInsert(RowMetaInterface rowMeta, Object[] row, Long val_key, Long val_crc)
      throws KettleDatabaseException {
    String debug = "Combination insert";
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    try {
      // first time: construct prepared statement
      if (data.prepStatementInsert == null) {
        debug = "First: construct prepared statement";

        data.insertRowMeta = new RowMeta();

        /* Construct the SQL statement...
         *
         * INSERT INTO
         * d_test(keyfield, [crcfield,] keylookup[])
         * VALUES(val_key, [val_crc], row values with keynrs[])
         * ;
         */

        String sql = "";
        sql += "INSERT INTO " + data.schemaTable + ("( ");
        boolean comma = false;

        if (!isAutoIncrement()) {
          // NO AUTOINCREMENT: caller supplies the technical key value.
          sql += databaseMeta.quoteField(meta.getTechnicalKeyField());
          data.insertRowMeta.addValueMeta(
              new ValueMeta(meta.getTechnicalKeyField(), ValueMetaInterface.TYPE_INTEGER));
          comma = true;
        } else if (databaseMeta.needsPlaceHolder()) {
          sql += "0"; // placeholder on informix!  Will be replaced in table by real autoinc value.
          // NOTE(review): this branch adds a ValueMeta without a matching '?' in the VALUES
          // list below -- presumably tolerated by setValues(); confirm against Database API.
          data.insertRowMeta.addValueMeta(
              new ValueMeta(meta.getTechnicalKeyField(), ValueMetaInterface.TYPE_INTEGER));
          comma = true;
        }

        if (meta.useHash()) {
          if (comma) {
            sql += ", ";
          }
          sql += databaseMeta.quoteField(meta.getHashField());
          data.insertRowMeta.addValueMeta(
              new ValueMeta(meta.getHashField(), ValueMetaInterface.TYPE_INTEGER));
          comma = true;
        }

        if (!Const.isEmpty(meta.getLastUpdateField())) {
          if (comma) {
            sql += ", ";
          }
          sql += databaseMeta.quoteField(meta.getLastUpdateField());
          data.insertRowMeta.addValueMeta(
              new ValueMeta(meta.getLastUpdateField(), ValueMetaInterface.TYPE_DATE));
          comma = true;
        }

        for (int i = 0; i < meta.getKeyLookup().length; i++) {
          if (comma) {
            sql += ", ";
          }
          sql += databaseMeta.quoteField(meta.getKeyLookup()[i]);
          data.insertRowMeta.addValueMeta(rowMeta.getValueMeta(data.keynrs[i]));
          comma = true;
        }

        sql += ") VALUES (";

        comma = false;

        if (!isAutoIncrement()) {
          sql += '?';
          comma = true;
        }
        if (meta.useHash()) {
          if (comma) {
            sql += ',';
          }
          sql += '?';
          comma = true;
        }
        if (!Const.isEmpty(meta.getLastUpdateField())) {
          if (comma) {
            sql += ',';
          }
          sql += '?';
          comma = true;
        }

        for (int i = 0; i < meta.getKeyLookup().length; i++) {
          if (comma) {
            sql += ',';
          } else {
            comma = true;
          }
          sql += '?';
        }

        sql += " )";

        String sqlStatement = sql;
        try {
          debug = "First: prepare statement";
          if (isAutoIncrement()) {
            logDetailed("SQL with return keys: " + sqlStatement);
            data.prepStatementInsert =
                data.db
                    .getConnection()
                    .prepareStatement(
                        databaseMeta.stripCR(sqlStatement), Statement.RETURN_GENERATED_KEYS);
          } else {
            logDetailed("SQL without return keys: " + sqlStatement);
            data.prepStatementInsert =
                data.db.getConnection().prepareStatement(databaseMeta.stripCR(sqlStatement));
          }
        } catch (Exception ex) {
          // Single catch: the previous separate SQLException/Exception handlers were identical.
          throw new KettleDatabaseException(
              "Unable to prepare combi insert statement : " + Const.CR + sqlStatement, ex);
        }
      }

      debug = "Create new insert row rins";
      Object[] insertRow = new Object[data.insertRowMeta.size()];
      int insertIndex = 0;

      if (!isAutoIncrement()) {
        insertRow[insertIndex] = val_key;
        insertIndex++;
      }
      if (meta.useHash()) {
        insertRow[insertIndex] = val_crc;
        insertIndex++;
      }
      if (!Const.isEmpty(meta.getLastUpdateField())) {
        insertRow[insertIndex] = new Date();
        insertIndex++;
      }
      for (int i = 0; i < data.keynrs.length; i++) {
        insertRow[insertIndex] = row[data.keynrs[i]];
        insertIndex++;
      }

      if (log.isRowLevel()) {
        logRowlevel("rins=" + data.insertRowMeta.getString(insertRow));
      }

      debug = "Set values on insert";
      // INSERT NEW VALUE!
      data.db.setValues(data.insertRowMeta, insertRow, data.prepStatementInsert);

      debug = "Insert row";
      data.db.insertRow(data.prepStatementInsert);

      debug = "Retrieve key";
      if (isAutoIncrement()) {
        ResultSet keys = null;
        try {
          keys = data.prepStatementInsert.getGeneratedKeys(); // 1 key
          if (keys.next()) {
            val_key = Long.valueOf(keys.getLong(1));
          } else {
            throw new KettleDatabaseException(
                "Unable to retrieve auto-increment of combi insert key : "
                    + meta.getTechnicalKeyField()
                    + ", no fields in resultset");
          }
        } catch (SQLException ex) {
          throw new KettleDatabaseException(
              "Unable to retrieve auto-increment of combi insert key : "
                  + meta.getTechnicalKeyField(),
              ex);
        } finally {
          // NOTE(review): throwing from this finally masks any exception raised in the try/catch
          // above; kept as-is for behavioral compatibility.
          try {
            if (keys != null) {
              keys.close();
            }
          } catch (SQLException ex) {
            throw new KettleDatabaseException(
                "Unable to retrieve auto-increment of combi insert key : "
                    + meta.getTechnicalKeyField(),
                ex);
          }
        }
      }
    } catch (Exception e) {
      logError(Const.getStackTracker(e));
      throw new KettleDatabaseException(
          "Unexpected error in combination insert in part [" + debug + "] : " + e.toString(), e);
    }

    return val_key;
  }
  /**
   * Prepares the combination-lookup SELECT statement and caches it in
   * {@code data.prepStatementLookup}.
   *
   * <p>CombinationLookup table: dimension table. keys[]: which dim-fields do we use to look up
   * the key? retval: name of the key to return.
   *
   * @param inputRowMeta row metadata of the incoming rows; used to build the lookup row metadata
   * @throws KettleDatabaseException if the lookup statement cannot be prepared
   */
  public void setCombiLookup(RowMetaInterface inputRowMeta) throws KettleDatabaseException {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();

    String sql = "";
    boolean comma;
    data.lookupRowMeta = new RowMeta();

    /*
     * SELECT <retval>
     * FROM   <table>
     * WHERE  ( ( <key1> = ? ) OR ( <key1> IS NULL AND ? IS NULL ) )
     * AND    ( ( <key2> = ? ) OR ( <key2> IS NULL AND ? IS NULL ) )
     * ...
     * ;
     *
     * OR
     *
     * SELECT <retval>
     * FROM   <table>
     * WHERE  <crcfield> = ?
     * AND    ( ( <key1> = ? ) OR ( <key1> IS NULL AND ? IS NULL ) )
     * AND    ( ( <key2> = ? ) OR ( <key2> IS NULL AND ? IS NULL ) )
     * ...
     * ;
     *
     */

    sql += "SELECT " + databaseMeta.quoteField(meta.getTechnicalKeyField()) + Const.CR;
    sql += "FROM " + data.schemaTable + Const.CR;
    sql += "WHERE ";
    comma = false;

    if (meta.useHash()) {
      // Hash variant: compare the CRC/hash field first to narrow the lookup.
      sql += databaseMeta.quoteField(meta.getHashField()) + " = ? " + Const.CR;
      comma = true;
      data.lookupRowMeta.addValueMeta(
          new ValueMeta(meta.getHashField(), ValueMetaInterface.TYPE_INTEGER));
    } else {
      // Open the first key condition here; subsequent keys are prefixed with " AND ( ( ".
      sql += "( ( ";
    }

    for (int i = 0; i < meta.getKeyLookup().length; i++) {
      if (comma) {
        sql += " AND ( ( ";
      } else {
        comma = true;
      }
      // ( <key> = ? ) OR ( <key> IS NULL AND ? IS NULL ) -- null-safe equality check
      sql +=
          databaseMeta.quoteField(meta.getKeyLookup()[i])
              + " = ? ) OR ( "
              + databaseMeta.quoteField(meta.getKeyLookup()[i]);
      data.lookupRowMeta.addValueMeta(inputRowMeta.getValueMeta(data.keynrs[i]));

      sql += " IS NULL AND ";
      if (databaseMeta.requiresCastToVariousForIsNull()) {
        // Some databases cannot type an untyped "? IS NULL" parameter; cast it explicitly.
        sql += "CAST(? AS VARCHAR(256)) IS NULL";
      } else {
        sql += "? IS NULL";
      }
      // Add the ValueMeta for the null check, BUT cloning needed.
      // Otherwise the field gets renamed and gives problems when referenced by previous steps.
      data.lookupRowMeta.addValueMeta(inputRowMeta.getValueMeta(data.keynrs[i]).clone());

      sql += " ) )";
      sql += Const.CR;
    }

    try {
      if (log.isDebug()) logDebug("preparing combi-lookup statement:" + Const.CR + sql);
      data.prepStatementLookup =
          data.db.getConnection().prepareStatement(databaseMeta.stripCR(sql));
      if (databaseMeta.supportsSetMaxRows()) {
        data.prepStatementLookup.setMaxRows(1); // always get only 1 line back!
      }
    } catch (SQLException ex) {
      throw new KettleDatabaseException("Unable to prepare combi-lookup statement", ex);
    }
  }