public void getFields( RowMetaInterface r, String name, RowMetaInterface info[], StepMeta nextStep, VariableSpace space) throws KettleStepException { // It's best that this method doesn't change anything by itself. // Eventually it's the Mapping step that's going to tell this step how to behave meta-data wise. // It is the mapping step that tells the mapping output step what fields to rename. // if (inputValueRenames != null) { for (MappingValueRename valueRename : inputValueRenames) { ValueMetaInterface valueMeta = r.searchValueMeta(valueRename.getTargetValueName()); if (valueMeta != null) { valueMeta.setName(valueRename.getSourceValueName()); } } } // This is the optionally entered stuff in the output tab of the mapping dialog. // if (outputValueRenames != null) { for (MappingValueRename valueRename : outputValueRenames) { ValueMetaInterface valueMeta = r.searchValueMeta(valueRename.getSourceValueName()); if (valueMeta != null) { valueMeta.setName(valueRename.getTargetValueName()); } } } }
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) throws KettleStepException { SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do! if (databaseMeta != null) { if (prev != null && prev.size() > 0) { // Copy the row RowMetaInterface tableFields = new RowMeta(); // Now change the field names for (int i = 0; i < fieldTable.length; i++) { ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]); if (v != null) { ValueMetaInterface tableField = v.clone(); tableField.setName(fieldTable[i]); tableFields.addValueMeta(tableField); } else { throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows"); } } if (!Const.isEmpty(tableName)) { Database db = new Database(loggingObject, databaseMeta); db.shareVariablesWith(transMeta); try { db.connect(); String schemaTable = databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute(schemaName), transMeta.environmentSubstitute(tableName)); String sql = db.getDDL(schemaTable, tableFields, null, false, null, true); if (sql.length() == 0) retval.setSQL(null); else retval.setSQL(sql); } catch (KettleException e) { retval.setError( BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.ErrorOccurred") + e.getMessage()); // $NON-NLS-1$ } } else { retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection")); // $NON-NLS-1$ } } else { retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NotReceivingAnyFields")); // $NON-NLS-1$ } } else { retval.setError( BaseMessages.getString( PKG, "GPBulkLoaderMeta.GetSQL.NoConnectionDefined")); // $NON-NLS-1$ } return retval; }
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException { if (Const.isEmpty(info) || info[0] == null) // null or length 0 : no info from database { for (int i = 0; i < returnValueNewName.length; i++) { ValueMetaInterface v = new ValueMeta(returnValueNewName[i], returnValueDefaultType[i]); v.setOrigin(name); row.addValueMeta(v); } } else { for (int i = 0; i < returnValueNewName.length; i++) { ValueMetaInterface v = info[0].searchValueMeta(returnValueField[i]); if (v != null) { v.setName(returnValueNewName[i]); v.setOrigin(name); row.addValueMeta(v); } } } }
/**
 * Builds the SQL needed to create or alter the target table for the Ingres
 * VectorWise bulk loader, based on the current dialog settings, and shows it
 * to the user (SQL editor, "nothing to do" info box, or error box).
 */
private void sql() {
  try {
    IngresVectorwiseLoaderMeta info = new IngresVectorwiseLoaderMeta();
    getInfo(info); // capture the dialog's current settings into the meta object

    RowMetaInterface prev = transMeta.getPrevStepFields(stepname);
    StepMeta stepMeta = transMeta.findStep(stepname);

    // Only use the fields that were specified.
    //
    RowMetaInterface prevNew = new RowMeta();

    for (int i = 0; i < info.getFieldDatabase().length; i++) {
      // Rename each selected stream field to its database column name.
      ValueMetaInterface insValue = prev.searchValueMeta(info.getFieldStream()[i]);
      if (insValue != null) {
        ValueMetaInterface insertValue = insValue.clone();
        insertValue.setName(info.getFieldDatabase()[i]);
        prevNew.addValueMeta(insertValue);
      } else {
        throw new KettleStepException(
            BaseMessages.getString(
                PKG,
                "IngresVectorWiseLoaderDialog.FailedToFindField.Message",
                info.getFieldStream()[i]));
      }
    }
    prev = prevNew;

    SQLStatement sql = info.getSQLStatements(transMeta, stepMeta, prev, repository, metaStore);
    if (!sql.hasError()) {
      if (sql.hasSQL()) {
        // Show the generated DDL so the user can review and optionally run it.
        SQLEditor sqledit =
            new SQLEditor(
                transMeta,
                shell,
                SWT.NONE,
                info.getDatabaseMeta(),
                transMeta.getDbCache(),
                sql.getSQL());
        sqledit.open();
      } else {
        // No DDL generated: the table already matches the field layout.
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION);
        mb.setMessage(
            BaseMessages.getString(PKG, "IngresVectorWiseLoaderDialog.NoSQL.DialogMessage"));
        mb.setText(BaseMessages.getString(PKG, "IngresVectorWiseLoaderDialog.NoSQL.DialogTitle"));
        mb.open();
      }
    } else {
      MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
      mb.setMessage(sql.getError());
      mb.setText(BaseMessages.getString(PKG, "System.Dialog.Error.Title"));
      mb.open();
    }
  } catch (KettleException ke) {
    new ErrorDialog(
        shell,
        BaseMessages.getString(PKG, "IngresVectorWiseLoaderDialog.BuildSQLError.DialogTitle"),
        BaseMessages.getString(PKG, "IngresVectorWiseLoaderDialog.BuildSQLError.DialogMessage"),
        ke);
  }
}
/**
 * Creates a value metadata object for the given type id via the value meta
 * plugin registry and applies the name, length and precision.
 */
public static ValueMetaInterface createValueMeta(String name, int type, int length, int precision)
    throws KettlePluginException {
  // Look up the plugin registered for this value type id.
  PluginInterface plugin =
      pluginRegistry.getPlugin(ValueMetaPluginType.class, String.valueOf(type));
  if (plugin == null) {
    throw new KettlePluginException("Unable to locate value meta plugin of type (id) " + type);
  }

  // Instantiate the plugin's ValueMetaInterface implementation and configure it.
  ValueMetaInterface valueMeta = pluginRegistry.loadClass(plugin, ValueMetaInterface.class);
  valueMeta.setName(name);
  valueMeta.setLength(length, precision);
  return valueMeta;
}
@Test
public void testGetFieldDefinitionInteger() {
  // Integer fields map to progressively wider SQL types as the length grows.
  ValueMetaInterface integerMeta = new ValueMetaInteger();
  integerMeta.setName("integerName");
  integerMeta.setPrecision(0);

  integerMeta.setLength(9);
  assertGetFieldDefinition(integerMeta, "INT");

  integerMeta.setLength(18);
  assertGetFieldDefinition(integerMeta, "BIGINT");

  integerMeta.setLength(19);
  assertGetFieldDefinition(integerMeta, "FLOAT");
}
@Test
public void testGetFieldDefinitionBigNumber() {
  // With precision 0, BigNumber behaves like an integer; with a non-zero
  // precision the combined length decides between FLOAT and DOUBLE.
  ValueMetaInterface bigNumberMeta = new ValueMetaBigNumber();
  bigNumberMeta.setName("bigNumberName");
  bigNumberMeta.setPrecision(0);

  bigNumberMeta.setLength(9);
  assertGetFieldDefinition(bigNumberMeta, "INT");

  bigNumberMeta.setLength(18);
  assertGetFieldDefinition(bigNumberMeta, "BIGINT");

  bigNumberMeta.setLength(19);
  assertGetFieldDefinition(bigNumberMeta, "FLOAT");

  bigNumberMeta.setPrecision(10);

  bigNumberMeta.setLength(16);
  assertGetFieldDefinition(bigNumberMeta, "FLOAT");

  bigNumberMeta.setLength(15);
  assertGetFieldDefinition(bigNumberMeta, "DOUBLE");
}
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException { // Remember the types of the row. int fieldnrs[] = new int[fieldName.length]; ValueMetaInterface[] values = new ValueMetaInterface[fieldName.length]; for (int i = 0; i < fieldName.length; i++) { fieldnrs[i] = row.indexOfValue(fieldName[i]); ValueMetaInterface v = row.getValueMeta(fieldnrs[i]); values[i] = v.clone(); // copy value : default settings! switch (aggregateType[i]) { case TYPE_AGGREGATE_AVERAGE: case TYPE_AGGREGATE_COUNT: case TYPE_AGGREGATE_SUM: values[i].setType(Value.VALUE_TYPE_NUMBER); values[i].setLength(-1, -1); break; } } // Only the aggregate is returned! row.clear(); for (int i = 0; i < fieldName.length; i++) { ValueMetaInterface v = values[i]; v.setName(fieldNewName[i]); v.setOrigin(name); row.addValueMeta(v); } }
/**
 * Computes the output field layout of the Mapping step.
 *
 * <p>It loads the sub-transformation, pushes parameters/variables into it,
 * teaches each mapping-input step in the sub-transformation the row layout it
 * will receive (including renames), then asks the relevant mapping-output step
 * what fields come out and replaces {@code row} with that layout.
 *
 * @param row incoming row layout; cleared and replaced with the mapping output layout
 * @param origin step name recorded as origin (unused directly here)
 * @param info row layouts of the info steps, indexed to match {@code getInfoSteps()}
 * @param nextStep the step this output goes to; selects which output mapping definition applies
 * @param space variable space used for substitution and variable inheritance
 * @throws KettleStepException when the sub-transformation cannot be loaded, a field or
 *     info step cannot be found, or no output mapping definition matches
 */
public void getFields(
    RowMetaInterface row,
    String origin,
    RowMetaInterface info[],
    StepMeta nextStep,
    VariableSpace space)
    throws KettleStepException {
  // First load some interesting data...
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta = loadMappingMeta(this, repository, space);
  } catch (KettleException e) {
    throw new KettleStepException(
        BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToLoadMappingTransformation"),
        e);
  }

  // The field structure may depend on the input parameters as well (think of parameter
  // replacements in MDX queries for instance)
  if (mappingParameters != null) {

    // See if we need to pass all variables from the parent or not...
    //
    if (mappingParameters.isInheritingAllVariables()) {
      mappingTransMeta.copyVariablesFrom(space);
    }

    // Just set the variables in the transformation statically.
    // This just means: set a number of variables or parameter values:
    //
    List<String> subParams = Arrays.asList(mappingTransMeta.listParameters());

    for (int i = 0; i < mappingParameters.getVariable().length; i++) {
      String name = mappingParameters.getVariable()[i];
      String value = space.environmentSubstitute(mappingParameters.getInputField()[i]);
      if (!Const.isEmpty(name) && !Const.isEmpty(value)) {
        // A name that is a declared parameter is set as a parameter value;
        // either way it is also set as a plain variable.
        if (subParams.contains(name)) {
          try {
            mappingTransMeta.setParameterValue(name, value);
          } catch (UnknownParamException e) {
            // this is explicitly checked for up front
          }
        }
        mappingTransMeta.setVariable(name, value);
      }
    }
  }

  // Keep track of all the fields that need renaming...
  //
  List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

  /*
   * Before we ask the mapping outputs anything, we should teach the mapping
   * input steps in the sub-transformation about the data coming in...
   */
  for (MappingIODefinition definition : inputMappings) {

    RowMetaInterface inputRowMeta;

    if (definition.isMainDataPath() || Const.isEmpty(definition.getInputStepname())) {
      // The row metadata, what we pass to the mapping input step
      // definition.getOutputStep(), is "row"
      // However, we do need to re-map some fields...
      //
      inputRowMeta = row.clone();
      if (!inputRowMeta.isEmpty()) {
        for (MappingValueRename valueRename : definition.getValueRenames()) {
          ValueMetaInterface valueMeta =
              inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
          if (valueMeta == null) {
            throw new KettleStepException(
                BaseMessages.getString(
                    PKG,
                    "MappingMeta.Exception.UnableToFindField",
                    valueRename.getSourceValueName()));
          }
          valueMeta.setName(valueRename.getTargetValueName());
        }
      }
    } else {
      // The row metadata that goes to the info mapping input comes from the
      // specified step
      // In fact, it's one of the info steps that is going to contain this
      // information...
      //
      String[] infoSteps = getInfoSteps();
      int infoStepIndex = Const.indexOfString(definition.getInputStepname(), infoSteps);
      if (infoStepIndex < 0) {
        throw new KettleStepException(
            BaseMessages.getString(
                PKG,
                "MappingMeta.Exception.UnableToFindMetadataInfo",
                definition.getInputStepname()));
      }
      if (info[infoStepIndex] != null) {
        inputRowMeta = info[infoStepIndex].clone();
      } else {
        inputRowMeta = null;
      }
    }

    // What is this mapping input step?
    //
    StepMeta mappingInputStep =
        mappingTransMeta.findMappingInputStep(definition.getOutputStepname());

    // We're certain it's a MappingInput step...
    //
    MappingInputMeta mappingInputMeta =
        (MappingInputMeta) mappingInputStep.getStepMetaInterface();

    // Inform the mapping input step about what it's going to receive...
    //
    mappingInputMeta.setInputRowMeta(inputRowMeta);

    // What values are we changing names for?
    //
    mappingInputMeta.setValueRenames(definition.getValueRenames());

    // Keep a list of the input rename values that need to be changed back at
    // the output
    //
    if (definition.isRenamingOnOutput())
      Mapping.addInputRenames(inputRenameList, definition.getValueRenames());
  }

  // All the mapping steps now know what they will be receiving.
  // That also means that the sub-transformation / mapping has everything it
  // needs.
  // So that means that the MappingOutput steps know exactly what the output
  // is going to be.
  // That could basically be anything.
  // It also could have absolutely no resemblance to what came in on the
  // input.
  // The relative old approach is therefore no longer suited.
  //
  // OK, but what we *can* do is have the MappingOutput step rename the
  // appropriate fields.
  // The mapping step will tell this step how it's done.
  //
  // Let's look for the mapping output step that is relevant for this actual
  // call...
  //
  MappingIODefinition mappingOutputDefinition = null;
  if (nextStep == null) {
    // This is the main step we read from...
    // Look up the main step to write to.
    // This is the output mapping definition with "main path" enabled.
    //
    for (MappingIODefinition definition : outputMappings) {
      if (definition.isMainDataPath() || Const.isEmpty(definition.getOutputStepname())) {
        // This is the definition to use...
        //
        mappingOutputDefinition = definition;
      }
    }
  } else {
    // Is there an output mapping definition for this step?
    // If so, we can look up the Mapping output step to see what has changed.
    //
    for (MappingIODefinition definition : outputMappings) {
      if (nextStep.getName().equals(definition.getOutputStepname())
          || definition.isMainDataPath()
          || Const.isEmpty(definition.getOutputStepname())) {
        mappingOutputDefinition = definition;
      }
    }
  }

  if (mappingOutputDefinition == null) {
    throw new KettleStepException(
        BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToFindMappingDefinition"));
  }

  // OK, now find the mapping output step in the mapping...
  // This method in TransMeta takes into account a number of things, such as
  // the step not specified, etc.
  // The method never returns null but throws an exception.
  //
  StepMeta mappingOutputStep =
      mappingTransMeta.findMappingOutputStep(mappingOutputDefinition.getInputStepname());

  // We know it's a mapping output step...
  MappingOutputMeta mappingOutputMeta =
      (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

  // Change a few columns.
  mappingOutputMeta.setOutputValueRenames(mappingOutputDefinition.getValueRenames());

  // Perhaps we need to change a few input columns back to the original?
  //
  mappingOutputMeta.setInputValueRenames(inputRenameList);

  // Now we know wat's going to come out of there...
  // This is going to be the full row, including all the remapping, etc.
  //
  RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields(mappingOutputStep);

  row.clear();
  row.addRowMeta(mappingOutputRowMeta);
}
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) throws KettleStepException { SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do! if (databaseMeta != null) { if (prev != null && prev.size() > 0) { // Copy the row RowMetaInterface tableFields = new RowMeta(); // Now change the field names // the key fields if (keyLookup != null) { for (int i = 0; i < keyLookup.length; i++) { ValueMetaInterface v = prev.searchValueMeta(keyStream[i]); if (v != null) { ValueMetaInterface tableField = v.clone(); tableField.setName(keyLookup[i]); tableFields.addValueMeta(tableField); } else { throw new KettleStepException( "Unable to find field [" + keyStream[i] + "] in the input rows"); } } } // the lookup fields for (int i = 0; i < updateLookup.length; i++) { ValueMetaInterface v = prev.searchValueMeta(updateStream[i]); if (v != null) { ValueMetaInterface vk = tableFields.searchValueMeta(updateStream[i]); if (vk == null) { // do not add again when already added as key fields ValueMetaInterface tableField = v.clone(); tableField.setName(updateLookup[i]); tableFields.addValueMeta(tableField); } } else { throw new KettleStepException( "Unable to find field [" + updateStream[i] + "] in the input rows"); } } if (!Const.isEmpty(tableName)) { Database db = new Database(loggingObject, databaseMeta); db.shareVariablesWith(transMeta); try { db.connect(); String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName); String cr_table = db.getDDL(schemaTable, tableFields, null, false, null, true); String cr_index = ""; String[] idx_fields = null; if (keyLookup != null && keyLookup.length > 0) { idx_fields = new String[keyLookup.length]; for (int i = 0; i < keyLookup.length; i++) { idx_fields[i] = keyLookup[i]; } } else { retval.setError( BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingKeyFields")); } // 
Key lookup dimensions... if (idx_fields != null && idx_fields.length > 0 && !db.checkIndexExists(schemaName, tableName, idx_fields)) { String indexname = "idx_" + tableName + "_lookup"; cr_index = db.getCreateIndexStatement( schemaTable, indexname, idx_fields, false, false, false, true); } String sql = cr_table + cr_index; if (sql.length() == 0) { retval.setSQL(null); } else { retval.setSQL(sql); } } catch (KettleException e) { retval.setError( BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.ErrorOccurred") + e.getMessage()); } } else { retval.setError( BaseMessages.getString( PKG, "InsertUpdateMeta.ReturnValue.NoTableDefinedOnConnection")); } } else { retval.setError( BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.NotReceivingAnyFields")); } } else { retval.setError( BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.NoConnectionDefined")); } return retval; }
/**
 * Determines the fields leaving this Mapping Input step.
 *
 * <p>When the parent transformation has provided {@code inputRowMeta}, that
 * layout is used: renames are applied first, then (optionally) the specified
 * fields are selected and re-ordered with the unspecified fields sorted after
 * them. Without parent-provided metadata, the statically configured field
 * definitions are used instead.
 *
 * @param row the row layout to fill in (normally empty on entry — there is no
 *     previous step inside the sub-transformation)
 * @param origin step name recorded as the origin of statically created fields
 * @param info unused here
 * @param nextStep unused here
 * @param space unused here
 * @throws KettleStepException when a rename source or a specified field cannot be found
 */
public void getFields(
    RowMetaInterface row,
    String origin,
    RowMetaInterface[] info,
    StepMeta nextStep,
    VariableSpace space)
    throws KettleStepException {
  // Row should normally be empty when we get here.
  // That is because there is no previous step to this mapping input step from the viewpoint of
  // this single sub-transformation.
  // From the viewpoint of the transformation that executes the mapping, it's important to know
  // what comes out at the exit points.
  // For that reason we need to re-order etc, based on the input specification...
  //
  if (inputRowMeta != null && !inputRowMeta.isEmpty()) {
    // this gets set only in the parent transformation...
    // It includes all the renames that needed to be done
    //
    if (selectingAndSortingUnspecifiedFields) {
      // First rename any fields...
      if (valueRenames != null) {
        for (MappingValueRename valueRename : valueRenames) {
          // NOTE: this renames the value meta inside inputRowMeta in place.
          ValueMetaInterface valueMeta =
              inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
          if (valueMeta == null) {
            throw new KettleStepException(
                BaseMessages.getString(
                    PKG,
                    "MappingInput.Exception.UnableToFindMappedValue",
                    valueRename.getSourceValueName()));
          }
          valueMeta.setName(valueRename.getTargetValueName());
        }
      }

      // Select the specified fields from the input, re-order everything and put the other fields
      // at the back, sorted...
      //
      RowMetaInterface newRow = new RowMeta();

      for (int i = 0; i < fieldName.length; i++) {
        int index = inputRowMeta.indexOfValue(fieldName[i]);
        if (index < 0) {
          throw new KettleStepException(
              BaseMessages.getString(
                  PKG, "MappingInputMeta.Exception.UnknownField", fieldName[i]));
        }

        newRow.addValueMeta(inputRowMeta.getValueMeta(index));
      }

      // Now get the unspecified fields.
      // Sort the fields
      // Add them after the specified fields...
      //
      // NOTE(review): the local String "fieldName" below shadows the fieldName
      // array field used above — intentional but easy to misread.
      List<String> extra = new ArrayList<String>();
      for (int i = 0; i < inputRowMeta.size(); i++) {
        String fieldName = inputRowMeta.getValueMeta(i).getName();
        if (newRow.indexOfValue(fieldName) < 0) {
          extra.add(fieldName);
        }
      }
      Collections.sort(extra);
      for (String fieldName : extra) {
        ValueMetaInterface extraValue = inputRowMeta.searchValueMeta(fieldName);
        newRow.addValueMeta(extraValue);
      }

      // now merge the new row...
      // This is basically the input row meta data with the fields re-ordered.
      //
      row.mergeRowMeta(newRow);
    } else {
      row.mergeRowMeta(inputRowMeta);

      // Validate the existence of all the specified fields...
      //
      if (!row.isEmpty()) {
        for (int i = 0; i < fieldName.length; i++) {
          if (row.indexOfValue(fieldName[i]) < 0) {
            throw new KettleStepException(
                BaseMessages.getString(
                    PKG, "MappingInputMeta.Exception.UnknownField", fieldName[i]));
          }
        }
      }
    }
  } else {
    // We'll have to work with the statically provided information
    for (int i = 0; i < fieldName.length; i++) {
      if (!Const.isEmpty(fieldName[i])) {
        ValueMetaInterface v = new ValueMeta(fieldName[i], fieldType[i]);
        // Fall back to STRING when no explicit type was configured.
        if (v.getType() == ValueMetaInterface.TYPE_NONE) v.setType(ValueMetaInterface.TYPE_STRING);
        v.setLength(fieldLength[i]);
        v.setPrecision(fieldPrecision[i]);
        v.setOrigin(origin);
        row.addValueMeta(v);
      }
    }
  }
}
/**
 * Generates the DDL needed for the combination-lookup target table:
 * CREATE/ALTER TABLE (technical key, key columns, optional hash and
 * last-update columns), a unique index on the technical key, a lookup index
 * (hash field or all key fields), and optionally a sequence.
 *
 * @param transMeta unused here, part of the SPI signature
 * @param stepMeta this step's metadata (provides the statement's step name)
 * @param prev the incoming row layout; must be non-empty to generate SQL
 * @return a {@link SQLStatement} with the concatenated DDL, or an error
 *     description when configuration is incomplete or DDL generation fails
 */
public SQLStatement getSQLStatements(
    TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) {
  SQLStatement retval =
      new SQLStatement(stepMeta.getName(), databaseWriteMeta, null); // default: nothing to do!
  int i;

  if (databaseWriteMeta != null) {
    if (prev != null && prev.size() > 0) {
      if (!Const.isEmpty(tablename)) {
        String schemaTable =
            databaseWriteMeta.getQuotedSchemaTableCombination(schemaName, tablename);
        Database db = new Database(databaseWriteMeta);
        // NOTE(review): db.connect() below is never paired with a disconnect;
        // confirm whether a finally { db.disconnect(); } is needed here.
        try {
          boolean doHash = false;
          String cr_table = null;

          db.connect();

          // OK, what do we put in the new table??
          RowMetaInterface fields = new RowMeta();

          ValueMetaInterface vkeyfield = null;
          if (!Const.isEmpty(technicalKeyField)) {
            // First, the new technical key...
            vkeyfield = new ValueMeta(technicalKeyField, ValueMetaInterface.TYPE_INTEGER);
            vkeyfield.setLength(10);
            vkeyfield.setPrecision(0);
          }

          // Then the hashcode (optional)
          ValueMetaInterface vhashfield = null;
          if (useHash && !Const.isEmpty(hashField)) {
            vhashfield = new ValueMeta(hashField, ValueMetaInterface.TYPE_INTEGER);
            vhashfield.setLength(15);
            vhashfield.setPrecision(0);
            doHash = true;
          }

          // Then the last update field (optional)
          ValueMetaInterface vLastUpdateField = null;
          if (!Const.isEmpty(lastUpdateField)) {
            vLastUpdateField = new ValueMeta(lastUpdateField, ValueMetaInterface.TYPE_DATE);
          }

          if (!db.checkTableExists(schemaTable)) {
            // Brand new table: build the full field list from scratch.
            if (vkeyfield != null) {
              // Add technical key field.
              fields.addValueMeta(vkeyfield);
            }

            // Add the keys only to the table
            if (keyField != null && keyLookup != null) {
              int cnt = keyField.length;
              for (i = 0; i < cnt; i++) {
                String error_field = ""; // $NON-NLS-1$

                // Find the value in the stream
                ValueMetaInterface v = prev.searchValueMeta(keyField[i]);
                if (v != null) {
                  String name = keyLookup[i];
                  ValueMetaInterface newValue = v.clone();
                  newValue.setName(name);

                  // A key column may not collide with the technical key or hash column.
                  if (vkeyfield != null) {
                    if (name.equals(vkeyfield.getName())
                        || (doHash == true && name.equals(vhashfield.getName()))) {
                      error_field += name;
                    }
                  }
                  if (error_field.length() > 0) {
                    retval.setError(
                        Messages.getString(
                            "ConcurrentCombinationLookupMeta.ReturnValue.NameCollision",
                            error_field)); //$NON-NLS-1$
                  } else {
                    fields.addValueMeta(newValue);
                  }
                }
              }
            }

            if (doHash == true) {
              fields.addValueMeta(vhashfield);
            }

            if (vLastUpdateField != null) {
              fields.addValueMeta(vLastUpdateField);
            }
          } else {
            // Table already exists

            // Get the fields that are in the table now:
            RowMetaInterface tabFields = db.getTableFields(schemaTable);

            // Don't forget to quote these as well...
            databaseWriteMeta.quoteReservedWords(tabFields);

            if (vkeyfield != null && tabFields.searchValueMeta(vkeyfield.getName()) == null) {
              // Add technical key field if it didn't exist yet
              fields.addValueMeta(vkeyfield);
            }

            // Add the already existing fields
            int cnt = tabFields.size();
            for (i = 0; i < cnt; i++) {
              ValueMetaInterface v = tabFields.getValueMeta(i);
              fields.addValueMeta(v);
            }

            // Find the missing fields in the real table
            String keyLookup[] = getKeyLookup();
            String keyField[] = getKeyField();
            if (keyField != null && keyLookup != null) {
              cnt = keyField.length;
              for (i = 0; i < cnt; i++) {
                // Find the value in the stream
                ValueMetaInterface v = prev.searchValueMeta(keyField[i]);
                if (v != null) {
                  ValueMetaInterface newValue = v.clone();
                  newValue.setName(keyLookup[i]);

                  // Does the corresponding name exist in the table
                  if (tabFields.searchValueMeta(newValue.getName()) == null) {
                    fields.addValueMeta(newValue); // nope --> add
                  }
                }
              }
            }

            if (doHash == true && tabFields.searchValueMeta(vhashfield.getName()) == null) {
              // Add hash field
              fields.addValueMeta(vhashfield);
            }

            if (vLastUpdateField != null
                && tabFields.searchValueMeta(vLastUpdateField.getName()) == null) {
              fields.addValueMeta(vLastUpdateField);
            }
          }

          // Generate the table DDL; the technical key column is auto-increment
          // or sequence-driven depending on the configured creation method.
          cr_table =
              db.getDDL(
                  schemaTable,
                  fields,
                  (CREATION_METHOD_SEQUENCE.equals(getTechKeyCreation())
                          && sequenceFrom != null
                          && sequenceFrom.length() != 0)
                      ? null
                      : technicalKeyField,
                  CREATION_METHOD_AUTOINC.equals(getTechKeyCreation()),
                  null,
                  true);

          //
          // OK, now let's build the index
          //

          // What fields do we put int the index?
          // Only the hashcode or all fields?
          String cr_index = ""; // $NON-NLS-1$
          String cr_uniq_index = ""; // $NON-NLS-1$
          String idx_fields[] = null;
          if (useHash) {
            if (hashField != null && hashField.length() > 0) {
              idx_fields = new String[] {hashField};
            } else {
              retval.setError(
                  Messages.getString(
                      "ConcurrentCombinationLookupMeta.ReturnValue.NotHashFieldSpecified")); //$NON-NLS-1$
            }
          } else // index on all key fields...
          {
            if (!Const.isEmpty(keyLookup)) {
              int nrfields = keyLookup.length;
              if (nrfields > 32
                  && databaseWriteMeta.getDatabaseType() == DatabaseMeta.TYPE_DATABASE_ORACLE) {
                nrfields = 32; // Oracle indexes are limited to 32 fields...
              }
              idx_fields = new String[nrfields];
              for (i = 0; i < nrfields; i++) idx_fields[i] = keyLookup[i];
            } else {
              retval.setError(
                  Messages.getString(
                      "ConcurrentCombinationLookupMeta.ReturnValue.NotFieldsSpecified")); //$NON-NLS-1$
            }
          }

          // OK, now get the create index statement...
          if (!Const.isEmpty(technicalKeyField)) {
            String techKeyArr[] = new String[] {technicalKeyField};
            if (!db.checkIndexExists(schemaName, tablename, techKeyArr)) {
              String indexname = "idx_" + tablename + "_pk"; // $NON-NLS-1$ //$NON-NLS-2$
              cr_uniq_index =
                  db.getCreateIndexStatement(
                      schemaName, tablename, indexname, techKeyArr, true, true, false, true);
              cr_uniq_index += Const.CR;
            }
          }

          // OK, now get the create lookup index statement...
          if (!Const.isEmpty(idx_fields)
              && !db.checkIndexExists(schemaName, tablename, idx_fields)) {
            String indexname = "idx_" + tablename + "_lookup"; // $NON-NLS-1$ //$NON-NLS-2$
            cr_index =
                db.getCreateIndexStatement(
                    schemaName, tablename, indexname, idx_fields, false, false, false, true);
            cr_index += Const.CR;
          }

          //
          // Don't forget the sequence (optional)
          //
          String cr_seq = ""; // $NON-NLS-1$
          if (databaseWriteMeta.supportsSequences() && !Const.isEmpty(sequenceFrom)) {
            if (!db.checkSequenceExists(schemaName, sequenceFrom)) {
              cr_seq += db.getCreateSequenceStatement(schemaName, sequenceFrom, 1L, 1L, -1L, true);
              cr_seq += Const.CR;
            }
          }
          retval.setSQL(cr_table + cr_uniq_index + cr_index + cr_seq);
        } catch (KettleException e) {
          retval.setError(
              Messages.getString("ConcurrentCombinationLookupMeta.ReturnValue.ErrorOccurred")
                  + Const.CR
                  + e.getMessage()); // $NON-NLS-1$
        }
      } else {
        retval.setError(
            Messages.getString(
                "ConcurrentCombinationLookupMeta.ReturnValue.NotTableDefined")); //$NON-NLS-1$
      }
    } else {
      retval.setError(
          Messages.getString(
              "ConcurrentCombinationLookupMeta.ReturnValue.NotReceivingField")); //$NON-NLS-1$
    }
  } else {
    retval.setError(
        Messages.getString(
            "ConcurrentCombinationLookupMeta.ReturnValue.NotConnectionDefined")); //$NON-NLS-1$
  }
  return retval;
}
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (MySQLTableOutputMeta) smi; data = (MySQLTableOutputData) sdi; ArrayList<Object[]> rows = new ArrayList<Object[]>(); for (int counter = 0; counter < data.batchSize; counter++) { Object[] r = getRow(); ; // this also waits for a previous step to be finished. if (r != null) { rows.add(r); } } if (rows.isEmpty()) { return false; } if (first) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this); data.insertRowMeta = new RowMeta(); // // Cache the position of the compare fields in Row row // data.valuenrs = new int[meta.getFieldDatabase().length]; for (int i = 0; i < meta.getFieldDatabase().length; i++) { data.valuenrs[i] = getInputRowMeta().indexOfValue(meta.getFieldStream()[i]); if (data.valuenrs[i] < 0) { throw new KettleStepException( Messages.getString( "TableOutput.Exception.FieldRequired", meta.getFieldStream()[i])); // $NON-NLS-1$ } } for (int i = 0; i < meta.getFieldDatabase().length; i++) { ValueMetaInterface insValue = getInputRowMeta().searchValueMeta(meta.getFieldStream()[i]); if (insValue != null) { ValueMetaInterface insertValue = insValue.clone(); insertValue.setName(meta.getFieldDatabase()[i]); data.insertRowMeta.addValueMeta(insertValue); } else { throw new KettleStepException( Messages.getString( "TableOutput.Exception.FailedToFindField", meta.getFieldStream()[i])); // $NON-NLS-1$ } } } try { ArrayList<Object[]> outputRowData = writeToTable(getInputRowMeta(), rows); if (outputRowData != null) { for (Object[] row : outputRowData) { putRow(data.outputRowMeta, row); // in case we want it go // further... 
incrementLinesOutput(); } } if (checkFeedback(getLinesRead())) { if (log.isBasic()) logBasic("linenr " + getLinesRead()); // $NON-NLS-1$ } } catch (KettleException e) { logError("Because of an error, this step can't continue: ", e); setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } return true; }
/**
 * Renames a fresh clone of the given value metadata and delegates to the
 * two-argument assertion, leaving the caller's instance untouched.
 */
private void assertGetFieldDefinition(
    ValueMetaInterface valueMetaInterface, String name, String expectedType) {
  ValueMetaInterface renamedClone = valueMetaInterface.clone();
  renamedClone.setName(name);
  assertGetFieldDefinition(renamedClone, expectedType);
}
@Override public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (VerticaBulkLoaderMeta) smi; data = (VerticaBulkLoaderData) sdi; Object[] r = getRow(); // this also waits for a previous step to be // finished. if (r == null) // no more input to be expected... { try { data.close(); } catch (IOException ioe) { throw new KettleStepException("Error releasing resources", ioe); } return false; } if (first) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this); RowMetaInterface tableMeta = meta.getTableRowMetaInterface(); if (!meta.specifyFields()) { // Just take the whole input row data.insertRowMeta = getInputRowMeta().clone(); data.selectedRowFieldIndices = new int[data.insertRowMeta.size()]; data.colSpecs = new ArrayList<ColumnSpec>(data.insertRowMeta.size()); for (int insertFieldIdx = 0; insertFieldIdx < data.insertRowMeta.size(); insertFieldIdx++) { data.selectedRowFieldIndices[insertFieldIdx] = insertFieldIdx; ValueMetaInterface inputValueMeta = data.insertRowMeta.getValueMeta(insertFieldIdx); ValueMetaInterface insertValueMeta = inputValueMeta.clone(); ValueMetaInterface targetValueMeta = tableMeta.getValueMeta(insertFieldIdx); ColumnSpec cs = getColumnSpecFromField(inputValueMeta, insertValueMeta, targetValueMeta); data.colSpecs.add(insertFieldIdx, cs); } } else { int numberOfInsertFields = meta.getFieldDatabase().length; data.insertRowMeta = new RowMeta(); data.colSpecs = new ArrayList<ColumnSpec>(numberOfInsertFields); // Cache the position of the selected fields in the row array data.selectedRowFieldIndices = new int[numberOfInsertFields]; for (int insertFieldIdx = 0; insertFieldIdx < numberOfInsertFields; insertFieldIdx++) { String inputFieldName = meta.getFieldStream()[insertFieldIdx]; int inputFieldIdx = getInputRowMeta().indexOfValue(inputFieldName); if (inputFieldIdx < 0) { throw new KettleStepException( BaseMessages.getString( 
PKG, "VerticaBulkLoader.Exception.FieldRequired", inputFieldName)); //$NON-NLS-1$ } data.selectedRowFieldIndices[insertFieldIdx] = inputFieldIdx; String insertFieldName = meta.getFieldDatabase()[insertFieldIdx]; ValueMetaInterface inputValueMeta = getInputRowMeta().getValueMeta(inputFieldIdx); if (inputValueMeta == null) { throw new KettleStepException( BaseMessages.getString( PKG, "VerticaBulkLoader.Exception.FailedToFindField", meta.getFieldStream()[insertFieldIdx])); // $NON-NLS-1$ } ValueMetaInterface insertValueMeta = inputValueMeta.clone(); insertValueMeta.setName(insertFieldName); data.insertRowMeta.addValueMeta(insertValueMeta); ValueMetaInterface targetValueMeta = tableMeta.searchValueMeta(insertFieldName); ColumnSpec cs = getColumnSpecFromField(inputValueMeta, insertValueMeta, targetValueMeta); data.colSpecs.add(insertFieldIdx, cs); } } try { data.pipedInputStream = new PipedInputStream(); data.encoder = new StreamEncoder(data.colSpecs, data.pipedInputStream); initializeWorker(); data.encoder.writeHeader(); } catch (IOException ioe) { throw new KettleStepException("Error creating stream encoder", ioe); } } try { Object[] outputRowData = writeToOutputStream(r); if (outputRowData != null) { putRow(data.outputRowMeta, outputRowData); // in case we want it // go further... incrementLinesOutput(); } if (checkFeedback(getLinesRead())) { if (log.isBasic()) logBasic("linenr " + getLinesRead()); // $NON-NLS-1$ } } catch (KettleException e) { logError("Because of an error, this step can't continue: ", e); setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } catch (IOException e) { e.printStackTrace(); } return true; }
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (TableOutputMeta) smi; data = (TableOutputData) sdi; Object[] r = getRow(); // this also waits for a previous step to be finished. if (r == null) { // no more input to be expected... return false; } if (first) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore); if (!meta.specifyFields()) { // Just take the input row data.insertRowMeta = getInputRowMeta().clone(); } else { data.insertRowMeta = new RowMeta(); // // Cache the position of the compare fields in Row row // data.valuenrs = new int[meta.getFieldDatabase().length]; for (int i = 0; i < meta.getFieldDatabase().length; i++) { data.valuenrs[i] = getInputRowMeta().indexOfValue(meta.getFieldStream()[i]); if (data.valuenrs[i] < 0) { throw new KettleStepException( BaseMessages.getString( PKG, "TableOutput.Exception.FieldRequired", meta.getFieldStream()[i])); } } for (int i = 0; i < meta.getFieldDatabase().length; i++) { ValueMetaInterface insValue = getInputRowMeta().searchValueMeta(meta.getFieldStream()[i]); if (insValue != null) { ValueMetaInterface insertValue = insValue.clone(); insertValue.setName(meta.getFieldDatabase()[i]); data.insertRowMeta.addValueMeta(insertValue); } else { throw new KettleStepException( BaseMessages.getString( PKG, "TableOutput.Exception.FailedToFindField", meta.getFieldStream()[i])); } } } } try { Object[] outputRowData = writeToTable(getInputRowMeta(), r); if (outputRowData != null) { putRow(data.outputRowMeta, outputRowData); // in case we want it go further... incrementLinesOutput(); } if (checkFeedback(getLinesRead())) { if (log.isBasic()) { logBasic("linenr " + getLinesRead()); } } } catch (KettleException e) { logError("Because of an error, this step can't continue: ", e); setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } return true; }