private Object convertDataToTargetValueMeta(int i, Object formulaResult) throws KettleException {
  if (formulaResult == null) {
    return formulaResult;
  }
  ValueMetaInterface target = data.outputRowMeta.getValueMeta(i);
  ValueMetaInterface actual = ValueMetaFactory.guessValueMetaInterface(formulaResult);
  return target.convertData(actual, formulaResult);
}
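// A minimal, hedged usage sketch of the ValueMetaInterface.convertData idiom used throughout
// these methods: a source ValueMeta describes the value as it currently is (a String), and a
// target ValueMeta describes the declared output type. The field names "formulaResult" and
// "result" and the TYPE_INTEGER target are illustrative assumptions, not taken from the step
// code; only the ValueMeta(String, int) constructor and convertData call that already appear
// in this listing are relied on, assuming a Kettle core jar on the classpath.
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;

public class ConvertDataSketch {
  public static void main(String[] args) throws KettleValueException {
    // Metadata of the value as it arrives: a String.
    ValueMetaInterface source = new ValueMeta("formulaResult", ValueMetaInterface.TYPE_STRING);
    // Metadata of the value as the output row declares it: an Integer (stored as Long in Kettle).
    ValueMetaInterface target = new ValueMeta("result", ValueMetaInterface.TYPE_INTEGER);

    // convertData parses/converts the String according to the target metadata.
    Object converted = target.convertData(source, "42");
    System.out.println(converted.getClass().getSimpleName() + " " + converted); // expected: Long 42
  }
}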
private Object[] getOneRow() throws KettleException {
  if (!openNextFile()) {
    return null;
  }

  // Build an empty row based on the meta-data
  Object[] outputRowData = buildEmptyRow();

  try {
    // Create new row or clone
    if (meta.getIsInFields()) {
      System.arraycopy(data.readrow, 0, outputRowData, 0, data.readrow.length);
    }

    // Read fields...
    for (int i = 0; i < data.nrInputFields; i++) {
      // Get field
      LoadFileInputField loadFileInputField = meta.getInputFields()[i];

      String o = null;
      switch (loadFileInputField.getElementType()) {
        case LoadFileInputField.ELEMENT_TYPE_FILECONTENT:
          // Do trimming!
          switch (loadFileInputField.getTrimType()) {
            case LoadFileInputField.TYPE_TRIM_LEFT:
              data.filecontent = Const.ltrim(data.filecontent);
              break;
            case LoadFileInputField.TYPE_TRIM_RIGHT:
              data.filecontent = Const.rtrim(data.filecontent);
              break;
            case LoadFileInputField.TYPE_TRIM_BOTH:
              data.filecontent = Const.trim(data.filecontent);
              break;
            default:
              break;
          }
          o = data.filecontent;
          break;
        case LoadFileInputField.ELEMENT_TYPE_FILESIZE:
          o = String.valueOf(data.fileSize);
          break;
        default:
          break;
      }

      int indexField = data.totalpreviousfields + i;

      // Do conversions
      //
      ValueMetaInterface targetValueMeta = data.outputRowMeta.getValueMeta(indexField);
      ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta(indexField);
      outputRowData[indexField] = targetValueMeta.convertData(sourceValueMeta, o);

      // Do we need to repeat this field if it is null?
      if (loadFileInputField.isRepeated()) {
        if (data.previousRow != null && o == null) {
          outputRowData[indexField] = data.previousRow[indexField];
        }
      }
    } // End of loop over fields...

    int rowIndex = data.totalpreviousfields + data.nrInputFields;

    // See if we need to add the filename to the row...
    if (meta.includeFilename() && meta.getFilenameField() != null && meta.getFilenameField().length() > 0) {
      outputRowData[rowIndex++] = data.filename;
    }
    // See if we need to add the row number to the row...
    if (meta.includeRowNumber() && meta.getRowNumberField() != null && meta.getRowNumberField().length() > 0) {
      outputRowData[rowIndex++] = new Long(data.rownr);
    }
    // Possibly add short filename...
    if (meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0) {
      outputRowData[rowIndex++] = data.shortFilename;
    }
    // Add extension
    if (meta.getExtensionField() != null && meta.getExtensionField().length() > 0) {
      outputRowData[rowIndex++] = data.extension;
    }
    // Add path
    if (meta.getPathField() != null && meta.getPathField().length() > 0) {
      outputRowData[rowIndex++] = data.path;
    }
    // Add hidden flag
    if (meta.isHiddenField() != null && meta.isHiddenField().length() > 0) {
      outputRowData[rowIndex++] = new Boolean(data.hidden);
    }
    // Add modification date
    if (meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0) {
      outputRowData[rowIndex++] = data.lastModificationDateTime;
    }
    // Add URI
    if (meta.getUriField() != null && meta.getUriField().length() > 0) {
      outputRowData[rowIndex++] = data.uriName;
    }
    // Add root URI
    if (meta.getRootUriField() != null && meta.getRootUriField().length() > 0) {
      outputRowData[rowIndex++] = data.rootUriName;
    }

    RowMetaInterface irow = getInputRowMeta();
    // Clone the row to make sure the next step doesn't change it in between...
    data.previousRow = irow == null ? outputRowData : (Object[]) irow.cloneRow(outputRowData);

    incrementLinesInput();
    data.rownr++;
  } catch (Exception e) {
    throw new KettleException("Unable to load the file content", e);
  }

  return outputRowData;
}
public static final RowMetaAndData buildRow(
    RowGeneratorMeta meta, List<CheckResultInterface> remarks, String origin) {
  RowMetaInterface rowMeta = new RowMeta();
  Object[] rowData = RowDataUtil.allocateRowData(meta.getFieldName().length);

  for (int i = 0; i < meta.getFieldName().length; i++) {
    int valtype = ValueMeta.getType(meta.getFieldType()[i]);

    if (meta.getFieldName()[i] != null) {
      ValueMetaInterface valueMeta = new ValueMeta(meta.getFieldName()[i], valtype); // build a value!
      valueMeta.setLength(meta.getFieldLength()[i]);
      valueMeta.setPrecision(meta.getFieldPrecision()[i]);
      valueMeta.setConversionMask(meta.getFieldFormat()[i]);
      valueMeta.setGroupingSymbol(meta.getGroup()[i]);
      valueMeta.setDecimalSymbol(meta.getDecimal()[i]);
      valueMeta.setOrigin(origin);

      ValueMetaInterface stringMeta = valueMeta.clone();
      stringMeta.setType(ValueMetaInterface.TYPE_STRING);

      String stringValue = meta.getValue()[i];

      // If the value is empty: consider it to be NULL.
      if (Const.isEmpty(stringValue)) {
        rowData[i] = null;

        if (valueMeta.getType() == ValueMetaInterface.TYPE_NONE) {
          String message = BaseMessages.getString(
              PKG, "RowGenerator.CheckResult.SpecifyTypeError", valueMeta.getName(), stringValue);
          remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
        }
      } else {
        // Convert the data from String to the specified type ...
        //
        try {
          rowData[i] = valueMeta.convertData(stringMeta, stringValue);
        } catch (KettleValueException e) {
          switch (valueMeta.getType()) {
            case ValueMetaInterface.TYPE_NUMBER: {
              String message = BaseMessages.getString(
                  PKG, "RowGenerator.BuildRow.Error.Parsing.Number",
                  valueMeta.getName(), stringValue, e.toString());
              remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
              break;
            }
            case ValueMetaInterface.TYPE_DATE: {
              String message = BaseMessages.getString(
                  PKG, "RowGenerator.BuildRow.Error.Parsing.Date",
                  valueMeta.getName(), stringValue, e.toString());
              remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
              break;
            }
            case ValueMetaInterface.TYPE_INTEGER: {
              String message = BaseMessages.getString(
                  PKG, "RowGenerator.BuildRow.Error.Parsing.Integer",
                  valueMeta.getName(), stringValue, e.toString());
              remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
              break;
            }
            case ValueMetaInterface.TYPE_BIGNUMBER: {
              String message = BaseMessages.getString(
                  PKG, "RowGenerator.BuildRow.Error.Parsing.BigNumber",
                  valueMeta.getName(), stringValue, e.toString());
              remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
              break;
            }
            default: {
              // Boolean and binary don't throw errors normally, so it's probably an unspecified
              // error problem...
              String message = BaseMessages.getString(
                  PKG, "RowGenerator.CheckResult.SpecifyTypeError", valueMeta.getName(), stringValue);
              remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, message, null));
              break;
            }
          }
        }
      }

      // Now add value to the row!
      // This is in fact a copy from the fields row, but now with data.
      rowMeta.addValueMeta(valueMeta);
    }
  }

  return new RowMetaAndData(rowMeta, rowData);
}
/**
 * Performs the lookup based on the meta-data and the input row.
 *
 * @param row The row to use as lookup data and the row to add the returned lookup fields to
 * @return the resulting row after the lookup values were added
 * @throws KettleException In case something goes wrong.
 */
private synchronized Object[] lookupValues(RowMetaInterface inputRowMeta, Object[] row)
    throws KettleException {
  Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());

  Object[] lookupRow = new Object[data.lookupMeta.size()];
  int lookupIndex = 0;

  for (int i = 0; i < meta.getStreamKeyField1().length; i++) {
    if (data.keynrs[i] >= 0) {
      ValueMetaInterface input = inputRowMeta.getValueMeta(data.keynrs[i]);
      ValueMetaInterface value = data.lookupMeta.getValueMeta(lookupIndex);
      lookupRow[lookupIndex] = row[data.keynrs[i]];

      // Try to convert the type if needed
      if (input.getType() != value.getType()) {
        lookupRow[lookupIndex] = value.convertData(input, lookupRow[lookupIndex]);
      }
      lookupIndex++;
    }
    if (data.keynrs2[i] >= 0) {
      ValueMetaInterface input = inputRowMeta.getValueMeta(data.keynrs2[i]);
      ValueMetaInterface value = data.lookupMeta.getValueMeta(lookupIndex);
      lookupRow[lookupIndex] = row[data.keynrs2[i]];

      // Try to convert the type if needed
      if (input.getType() != value.getType()) {
        lookupRow[lookupIndex] = value.convertData(input, lookupRow[lookupIndex]);
      }
      lookupIndex++;
    }
  }

  Object[] add = null;
  boolean cache_now = false;
  boolean cacheHit = false;

  // First, check if we looked this key up before
  if (meta.isCached()) {
    add = getRowFromCache(data.lookupMeta, lookupRow);
    if (add != null) {
      cacheHit = true;
    }
  } else {
    add = null;
  }

  if (add == null) {
    // Do not go to the database when all rows are already in the cache
    // (exception: the LIKE operator, see hasDBCondition).
    if (!(meta.isCached() && meta.isLoadingAllDataInCache()) || data.hasDBCondition) {
      if (log.isRowLevel()) {
        logRowlevel(
            BaseMessages.getString(PKG, "DatabaseLookup.Log.AddedValuesToLookupRow1")
                + meta.getStreamKeyField1().length
                + BaseMessages.getString(PKG, "DatabaseLookup.Log.AddedValuesToLookupRow2")
                + data.lookupMeta.getString(lookupRow));
      }

      data.db.setValuesLookup(data.lookupMeta, lookupRow);
      add = data.db.getLookup(meta.isFailingOnMultipleResults());
      cache_now = true;
    }
  }

  if (add == null) { // nothing was found, unknown code: add default values
    if (meta.isEatingRowOnLookupFailure()) {
      return null;
    }
    if (getStepMeta().isDoingErrorHandling()) {
      putError(getInputRowMeta(), row, 1L, "No lookup found", null, "DBL001");

      // Return null, otherwise the row would still be processed further.
      return null;
    }

    if (log.isRowLevel()) {
      logRowlevel(BaseMessages.getString(PKG, "DatabaseLookup.Log.NoResultsFoundAfterLookup"));
    }

    add = new Object[data.returnMeta.size()];
    for (int i = 0; i < meta.getReturnValueField().length; i++) {
      if (data.nullif[i] != null) {
        add[i] = data.nullif[i];
      } else {
        add[i] = null;
      }
    }
  } else {
    if (log.isRowLevel()) {
      logRowlevel(
          BaseMessages.getString(PKG, "DatabaseLookup.Log.FoundResultsAfterLookup") + add);
    }

    // Only verify the data types if the data comes from the DB, NOT when we have a cache hit.
    // In that case, we already know the data types are OK.
    if (!cacheHit) {
      incrementLinesInput();

      int[] types = meta.getReturnValueDefaultType();

      // The assumption here is that the types are in the same order
      // as the returned lookup row, but since we build the lookup row
      // ourselves, that should not be a problem.
      //
      for (int i = 0; i < types.length; i++) {
        ValueMetaInterface returned = data.db.getReturnRowMeta().getValueMeta(i);
        ValueMetaInterface expected = data.returnMeta.getValueMeta(i);

        if (returned != null && types[i] > 0 && types[i] != returned.getType()) {
          // Set the type to the default return type
          add[i] = expected.convertData(returned, add[i]);
        }
      }
    }
  }

  // Store in cache if we need to!
  // If we already loaded all data into the cache, storing more makes no sense.
  //
  if (meta.isCached() && cache_now && !meta.isLoadingAllDataInCache() && data.allEquals) {
    storeRowInCache(data.lookupMeta, lookupRow, add);
  }

  for (int i = 0; i < data.returnMeta.size(); i++) {
    outputRow[inputRowMeta.size() + i] = add[i];
  }

  return outputRow;
}
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (DatabaseLookupMeta) smi;
  data = (DatabaseLookupData) sdi;

  boolean sendToErrorRow = false;
  String errorMessage = null;

  Object[] r = getRow(); // Get row from input rowset & set row busy!
  if (r == null) { // no more input to be expected...
    setOutputDone();
    return false;
  }

  if (first) {
    first = false;

    // create the output metadata
    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

    if (meta.isCached()) {
      if (meta.getCacheSize() > 0) {
        data.look = new Hashtable<RowMetaAndData, TimedRow>((int) (meta.getCacheSize() * 1.5));
      } else {
        data.look = new Hashtable<RowMetaAndData, TimedRow>();
      }
    }

    data.db.setLookup(
        environmentSubstitute(meta.getSchemaName()),
        environmentSubstitute(meta.getTablename()),
        meta.getTableKeyField(),
        meta.getKeyCondition(),
        meta.getReturnValueField(),
        meta.getReturnValueNewName(),
        meta.getOrderByClause(),
        meta.isFailingOnMultipleResults());

    // lookup the values!
    if (log.isDetailed()) {
      logDetailed(
          BaseMessages.getString(PKG, "DatabaseLookup.Log.CheckingRow")
              + getInputRowMeta().getString(r));
    }

    data.keynrs = new int[meta.getStreamKeyField1().length];
    data.keynrs2 = new int[meta.getStreamKeyField1().length];

    for (int i = 0; i < meta.getStreamKeyField1().length; i++) {
      data.keynrs[i] = getInputRowMeta().indexOfValue(meta.getStreamKeyField1()[i]);
      if (data.keynrs[i] < 0 // couldn't find field!
          && !"IS NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) // No field needed!
          && !"IS NOT NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) // No field needed!
      ) {
        throw new KettleStepException(
            BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired1.Exception")
                + meta.getStreamKeyField1()[i]
                + BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired2.Exception"));
      }
      data.keynrs2[i] = getInputRowMeta().indexOfValue(meta.getStreamKeyField2()[i]);
      if (data.keynrs2[i] < 0 // couldn't find field!
          && "BETWEEN".equalsIgnoreCase(meta.getKeyCondition()[i]) // 2 fields needed!
      ) {
        throw new KettleStepException(
            BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired3.Exception")
                + meta.getStreamKeyField2()[i]
                + BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired4.Exception"));
      }
      if (log.isDebug()) {
        logDebug(
            BaseMessages.getString(PKG, "DatabaseLookup.Log.FieldHasIndex1")
                + meta.getStreamKeyField1()[i]
                + BaseMessages.getString(PKG, "DatabaseLookup.Log.FieldHasIndex2")
                + data.keynrs[i]);
      }
    }

    data.nullif = new Object[meta.getReturnValueField().length];

    for (int i = 0; i < meta.getReturnValueField().length; i++) {
      ValueMetaInterface stringMeta = new ValueMeta("string", ValueMetaInterface.TYPE_STRING);
      ValueMetaInterface returnMeta = data.outputRowMeta.getValueMeta(i + getInputRowMeta().size());

      if (!Const.isEmpty(meta.getReturnValueDefault()[i])) {
        data.nullif[i] = returnMeta.convertData(stringMeta, meta.getReturnValueDefault()[i]);
      } else {
        data.nullif[i] = null;
      }
    }

    // Determine the types...
    data.keytypes = new int[meta.getTableKeyField().length];

    String schemaTable =
        meta.getDatabaseMeta()
            .getQuotedSchemaTableCombination(
                environmentSubstitute(meta.getSchemaName()),
                environmentSubstitute(meta.getTablename()));
    RowMetaInterface fields = data.db.getTableFields(schemaTable);
    if (fields != null) {
      // Fill in the types...
      for (int i = 0; i < meta.getTableKeyField().length; i++) {
        ValueMetaInterface key = fields.searchValueMeta(meta.getTableKeyField()[i]);
        if (key != null) {
          data.keytypes[i] = key.getType();
        } else {
          throw new KettleStepException(
              BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired5.Exception")
                  + meta.getTableKeyField()[i]
                  + BaseMessages.getString(PKG, "DatabaseLookup.ERROR0001.FieldRequired6.Exception"));
        }
      }
    } else {
      throw new KettleStepException(
          BaseMessages.getString(PKG, "DatabaseLookup.ERROR0002.UnableToDetermineFieldsOfTable")
              + schemaTable + "]");
    }

    // Count the number of values in the lookup as well as the metadata to send along with it.
    //
    data.lookupMeta = new RowMeta();

    for (int i = 0; i < meta.getStreamKeyField1().length; i++) {
      if (data.keynrs[i] >= 0) {
        ValueMetaInterface inputValueMeta = getInputRowMeta().getValueMeta(data.keynrs[i]);

        // Try to convert the type if needed in a clone: we don't want to
        // change the type in the original row.
        //
        ValueMetaInterface value = ValueMetaFactory.cloneValueMeta(inputValueMeta, data.keytypes[i]);
        data.lookupMeta.addValueMeta(value);
      }
      if (data.keynrs2[i] >= 0) {
        ValueMetaInterface inputValueMeta = getInputRowMeta().getValueMeta(data.keynrs2[i]);

        // Try to convert the type if needed in a clone: we don't want to
        // change the type in the original row.
        //
        ValueMetaInterface value = ValueMetaFactory.cloneValueMeta(inputValueMeta, data.keytypes[i]);
        data.lookupMeta.addValueMeta(value);
      }
    }

    // We also want to know the metadata of the return values beforehand (null handling)
    data.returnMeta = new RowMeta();

    for (int i = 0; i < meta.getReturnValueField().length; i++) {
      ValueMetaInterface v = data.outputRowMeta.getValueMeta(getInputRowMeta().size() + i).clone();
      data.returnMeta.addValueMeta(v);
    }

    // If the user selected to load all data into the cache at startup, that's what we do now...
    //
    if (meta.isCached() && meta.isLoadingAllDataInCache()) {
      loadAllTableDataIntoTheCache();
    }
  }

  if (log.isRowLevel()) {
    logRowlevel(
        BaseMessages.getString(PKG, "DatabaseLookup.Log.GotRowFromPreviousStep")
            + getInputRowMeta().getString(r));
  }

  try {
    // add new lookup values to the row
    Object[] outputRow = lookupValues(getInputRowMeta(), r);

    if (outputRow != null) {
      // copy row to output rowset(s)
      putRow(data.outputRowMeta, outputRow);

      if (log.isRowLevel()) {
        logRowlevel(
            BaseMessages.getString(PKG, "DatabaseLookup.Log.WroteRowToNextStep")
                + getInputRowMeta().getString(r));
      }
      if (checkFeedback(getLinesRead())) {
        logBasic("linenr " + getLinesRead());
      }
    }
  } catch (KettleException e) {
    if (getStepMeta().isDoingErrorHandling()) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      logError(
          BaseMessages.getString(PKG, "DatabaseLookup.ERROR003.UnexpectedErrorDuringProcessing")
              + e.getMessage());
      setErrors(1);
      stopAll();
      setOutputDone(); // signal end to receiver(s)
      return false;
    }

    if (sendToErrorRow) {
      // Simply add this row to the error rows
      putError(getInputRowMeta(), r, 1, errorMessage, null, "DBLOOKUPD001");
    }
  }

  return true;
}
private Object[] buildRow() throws KettleException {
  // Create new row...
  Object[] outputRowData = null;

  if (data.readrow != null) {
    outputRowData = data.readrow.clone();
  } else {
    outputRowData = buildEmptyRow();
  }

  // Read fields...
  for (int i = 0; i < data.nrInputFields; i++) {
    // Get field
    JsonInputField field = meta.getInputFields()[i];

    // get json array for field
    JSONArray jsona = data.resultList.get(i).getJSONArray();

    String nodevalue = null;
    if (jsona != null) {
      Object jo = jsona.get(data.recordnr);
      if (jo != null) {
        nodevalue = jo.toString();
      }
    }

    // Do trimming
    switch (field.getTrimType()) {
      case JsonInputField.TYPE_TRIM_LEFT:
        nodevalue = Const.ltrim(nodevalue);
        break;
      case JsonInputField.TYPE_TRIM_RIGHT:
        nodevalue = Const.rtrim(nodevalue);
        break;
      case JsonInputField.TYPE_TRIM_BOTH:
        nodevalue = Const.trim(nodevalue);
        break;
      default:
        break;
    }

    if (meta.isInFields()) {
      // Add result field to input stream
      outputRowData = RowDataUtil.addValueData(outputRowData, data.totalpreviousfields + i, nodevalue);
    }

    // Do conversions
    //
    ValueMetaInterface targetValueMeta = data.outputRowMeta.getValueMeta(data.totalpreviousfields + i);
    ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta(data.totalpreviousfields + i);
    outputRowData[data.totalpreviousfields + i] = targetValueMeta.convertData(sourceValueMeta, nodevalue);

    // Do we need to repeat this field if it is null?
    if (meta.getInputFields()[i].isRepeated()) {
      if (data.previousRow != null && Const.isEmpty(nodevalue)) {
        outputRowData[data.totalpreviousfields + i] = data.previousRow[data.totalpreviousfields + i];
      }
    }
  } // End of loop over fields...

  // When we have an input stream, the row index takes the previous fields into account.
  int rowIndex = data.totalpreviousfields + data.nrInputFields;

  // See if we need to add the filename to the row...
  if (meta.includeFilename() && !Const.isEmpty(meta.getFilenameField())) {
    outputRowData[rowIndex++] = data.filename;
  }
  // See if we need to add the row number to the row...
  if (meta.includeRowNumber() && !Const.isEmpty(meta.getRowNumberField())) {
    outputRowData[rowIndex++] = new Long(data.rownr);
  }
  // Possibly add short filename...
  if (meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0) {
    outputRowData[rowIndex++] = data.shortFilename;
  }
  // Add extension
  if (meta.getExtensionField() != null && meta.getExtensionField().length() > 0) {
    outputRowData[rowIndex++] = data.extension;
  }
  // Add path
  if (meta.getPathField() != null && meta.getPathField().length() > 0) {
    outputRowData[rowIndex++] = data.path;
  }
  // Add size
  if (meta.getSizeField() != null && meta.getSizeField().length() > 0) {
    outputRowData[rowIndex++] = new Long(data.size);
  }
  // Add hidden flag
  if (meta.isHiddenField() != null && meta.isHiddenField().length() > 0) {
    outputRowData[rowIndex++] = new Boolean(data.hidden);
  }
  // Add modification date
  if (meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0) {
    outputRowData[rowIndex++] = data.lastModificationDateTime;
  }
  // Add URI
  if (meta.getUriField() != null && meta.getUriField().length() > 0) {
    outputRowData[rowIndex++] = data.uriName;
  }
  // Add root URI
  if (meta.getRootUriField() != null && meta.getRootUriField().length() > 0) {
    outputRowData[rowIndex++] = data.rootUriName;
  }

  data.recordnr++;

  RowMetaInterface irow = getInputRowMeta();
  // Clone the row to make sure the next step doesn't change it in between...
  data.previousRow = irow == null ? outputRowData : (Object[]) irow.cloneRow(outputRowData);

  return outputRowData;
}