@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = prevResult;

  ML_Classify direct = new ML_Classify();
  direct.setName(this.getRecordsetName());
  direct.setRecordsetName(this.getRecordsetName());
  direct.setModel(model);
  direct.setIndependentVar(independentVar);
  direct.setClassifyType(classifyType);
  direct.setDataType(dataType);
  direct.setRidge(ridge);
  direct.setEpsilon(epsilon);
  direct.setMaxIter(maxIter);
  direct.setPasses(passes);
  direct.setAlpha(alpha);
  // private Text algType; // NaiveBayes, Logistic
  // private Text dependentVar; // 1
  // private Text independentVar; // 2
  // ml.setIterations(this.getIterations());
  // ml.setThreshold(this.getThreshold());

  logBasic("{Iterate Job} Execute = " + direct.ecl());
  logBasic("{Iterate Job} Previous = " + result.getLogText());

  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, direct.ecl());

  // Guard against a missing row list before adding the generated ECL to it.
  List<RowMetaAndData> list = result.getRows();
  if (list == null) {
    list = new ArrayList<RowMetaAndData>();
  }
  list.add(data);

  // Concatenate the ECL accumulated so far, for logging.
  String eclCode = "";
  for (int i = 0; i < list.size(); i++) {
    RowMetaAndData rowData = list.get(i);
    String code = rowData.getString("ecl", null);
    if (code != null) {
      eclCode += code;
    }
  }
  logBasic("{Iterate Job} ECL Code = " + eclCode);

  result.setRows(list);
  return result;
}
@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = modifyResults(prevResult);
  if (result.isStopped()) {
    return result;
  }

  Rollup rollup = new Rollup();
  rollup.setName(this.getRecordsetName());
  rollup.setRecordset(this.getRecordset());
  rollup.setRecordFormat(this.getRecordset());
  rollup.setRunLocal(this.getRunLocal());
  rollup.setCondition(this.getCondition());
  rollup.setFieldlist(this.getFieldlist());
  if (this.group.equalsIgnoreCase("yes")) {
    rollup.setGroup("GROUP");
  } else {
    rollup.setGroup("");
  }
  rollup.setTransformName(this.getTransformName());
  rollup.setTransform(generateEclForMapperGrid());

  logBasic("{rollup Job} Execute = " + rollup.ecl());
  logBasic("{rollup Job} Previous = " + result.getLogText());

  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, rollup.ecl());

  // Guard against a missing row list before adding the generated ECL to it.
  List<RowMetaAndData> list = result.getRows();
  if (list == null) {
    list = new ArrayList<RowMetaAndData>();
  }
  list.add(data);

  String eclCode = parseEclFromRowData(list);
  logBasic("{rollup Job} ECL Code = " + eclCode);

  result.setRows(list);
  return result;
}
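// parseEclFromRowData(), modifyResults() and generateEclForMapperGrid() are helpers of this job
// entry that are not shown here. A minimal sketch of parseEclFromRowData, assuming it does what
// the inline loop it replaced did (the same loop still appears verbatim in the other ECL entries
// above and below): concatenate the "ecl" field of every result row. The signature is an
// assumption, not the entry's confirmed API.
private String parseEclFromRowData(List<RowMetaAndData> list) throws KettleValueException {
  String eclCode = "";
  if (list == null) {
    return eclCode;
  }
  for (RowMetaAndData rowData : list) {
    String code = rowData.getString("ecl", null);
    if (code != null) {
      eclCode += code;
    }
  }
  return eclCode;
}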
@Override public Result execute(Result prevResult, int k) throws KettleException { Result result = prevResult; logBasic("{Group job} Creating Group object"); Group group = new Group(); logBasic("{Group job} Group object created"); group.setName(this.getRecordSetName()); group.setRecordSet(this.getRecordSet()); group.setBreakCriteria(this.getBreakCriteria()); group.setIsAll(this.getIsAll()); group.setRunLocal(this.getIsRunLocal()); logBasic("{Group job} Execute = " + group.ecl()); logBasic("{Group job} Previous = " + result.getLogText()); result.setResult(true); RowMetaAndData data = new RowMetaAndData(); data.addValue("ecl", Value.VALUE_TYPE_STRING, group.ecl()); List list = result.getRows(); list.add(data); String eclCode = ""; if (list == null) { list = new ArrayList(); } else { for (int i = 0; i < list.size(); i++) { RowMetaAndData rowData = (RowMetaAndData) list.get(i); String code = rowData.getString("ecl", null); if (code != null) eclCode += code; } logBasic("{Group job} ECL Code = " + eclCode); } result.setRows(list); return result; }
@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = prevResult;

  Sort sort = new Sort();
  sort.setFields(getFields());
  sort.setDatasetName(getDatasetName());
  sort.setName(getRecordsetName());

  logBasic("{Sort Job} Execute = " + sort.ecl());
  logBasic("{Sort Job} Previous = " + result.getLogText());

  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, sort.ecl());

  // Guard against a missing row list before adding the generated ECL to it.
  List<RowMetaAndData> list = result.getRows();
  if (list == null) {
    list = new ArrayList<RowMetaAndData>();
  }
  list.add(data);

  String eclCode = "";
  for (int i = 0; i < list.size(); i++) {
    RowMetaAndData rowData = list.get(i);
    String code = rowData.getString("ecl", null);
    if (code != null) {
      eclCode += code;
    }
  }
  logBasic("{Sort Job} ECL Code = " + eclCode);

  result.setRows(list);
  return result;
}
public Result execute(Result previousResult, int nr) throws KettleException {
  Result result = previousResult;
  int NrErrors = 0;
  int NrSuccess = 0;

  // Check output parameters
  int nrOutputProps = getOutputPropertyName() == null ? 0 : getOutputPropertyName().length;
  if (nrOutputProps > 0) {
    outputProperties = new Properties();
    for (int i = 0; i < nrOutputProps; i++) {
      outputProperties.put(
          getOutputPropertyName()[i], environmentSubstitute(getOutputPropertyValue()[i]));
    }
    setOutputProperties = true;
  }

  // Check parameters
  nrParams = getParameterField() == null ? 0 : getParameterField().length;
  if (nrParams > 0) {
    nameOfParams = new String[nrParams];
    valueOfParams = new String[nrParams];
    for (int i = 0; i < nrParams; i++) {
      String name = environmentSubstitute(getParameterName()[i]);
      String value = environmentSubstitute(getParameterField()[i]);
      if (Const.isEmpty(value)) {
        throw new KettleStepException(
            BaseMessages.getString(PKG, "Xslt.Exception.ParameterFieldMissing", name, i));
      }
      nameOfParams[i] = name;
      valueOfParams[i] = value;
    }
    useParameters = true;
  }

  List<RowMetaAndData> rows = result.getRows();
  if (isFilenamesFromPrevious()) {
    if (log.isDetailed()) {
      logDetailed(
          BaseMessages.getString(
              PKG,
              "JobEntryXSLT.Log.ArgFromPrevious.Found",
              (rows != null ? rows.size() : 0) + ""));
    }
  }

  if (isFilenamesFromPrevious() && rows != null) {
    // Copy the input row to the (command line) arguments
    RowMetaAndData resultRow = null;
    for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
      resultRow = rows.get(iteration);

      // Get filenames (xml, xsl, output filename)
      String xmlfilename_previous = resultRow.getString(0, null);
      String xslfilename_previous = resultRow.getString(1, null);
      String outputfilename_previous = resultRow.getString(2, null);

      if (!Const.isEmpty(xmlfilename_previous)
          && !Const.isEmpty(xslfilename_previous)
          && !Const.isEmpty(outputfilename_previous)) {
        if (processOneXMLFile(
            xmlfilename_previous,
            xslfilename_previous,
            outputfilename_previous,
            result,
            parentJob)) {
          NrSuccess++;
        } else {
          NrErrors++;
        }
      } else {
        // We failed!
        logError(BaseMessages.getString(PKG, "JobEntryXSLT.AllFilesNotNull.Label"));
        NrErrors++;
      }
    }
  } else {
    String realxmlfilename = getRealxmlfilename();
    String realxslfilename = getRealxslfilename();
    String realoutputfilename = getoutputfilename();

    if (!Const.isEmpty(realxmlfilename)
        && !Const.isEmpty(realxslfilename)
        && !Const.isEmpty(realoutputfilename)) {
      if (processOneXMLFile(
          realxmlfilename, realxslfilename, realoutputfilename, result, parentJob)) {
        NrSuccess++;
      } else {
        NrErrors++;
      }
    } else {
      // We failed!
      logError(BaseMessages.getString(PKG, "JobEntryXSLT.AllFilesNotNull.Label"));
      NrErrors++;
    }
  }

  result.setResult(NrErrors == 0);
  result.setNrErrors(NrErrors);
  result.setNrLinesWritten(NrSuccess);

  return result;
}
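// When "filenames from previous" is enabled, this entry reads three string fields per incoming
// row, by position: 0 = XML file, 1 = XSL file, 2 = output file (see resultRow.getString(0..2)
// above). A minimal sketch of how an upstream entry could publish such a row; the method name,
// field names and paths are illustrative assumptions, not part of this entry's API.
private void addXsltFilenamesRow(Result previousResult) {
  RowMetaAndData row = new RowMetaAndData();
  row.addValue("xmlfilename", ValueMetaInterface.TYPE_STRING, "/tmp/input.xml");      // index 0
  row.addValue("xslfilename", ValueMetaInterface.TYPE_STRING, "/tmp/transform.xsl");  // index 1
  row.addValue("outputfilename", ValueMetaInterface.TYPE_STRING, "/tmp/output.html"); // index 2
  previousResult.getRows().add(row);
}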
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;

  oneFileLocked = false;
  result.setResult(true);

  try {
    if (argFromPrevious) {
      if (isDetailed()) {
        logDetailed(
            BaseMessages.getString(
                PKG,
                "JobEntryCheckFilesLocked.FoundPreviousRows",
                String.valueOf((rows != null ? rows.size() : 0))));
      }
    }

    if (argFromPrevious && rows != null) {
      // Copy the input row to the (command line) arguments
      for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
        resultRow = rows.get(iteration);

        // Get values from previous result
        String filefolder_previous = resultRow.getString(0, "");
        String fmasks_previous = resultRow.getString(1, "");

        // ok we can process this file/folder
        if (isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG,
                  "JobEntryCheckFilesLocked.ProcessingRow",
                  filefolder_previous,
                  fmasks_previous));
        }

        ProcessFile(filefolder_previous, fmasks_previous);
      }
    } else if (arguments != null) {
      for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
        // ok we can process this file/folder
        if (isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG, "JobEntryCheckFilesLocked.ProcessingArg", arguments[i], filemasks[i]));
        }

        ProcessFile(arguments[i], filemasks[i]);
      }
    }

    if (oneFileLocked) {
      result.setResult(false);
      result.setNrErrors(1);
    }
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "JobEntryCheckFilesLocked.ErrorRunningJobEntry", e));
  }

  return result;
}
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  result.setResult(false);

  // see PDI-10270, PDI-10644 for details
  boolean oldBehavior =
      "Y".equalsIgnoreCase(
          getVariable(Const.KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES, "N"));

  String countSQLStatement = null;
  long rowsCount = 0;
  long errCount = 0;

  boolean successOK = false;

  int nrRowsLimit = Const.toInt(environmentSubstitute(limit), 0);
  if (log.isDetailed()) {
    logDetailed(
        BaseMessages.getString(
            PKG, "JobEntryEvalTableContent.Log.nrRowsLimit", "" + nrRowsLimit));
  }

  if (connection != null) {
    Database db = new Database(this, connection);
    db.shareVariablesWith(this);
    try {
      db.connect(parentJob.getTransactionId(), null);

      if (iscustomSQL) {
        String realCustomSQL = customSQL;
        if (isUseVars) {
          realCustomSQL = environmentSubstitute(realCustomSQL);
        }
        if (log.isDebug()) {
          logDebug(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.EnteredCustomSQL", realCustomSQL));
        }

        if (!Const.isEmpty(realCustomSQL)) {
          countSQLStatement = realCustomSQL;
        } else {
          errCount++;
          logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.Error.NoCustomSQL"));
        }
      } else {
        String realTablename = environmentSubstitute(tablename);
        String realSchemaname = environmentSubstitute(schemaname);

        if (!Const.isEmpty(realTablename)) {
          if (!Const.isEmpty(realSchemaname)) {
            countSQLStatement =
                selectCount
                    + db.getDatabaseMeta()
                        .getQuotedSchemaTableCombination(realSchemaname, realTablename);
          } else {
            countSQLStatement = selectCount + db.getDatabaseMeta().quoteField(realTablename);
          }
        } else {
          errCount++;
          logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.Error.NoTableName"));
        }
      }

      if (countSQLStatement != null) {
        if (log.isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.RunSQLStatement", countSQLStatement));
        }

        if (iscustomSQL) {
          if (isClearResultList) {
            result.getRows().clear();
          }

          List<Object[]> ar = db.getRows(countSQLStatement, 0);
          if (ar != null) {
            rowsCount = ar.size();

            // Add the returned rows to the result
            RowMetaInterface rowMeta = db.getQueryFields(countSQLStatement, false);

            List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
            for (int i = 0; i < ar.size(); i++) {
              rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
            }
            if (isAddRowsResult && iscustomSQL) {
              if (rows != null) {
                result.getRows().addAll(rows);
              }
            }
          } else {
            if (log.isDebug()) {
              logDebug(
                  BaseMessages.getString(
                      PKG,
                      "JobEntryEvalTableContent.Log.customSQLreturnedNothing",
                      countSQLStatement));
            }
          }
        } else {
          RowMetaAndData row = db.getOneRow(countSQLStatement);
          if (row != null) {
            rowsCount = row.getInteger(0);
          }
        }

        if (log.isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG, "JobEntryEvalTableContent.Log.NrRowsReturned", "" + rowsCount));
        }

        switch (successCondition) {
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
            successOK = (rowsCount == nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
            successOK = (rowsCount != nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
            successOK = (rowsCount < nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
            successOK = (rowsCount <= nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
            successOK = (rowsCount > nrRowsLimit);
            break;
          case JobEntryEvalTableContent.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
            successOK = (rowsCount >= nrRowsLimit);
            break;
          default:
            break;
        }

        if (!successOK && oldBehavior) {
          errCount++;
        }
      } // end if countSQLStatement != null
    } catch (KettleException dbe) {
      errCount++;
      logError(
          BaseMessages.getString(
              PKG, "JobEntryEvalTableContent.Error.RunningEntry", dbe.getMessage()));
    } finally {
      if (db != null) {
        db.disconnect();
      }
    }
  } else {
    errCount++;
    logError(BaseMessages.getString(PKG, "JobEntryEvalTableContent.NoDbConnection"));
  }

  result.setResult(successOK);
  result.setNrLinesRead(rowsCount);
  result.setNrErrors(errCount);

  return result;
}
protected boolean SQLDataOK(
    Result result,
    long nrRowsLimit,
    String realSchemaName,
    String realTableName,
    String customSQL)
    throws KettleException {
  String countStatement = null;
  long rowsCount = 0;
  boolean successOK = false;
  List<Object[]> ar = null;
  RowMetaInterface rowMeta = null;
  Database db = new Database(this, connection);
  db.shareVariablesWith(this);

  try {
    db.connect(parentJob.getTransactionId(), null);

    if (iscustomSQL) {
      countStatement = customSQL;
    } else {
      if (!Const.isEmpty(realSchemaName)) {
        countStatement =
            selectCount
                + db.getDatabaseMeta()
                    .getQuotedSchemaTableCombination(realSchemaName, realTableName);
      } else {
        countStatement = selectCount + db.getDatabaseMeta().quoteField(realTableName);
      }
    }

    if (countStatement != null) {
      if (log.isDetailed()) {
        logDetailed(
            BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.RunSQLStatement", countStatement));
      }

      if (iscustomSQL) {
        ar = db.getRows(countStatement, 0);
        if (ar != null) {
          rowsCount = ar.size();
        } else {
          if (log.isDebug()) {
            logDebug(
                BaseMessages.getString(
                    PKG, "JobEntryWaitForSQL.Log.customSQLreturnedNothing", countStatement));
          }
        }
      } else {
        RowMetaAndData row = db.getOneRow(countStatement);
        if (row != null) {
          rowsCount = row.getInteger(0);
        }
      }
      if (log.isDetailed()) {
        logDetailed(
            BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.NrRowsReturned", "" + rowsCount));
      }

      switch (successCondition) {
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
          successOK = (rowsCount == nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
          successOK = (rowsCount != nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
          successOK = (rowsCount < nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
          successOK = (rowsCount <= nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
          successOK = (rowsCount > nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
          successOK = (rowsCount >= nrRowsLimit);
          break;
        default:
          break;
      }
    } // end if countStatement != null
  } catch (KettleDatabaseException dbe) {
    logError(
        BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.RunningEntry", dbe.getMessage()));
  } finally {
    if (db != null) {
      if (isAddRowsResult && iscustomSQL && ar != null) {
        rowMeta = db.getQueryFields(countStatement, false);
      }
      db.disconnect();
    }
  }

  if (successOK) {
    // Add the returned rows to the result
    if (isAddRowsResult && iscustomSQL && ar != null) {
      List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
      for (int i = 0; i < ar.size(); i++) {
        rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
      }
      if (rows != null) {
        result.getRows().addAll(rows);
      }
    }
  }
  return successOK;
}
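// The success-condition switch above duplicates the one in JobEntryEvalTableContent.execute().
// A minimal sketch of how the check could be factored out within this entry; the helper name is
// an assumption, while the SUCCESS_CONDITION_* constants are the ones already referenced above.
private static boolean isSuccessConditionMet(int successCondition, long rowsCount, long nrRowsLimit) {
  switch (successCondition) {
    case SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
      return rowsCount == nrRowsLimit;
    case SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
      return rowsCount != nrRowsLimit;
    case SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
      return rowsCount < nrRowsLimit;
    case SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
      return rowsCount <= nrRowsLimit;
    case SUCCESS_CONDITION_ROWS_COUNT_GREATER:
      return rowsCount > nrRowsLimit;
    case SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
      return rowsCount >= nrRowsLimit;
    default:
      return false;
  }
}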
@Override
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  result.setResult(false);
  result.setNrErrors(1);

  String realCustomSQL = null;
  String realTablename = environmentSubstitute(tablename);
  String realSchemaname = environmentSubstitute(schemaname);

  if (connection == null) {
    logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.NoDbConnection"));
    return result;
  }

  if (iscustomSQL) {
    // Clear result list rows
    if (isClearResultList) {
      result.getRows().clear();
    }

    realCustomSQL = customSQL;
    if (isUseVars) {
      realCustomSQL = environmentSubstitute(realCustomSQL);
    }
    if (log.isDebug()) {
      logDebug(
          BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.EnteredCustomSQL", realCustomSQL));
    }

    if (Const.isEmpty(realCustomSQL)) {
      logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.NoCustomSQL"));
      return result;
    }
  } else {
    if (Const.isEmpty(realTablename)) {
      logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.NoTableName"));
      return result;
    }
  }

  try {
    // Check the connection: connect and disconnect
    checkConnection();

    // Start time (in seconds)
    long timeStart = System.currentTimeMillis() / 1000;

    int nrRowsLimit = Const.toInt(environmentSubstitute(rowsCountValue), 0);
    if (log.isDetailed()) {
      logDetailed(
          BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.nrRowsLimit", "" + nrRowsLimit));
    }

    long iMaximumTimeout =
        Const.toInt(
            environmentSubstitute(maximumTimeout), Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0));
    long iCycleTime =
        Const.toInt(
            environmentSubstitute(checkCycleTime), Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 0));

    //
    // Sanity check on some values, and complain on insanity
    //
    if (iMaximumTimeout < 0) {
      iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0);
      logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout);
    }

    if (iCycleTime < 1) {
      // If lower than 1, set to the default
      iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1);
      logBasic("Check cycle time invalid, reset to " + iCycleTime);
    }

    if (iMaximumTimeout == 0) {
      logBasic("Waiting indefinitely for SQL data");
    } else {
      logBasic("Waiting " + iMaximumTimeout + " seconds for SQL data");
    }

    boolean continueLoop = true;
    while (continueLoop && !parentJob.isStopped()) {
      if (SQLDataOK(result, nrRowsLimit, realSchemaname, realTablename, realCustomSQL)) {
        // SQL data exists, we're happy to exit
        logBasic("Detected SQL data within timeout");
        result.setResult(true);
        continueLoop = false;
      } else {
        long now = System.currentTimeMillis() / 1000;

        if ((iMaximumTimeout > 0) && (now > (timeStart + iMaximumTimeout))) {
          continueLoop = false;

          // SQL data doesn't exist after timeout, either true or false
          if (isSuccessOnTimeout()) {
            logBasic("Didn't detect SQL data before timeout, success");
            result.setResult(true);
          } else {
            logBasic("Didn't detect SQL data before timeout, failure");
            result.setResult(false);
          }
        }

        // Sleep algorithm
        long sleepTime = 0;

        if (iMaximumTimeout == 0) {
          sleepTime = iCycleTime;
        } else {
          if ((now + iCycleTime) < (timeStart + iMaximumTimeout)) {
            sleepTime = iCycleTime;
          } else {
            sleepTime = iCycleTime - ((now + iCycleTime) - (timeStart + iMaximumTimeout));
          }
        }

        try {
          if (sleepTime > 0) {
            if (log.isDetailed()) {
              logDetailed("Sleeping " + sleepTime + " seconds before next check for SQL data");
            }
            Thread.sleep(sleepTime * 1000);
          }
        } catch (InterruptedException e) {
          // something strange happened
          result.setResult(false);
          continueLoop = false;
        }
      }
    }
  } catch (Exception e) {
    logBasic("Exception while waiting for SQL data: " + e.getMessage());
  }

  if (result.getResult()) {
    // Remove the error count set at the beginning of the method // PDI-15437
    result.setNrErrors(0);
  }

  return result;
}
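// A worked example of the sleep algorithm above, assuming iCycleTime = 60 and
// iMaximumTimeout = 300 (both in seconds): while the next full cycle still fits before the
// deadline, the entry sleeps a full cycle; otherwise it sleeps only up to the deadline.
//
//   timeStart = 0, now = 120  ->  120 + 60 <  300, so sleepTime = 60
//   timeStart = 0, now = 270  ->  270 + 60 >= 300, so sleepTime = 60 - (330 - 300) = 30
//
// so the final check happens right at the timeout boundary instead of overshooting it.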
public Result execute(Result result, int nr) throws KettleException {
  FileLoggingEventListener loggingEventListener = null;
  LogLevel shellLogLevel = parentJob.getLogLevel();

  if (setLogfile) {
    String realLogFilename = environmentSubstitute(getLogFilename());
    // We need to check here the log filename: if we do not have one, we must fail
    if (Const.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "JobEntryShell.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }

    try {
      loggingEventListener =
          new FileLoggingEventListener(getLogChannelId(), realLogFilename, setAppendLogfile);
      KettleLogStore.getAppender().addLoggingEventListener(loggingEventListener);
    } catch (KettleException e) {
      logError(
          BaseMessages.getString(
              PKG, "JobEntryShell.Error.UnableopenAppenderFile", getLogFilename(), e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    shellLogLevel = logFileLevel;
  }

  log.setLogLevel(shellLogLevel);

  result.setEntryNr(nr);

  // "Translate" the arguments for later
  String[] substArgs = null;
  if (arguments != null) {
    substArgs = new String[arguments.length];
    for (int idx = 0; idx < arguments.length; idx++) {
      substArgs[idx] = environmentSubstitute(arguments[idx]);
    }
  }

  int iteration = 0;
  String[] args = substArgs;
  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = result.getRows();

  if (log.isDetailed()) {
    logDetailed(
        BaseMessages.getString(
            PKG, "JobEntryShell.Log.FoundPreviousRows", "" + (rows != null ? rows.size() : 0)));
  }

  while ((first && !execPerRow)
      || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
    first = false;

    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }

    List<RowMetaAndData> cmdRows = null;

    if (execPerRow) {
      // Execute for each input row
      if (argFromPrevious) {
        // Copy the input row to the (command line) arguments
        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        }
      } else {
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
        newList.add(resultRow);
        cmdRows = newList;
      }
    } else {
      if (argFromPrevious) {
        // Only put the first row on the arguments
        args = null;
        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        } else {
          cmdRows = rows;
        }
      } else {
        // Keep it as it was...
        cmdRows = rows;
      }
    }

    executeShell(result, cmdRows, args);

    iteration++;
  }

  if (setLogfile) {
    if (loggingEventListener != null) {
      KettleLogStore.getAppender().removeLoggingEventListener(loggingEventListener);
      loggingEventListener.close();

      ResultFile resultFile =
          new ResultFile(
              ResultFile.FILE_TYPE_LOG,
              loggingEventListener.getFile(),
              parentJob.getJobname(),
              getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
    }
  }

  return result;
}