@Test
public void testHTTPResultDefaultRows() throws IOException {
  // A source file to upload and a scratch file that should receive the download.
  File uploadSource = getInputFile("existingFile1", ".tmp");
  File downloadTarget = File.createTempFile("downloadedFile1", ".tmp");
  uploadSource.deleteOnExit();
  downloadTarget.deleteOnExit();

  // One input row: URL to hit, file to upload, and where to store the response.
  RowMeta meta = new RowMeta();
  meta.addValueMeta(new ValueMetaString("URL"));
  meta.addValueMeta(new ValueMetaString("UPLOAD"));
  meta.addValueMeta(new ValueMetaString("DESTINATION"));
  Object[] row = new Object[] {
      HTTP_SERVER_BASEURL + "/uploadFile",
      uploadSource.getCanonicalPath(),
      downloadTarget.getCanonicalPath() };

  List<RowMetaAndData> inputRows = new ArrayList<RowMetaAndData>();
  inputRows.add(new RowMetaAndData(meta, row));

  Result previous = new Result();
  previous.setRows(inputRows);

  // Run the HTTP entry once per incoming row, without registering result files.
  JobEntryHTTP entry = new JobEntryHTTP();
  entry.setParentJob(new Job());
  entry.setRunForEveryRow(true);
  entry.setAddFilenameToResult(false);
  entry.execute(previous, 0);

  // Round trip succeeded if the downloaded file matches what we uploaded.
  assertTrue(FileUtils.contentEquals(uploadSource, downloadTarget));
}
@Override public Result execute(Result prevResult, int k) throws KettleException { Result result = prevResult; ML_Classify direct = new ML_Classify(); direct.setName(this.getRecordsetName()); direct.setRecordsetName(this.getRecordsetName()); direct.setModel(model); direct.setIndependentVar(independentVar); direct.setClassifyType(classifyType); direct.setDataType(dataType); direct.setRidge(ridge); direct.setEpsilon(epsilon); direct.setMaxIter(maxIter); direct.setPasses(passes); direct.setAlpha(alpha); // private Text algType; //NaiveBayes, Logistic // private Text dependentVar; // 1 // private Text independentVar; // 2 // ml.setIterations(this.getIterations()); // ml.setThreshold(this.getThreshold()); // ml.setThreshold(this.getThreshold()); logBasic("{Iterate Job} Execute = " + direct.ecl()); logBasic("{Iterate Job} Previous =" + result.getLogText()); result.setResult(true); RowMetaAndData data = new RowMetaAndData(); data.addValue("ecl", Value.VALUE_TYPE_STRING, direct.ecl()); List list = result.getRows(); list.add(data); String eclCode = ""; if (list == null) { list = new ArrayList(); } else { for (int i = 0; i < list.size(); i++) { RowMetaAndData rowData = (RowMetaAndData) list.get(i); String code = rowData.getString("ecl", null); if (code != null) { eclCode += code; } } logBasic("{Iterate Job} ECL Code =" + eclCode); } result.setRows(list); return result; }
/**
 * Builds the ECL for a Rollup operation, appends it as a new "ecl" row on the
 * job result, and lets {@code parseEclFromRowData} log the accumulated code.
 *
 * @param prevResult result handed in from the previous job entry
 * @param k          the entry number (unused here)
 * @return the modified result, marked successful, with this entry's row added;
 *         or the stopped result untouched
 * @throws KettleException if row data cannot be read
 */
@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = modifyResults(prevResult);
  if (result.isStopped()) {
    return result;
  }

  Rollup rollup = new Rollup();
  rollup.setName(this.getRecordsetName());
  rollup.setRecordset(this.getRecordset());
  // NOTE(review): record format is populated from getRecordset() — looks like a
  // copy-paste of the line above; confirm whether getRecordFormat() was intended.
  rollup.setRecordFormat(this.getRecordset());
  rollup.setRunLocal(this.getRunLocal());
  rollup.setCondition(this.getCondition());
  rollup.setFieldlist(this.getFieldlist());
  if (this.group.equalsIgnoreCase("yes")) {
    rollup.setGroup("GROUP");
  } else {
    rollup.setGroup("");
  }
  rollup.setTransformName(this.getTransformName());
  rollup.setTransform(generateEclForMapperGrid());

  logBasic("{rollup Job} Execute = " + rollup.ecl());
  logBasic("{rollup Job} Previous =" + result.getLogText());
  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, rollup.ecl());

  // Fix: the original added to the list with no null guard, so a null
  // getRows() would have thrown NPE. Guard before use.
  List list = result.getRows();
  if (list == null) {
    list = new ArrayList();
  }
  list.add(data);

  // Logs the accumulated ECL; the returned string itself is not needed here.
  parseEclFromRowData(list);

  result.setRows(list);
  return result;
}
/**
 * Builds the ECL for a Group operation, appends it as a new "ecl" row on the
 * job result, and logs the ECL code accumulated so far across entries.
 *
 * @param prevResult result handed in from the previous job entry
 * @param k          the entry number (unused here)
 * @return the same result object, marked successful, with this entry's row added
 * @throws KettleException if row data cannot be read
 */
@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = prevResult;

  logBasic("{Group job} Creating Group object");
  Group group = new Group();
  logBasic("{Group job} Group object created");
  group.setName(this.getRecordSetName());
  group.setRecordSet(this.getRecordSet());
  group.setBreakCriteria(this.getBreakCriteria());
  group.setIsAll(this.getIsAll());
  group.setRunLocal(this.getIsRunLocal());

  logBasic("{Group job} Execute = " + group.ecl());
  logBasic("{Group job} Previous = " + result.getLogText());
  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, group.ecl());

  // Fix: guard against a null row list BEFORE touching it. The original called
  // list.add(data) first and only then checked for null, so the null branch was
  // unreachable and a null getRows() would have thrown NPE instead.
  List list = result.getRows();
  if (list == null) {
    list = new ArrayList();
  }
  list.add(data);

  // Concatenate every "ecl" snippet accumulated so far, for the log.
  StringBuilder eclCode = new StringBuilder();
  for (int i = 0; i < list.size(); i++) {
    RowMetaAndData rowData = (RowMetaAndData) list.get(i);
    String code = rowData.getString("ecl", null);
    if (code != null) {
      eclCode.append(code);
    }
  }
  logBasic("{Group job} ECL Code = " + eclCode);

  result.setRows(list);
  return result;
}
/**
 * Builds the ECL for a Sort operation, appends it as a new "ecl" row on the
 * job result, and logs the ECL code accumulated so far across entries.
 *
 * @param prevResult result handed in from the previous job entry
 * @param k          the entry number (unused here)
 * @return the same result object, marked successful, with this entry's row added
 * @throws KettleException if row data cannot be read
 */
@Override
public Result execute(Result prevResult, int k) throws KettleException {
  Result result = prevResult;

  Sort sort = new Sort();
  sort.setFields(getFields());
  sort.setDatasetName(getDatasetName());
  sort.setName(getRecordsetName());

  logBasic("{Sort Job} Execute = " + sort.ecl());
  logBasic("{Sort Job} Previous =" + result.getLogText());
  result.setResult(true);

  RowMetaAndData data = new RowMetaAndData();
  data.addValue("ecl", Value.VALUE_TYPE_STRING, sort.ecl());

  // Fix: guard against a null row list BEFORE touching it. The original called
  // list.add(data) first and only then checked for null, so the null branch was
  // unreachable and a null getRows() would have thrown NPE instead.
  List list = result.getRows();
  if (list == null) {
    list = new ArrayList();
  }
  list.add(data);

  // Concatenate every "ecl" snippet accumulated so far, for the log.
  StringBuilder eclCode = new StringBuilder();
  for (int i = 0; i < list.size(); i++) {
    RowMetaAndData rowData = (RowMetaAndData) list.get(i);
    String code = rowData.getString("ecl", null);
    if (code != null) {
      eclCode.append(code);
    }
  }
  logBasic("{Sort Job} ECL Code =" + eclCode);

  result.setRows(list);
  return result;
}