// Opens (or creates) the target MS Access database file and optionally registers it
// in the transformation's result-file list. Returns false when the file does not
// exist and creation is disabled; true otherwise.
private boolean OpenFile() throws Exception {
  data.oneFileOpened = true;
  // Resolve Kettle variables in the configured filename before use.
  String realFilename = environmentSubstitute(meta.getFilename());
  if (log.isBasic()) {
    logBasic(BaseMessages.getString(PKG, "AccessOutput.log.WritingToFile", realFilename));
  }
  FileObject fileObject = KettleVFS.getFileObject(realFilename, getTransMeta());
  // The Access Database API needs a java.io.File, so map the VFS object back to one.
  // NOTE(review): this assumes the VFS URL resolves to a local file — confirm for remote VFS schemes.
  File file = FileUtils.toFile(fileObject.getURL());

  // First open or create the access file
  if (!file.exists()) {
    if (meta.isFileCreated()) {
      // File is missing and the step is configured to create it.
      data.db = Database.create(file);
    } else {
      logError(
          BaseMessages.getString(PKG, "AccessOutput.InitError.FileDoesNotExist", realFilename));
      return false;
    }
  } else {
    // File already exists: open it for writing.
    data.db = Database.open(file);
  }

  // Add the filename to the result object...
  //
  if (meta.isAddToResultFiles()) {
    ResultFile resultFile =
        new ResultFile(
            ResultFile.FILE_TYPE_GENERAL, fileObject, getTransMeta().getName(), toString());
    resultFile.setComment("This file was created with an access output step");
    addResultFile(resultFile);
  }

  return true;
}
private void addFileToResultFilesname(FileObject file) throws Exception { if (meta.addResultFile()) { // Add this to the result file names... ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname()); resultFile.setComment("File was read by a LoadFileInput step"); addResultFile(resultFile); } }
private void addFileToResultFilesname(FileObject file) throws Exception { if (meta.addResultFile()) { // Add this to the result file names... ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname()); resultFile.setComment(BaseMessages.getString(PKG, "JsonInput.Log.FileAddedResult")); addResultFile(resultFile); } }
public void openNewFile(String baseFilename) throws KettleException { data.writer = null; ResultFile resultFile = null; String filename = buildFilename(environmentSubstitute(baseFilename), true); try { if (meta.isFileAsCommand()) { if (log.isDebug()) logDebug("Spawning external process"); if (data.cmdProc != null) { logError("Previous command not correctly terminated"); setErrors(1); } String cmdstr = environmentSubstitute(meta.getFileName()); if (Const.getOS().equals("Windows 95")) { cmdstr = "command.com /C " + cmdstr; } else { if (Const.getOS().startsWith("Windows")) { cmdstr = "cmd.exe /C " + cmdstr; } } if (log.isDetailed()) logDetailed("Starting: " + cmdstr); Runtime r = Runtime.getRuntime(); data.cmdProc = r.exec(cmdstr, EnvUtil.getEnvironmentVariablesForRuntimeExec()); data.writer = data.cmdProc.getOutputStream(); StreamLogger stdoutLogger = new StreamLogger(data.cmdProc.getInputStream(), "(stdout)"); StreamLogger stderrLogger = new StreamLogger(data.cmdProc.getErrorStream(), "(stderr)"); new Thread(stdoutLogger).start(); new Thread(stderrLogger).start(); } else { // Add this to the result file names... 
resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(filename), getTransMeta().getName(), getStepname()); resultFile.setComment("This file was created with a text file output step"); addResultFile(resultFile); OutputStream outputStream; if (!Const.isEmpty(meta.getFileCompression()) && !meta.getFileCompression().equals(FILE_COMPRESSION_TYPE_NONE)) { if (meta.getFileCompression().equals(FILE_COMPRESSION_TYPE_ZIP)) { if (log.isDetailed()) log.logDetailed(toString(), "Opening output stream in zipped mode"); if (checkPreviouslyOpened(filename)) { data.fos = KettleVFS.getOutputStream(filename, true); } else { data.fos = KettleVFS.getOutputStream(filename, meta.isFileAppended()); } data.zip = new ZipOutputStream(data.fos); File entry = new File(filename); ZipEntry zipentry = new ZipEntry(entry.getName()); zipentry.setComment("Compressed by Kettle"); data.zip.putNextEntry(zipentry); outputStream = data.zip; } else if (meta.getFileCompression().equals(FILE_COMPRESSION_TYPE_GZIP)) { if (log.isDetailed()) log.logDetailed(toString(), "Opening output stream in gzipped mode"); if (checkPreviouslyOpened(filename)) { data.fos = KettleVFS.getOutputStream(filename, true); } else { data.fos = KettleVFS.getOutputStream(filename, meta.isFileAppended()); } data.gzip = new GZIPOutputStream(data.fos); outputStream = data.gzip; } else { throw new KettleFileException("No compression method specified!"); } } else { if (log.isDetailed()) log.logDetailed(toString(), "Opening output stream in nocompress mode"); if (checkPreviouslyOpened(filename)) { data.fos = KettleVFS.getOutputStream(filename, true); } else { data.fos = KettleVFS.getOutputStream(filename, meta.isFileAppended()); } outputStream = data.fos; } if (!Const.isEmpty(meta.getEncoding())) { if (log.isDetailed()) log.logDetailed(toString(), "Opening output stream in encoding: " + meta.getEncoding()); data.writer = new BufferedOutputStream(outputStream, 5000); } else { if (log.isDetailed()) 
log.logDetailed(toString(), "Opening output stream in default encoding"); data.writer = new BufferedOutputStream(outputStream, 5000); } if (log.isDetailed()) logDetailed("Opened new file with name [" + filename + "]"); } } catch (Exception e) { throw new KettleException("Error opening new file : " + e.toString()); } // System.out.println("end of newFile(), splitnr="+splitnr); data.splitnr++; if (resultFile != null && meta.isAddToResultFiles()) { // Add this to the result file names... addResultFile(resultFile); } }
private boolean processOneXMLFile( String xmlfilename, String xslfilename, String outputfilename, Result result, Job parentJob) { boolean retval = false; FileObject xmlfile = null; FileObject xslfile = null; FileObject outputfile = null; try { xmlfile = KettleVFS.getFileObject(xmlfilename, this); xslfile = KettleVFS.getFileObject(xslfilename, this); outputfile = KettleVFS.getFileObject(outputfilename, this); if (xmlfile.exists() && xslfile.exists()) { if (outputfile.exists() && iffileexists == 2) { // Output file exists // User want to fail logError( BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label") + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label")); return retval; } else if (outputfile.exists() && iffileexists == 1) { // Do nothing if (log.isDebug()) { logDebug( BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label") + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label")); } retval = true; return retval; } else { if (outputfile.exists() && iffileexists == 0) { // the output file exists and user want to create new one with unique name // Format Date // Try to clean filename (without wildcard) String wildcard = outputfilename.substring(outputfilename.length() - 4, outputfilename.length()); if (wildcard.substring(0, 1).equals(".")) { // Find wildcard outputfilename = outputfilename.substring(0, outputfilename.length() - 4) + "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard; } else { // did not find wildcard outputfilename = outputfilename + "_" + StringUtil.getFormattedDateTimeNow(true); } if (log.isDebug()) { logDebug( BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists1.Label") + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileExists2.Label")); logDebug( BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileNameChange1.Label") + outputfilename + BaseMessages.getString(PKG, "JobEntryXSLT.OuputFileNameChange2.Label")); } } // Create 
transformer factory TransformerFactory factory = TransformerFactory.newInstance(); if (xsltfactory.equals(FACTORY_SAXON)) { // Set the TransformerFactory to the SAXON implementation. factory = new net.sf.saxon.TransformerFactoryImpl(); } if (log.isDetailed()) { log.logDetailed( BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerFactoryInfos"), BaseMessages.getString( PKG, "JobEntryXSL.Log.TransformerFactory", factory.getClass().getName())); } InputStream xslInputStream = KettleVFS.getInputStream(xslfile); InputStream xmlInputStream = KettleVFS.getInputStream(xmlfile); OutputStream os = null; try { // Use the factory to create a template containing the xsl file Templates template = factory.newTemplates(new StreamSource(xslInputStream)); // Use the template to create a transformer Transformer xformer = template.newTransformer(); if (log.isDetailed()) { log.logDetailed( BaseMessages.getString(PKG, "JobEntryXSL.Log.TransformerClassInfos"), BaseMessages.getString( PKG, "JobEntryXSL.Log.TransformerClass", xformer.getClass().getName())); } // Do we need to set output properties? if (setOutputProperties) { xformer.setOutputProperties(outputProperties); } // Do we need to pass parameters? 
if (useParameters) { for (int i = 0; i < nrParams; i++) { xformer.setParameter(nameOfParams[i], valueOfParams[i]); } } // Prepare the input and output files Source source = new StreamSource(xmlInputStream); os = KettleVFS.getOutputStream(outputfile, false); StreamResult resultat = new StreamResult(os); // Apply the xsl file to the source file and write the result to the output file xformer.transform(source, resultat); if (isAddFileToResult()) { // Add output filename to output files ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(outputfilename, this), parentJob.getJobname(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } // Everything is OK retval = true; } finally { try { xslInputStream.close(); } catch (IOException ignored) { // ignore IO Exception on close } try { xmlInputStream.close(); } catch (IOException ignored) { // ignore IO Exception on close } try { if (os != null) { os.close(); } } catch (IOException ignored) { // ignore IO Exception on close } } } } else { if (!xmlfile.exists()) { logError( BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist1.Label") + xmlfilename + BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist2.Label")); } if (!xslfile.exists()) { logError( BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist1.Label") + xmlfilename + BaseMessages.getString(PKG, "JobEntryXSLT.FileDoesNotExist2.Label")); } } } catch (Exception e) { logError( BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLST.Label") + BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXML1.Label") + xmlfilename + BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXML2.Label") + BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXSL1.Label") + xslfilename + BaseMessages.getString(PKG, "JobEntryXSLT.ErrorXLSTXSL2.Label") + e.getMessage()); } finally { try { if (xmlfile != null) { xmlfile.close(); } if (xslfile != null) { xslfile.close(); } if (outputfile != null) { 
outputfile.close(); } } catch (IOException e) { logError("Unable to close file", e); } } return retval; }
// Processes one incoming row naming a transformation or job to document.
// On end-of-stream (row == null) it renders the collected locations into one report
// file (unless output type is METADATA, in which case each row was already emitted).
// Returns true while more rows are expected, false when done.
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (AutoDocMeta) smi;
  data = (AutoDocData) sdi;

  Object[] row = getRow();
  if (row == null) {
    if (data.filenames.isEmpty()) {
      // Nothing to see here, move along!
      //
      setOutputDone();
      return false;
    }

    // End of the line, create the documentation...
    //
    FileObject targetFile =
        KettleVFS.getFileObject(environmentSubstitute(meta.getTargetFilename()));
    String targetFilename = KettleVFS.getFilename(targetFile);

    // Create the report builder
    //
    KettleReportBuilder kettleReportBuilder =
        new KettleReportBuilder(this, data.filenames, KettleVFS.getFilename(targetFile), meta);

    try {
      // Try to get the Classic Reporting Engine to boot inside of the plugin class loader...
      //
      if (ClassicEngineBoot.getInstance().isBootDone() == false) {
        ObjectUtilities.setClassLoader(getClass().getClassLoader());
        ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);
        LibLoaderBoot.getInstance().start();
        LibFontBoot.getInstance().start();
        ClassicEngineBoot.getInstance().start();
      }

      // Do the reporting thing...
      //
      kettleReportBuilder.createReport();
      kettleReportBuilder.render();

      // Single output row carrying the generated report's filename.
      Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
      int outputIndex = 0;
      outputRowData[outputIndex++] = targetFilename;

      // Pass along the data to the next steps...
      //
      putRow(data.outputRowMeta, outputRowData);

      // Add the target file to the result file list
      //
      ResultFile resultFile =
          new ResultFile(
              ResultFile.FILE_TYPE_GENERAL, targetFile, getTransMeta().getName(), toString());
      resultFile.setComment("This file was generated by the 'Auto Documentation Output' step");
      addResultFile(resultFile);
    } catch (Exception e) {
      throw new KettleException(
          BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToRenderReport"), e);
    }

    setOutputDone();
    return false;
  }

  if (first) {
    // First row: resolve field indexes and initialize repository-backed helpers.
    first = false;

    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

    // Get the filename field index...
    //
    String filenameField = environmentSubstitute(meta.getFilenameField());
    data.fileNameFieldIndex = getInputRowMeta().indexOfValue(filenameField);
    if (data.fileNameFieldIndex < 0) {
      throw new KettleException(
          BaseMessages.getString(PKG, "AutoDoc.Exception.FilenameFieldNotFound", filenameField));
    }

    // Get the file type field index...
    //
    String fileTypeField = environmentSubstitute(meta.getFileTypeField());
    data.fileTypeFieldIndex = getInputRowMeta().indexOfValue(fileTypeField);
    if (data.fileTypeFieldIndex < 0) {
      throw new KettleException(
          BaseMessages.getString(PKG, "AutoDoc.Exception.FileTypeFieldNotFound", fileTypeField));
    }

    data.repository = getTrans().getRepository();
    if (data.repository != null) {
      data.tree = data.repository.loadRepositoryDirectoryTree();
    }

    // Initialize the repository information handlers (images, metadata, loading, etc)
    //
    TransformationInformation.init(getTrans().getRepository());
    JobInformation.init(getTrans().getRepository());
  }

  // One more transformation or job to place in the documentation.
  //
  String fileName = getInputRowMeta().getString(row, data.fileNameFieldIndex);
  String fileType = getInputRowMeta().getString(row, data.fileTypeFieldIndex);

  RepositoryObjectType objectType;
  if ("Transformation".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.TRANSFORMATION;
  } else if ("Job".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.JOB;
  } else {
    throw new KettleException(
        BaseMessages.getString(PKG, "AutoDoc.Exception.UnknownFileTypeValue", fileType));
  }

  ReportSubjectLocation location = null;
  if (getTrans().getRepository() == null) {
    // No repository: the filename is a plain file path.
    switch (objectType) {
      case TRANSFORMATION:
        location = new ReportSubjectLocation(fileName, null, null, objectType);
        break;
      case JOB:
        location = new ReportSubjectLocation(fileName, null, null, objectType);
        break;
      default:
        break;
    }
  } else {
    // Repository: split the path into directory and object name.
    int lastSlashIndex = fileName.lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR);
    if (lastSlashIndex < 0) {
      // Bare name: anchor it at the repository root.
      fileName = RepositoryDirectory.DIRECTORY_SEPARATOR + fileName;
      lastSlashIndex = 0;
    }
    String directoryName = fileName.substring(0, lastSlashIndex + 1);
    String objectName = fileName.substring(lastSlashIndex + 1);

    RepositoryDirectoryInterface directory = data.tree.findDirectory(directoryName);
    if (directory == null) {
      throw new KettleException(
          BaseMessages.getString(
              PKG, "AutoDoc.Exception.RepositoryDirectoryNotFound", directoryName));
    }

    location = new ReportSubjectLocation(null, directory, objectName, objectType);
  }

  if (location == null) {
    throw new KettleException(
        BaseMessages.getString(
            PKG, "AutoDoc.Exception.UnableToDetermineLocation", fileName, fileType));
  }

  if (meta.getOutputType() != OutputType.METADATA) {
    // Add the file location to the list for later processing in one output report
    //
    data.filenames.add(location);
  } else {
    // Load the metadata from the transformation / job...
    // Output it in one row for each input row
    //
    Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());
    int outputIndex = getInputRowMeta().size();

    List<AreaOwner> imageAreaList = null;

    switch (location.getObjectType()) {
      case TRANSFORMATION:
        TransformationInformation ti = TransformationInformation.getInstance();
        TransMeta transMeta = ti.getTransMeta(location);
        imageAreaList = ti.getImageAreaList(location);

        // TransMeta
        outputRow[outputIndex++] = transMeta;
        break;

      case JOB:
        JobInformation ji = JobInformation.getInstance();
        JobMeta jobMeta = ji.getJobMeta(location);
        imageAreaList = ji.getImageAreaList(location);

        // TransMeta
        outputRow[outputIndex++] = jobMeta;
        break;

      default:
        break;
    }

    // Name
    if (meta.isIncludingName()) {
      outputRow[outputIndex++] = KettleFileTableModel.getName(location);
    }

    // Description
    if (meta.isIncludingDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getDescription(location);
    }

    // Extended Description
    if (meta.isIncludingExtendedDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getExtendedDescription(location);
    }

    // created
    if (meta.isIncludingCreated()) {
      outputRow[outputIndex++] = KettleFileTableModel.getCreation(location);
    }

    // modified
    if (meta.isIncludingModified()) {
      outputRow[outputIndex++] = KettleFileTableModel.getModification(location);
    }

    // image
    if (meta.isIncludingImage()) {
      // Serialize the graphical representation of the trans/job as a PNG byte array.
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      try {
        BufferedImage image = KettleFileTableModel.getImage(location);
        ImageIO.write(image, "png", outputStream);

        outputRow[outputIndex++] = outputStream.toByteArray();
      } catch (Exception e) {
        throw new KettleException("Unable to serialize image to PNG", e);
      } finally {
        try {
          outputStream.close();
        } catch (IOException e) {
          throw new KettleException("Unable to serialize image to PNG", e);
        }
      }
    }

    if (meta.isIncludingLoggingConfiguration()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }

    if (meta.isIncludingLastExecutionResult()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }

    if (meta.isIncludingImageAreaList()) {
      outputRow[outputIndex++] = imageAreaList;
    }

    putRow(data.outputRowMeta, outputRow);
  }

  return true;
}
public Result execute(Result previousResult, int nr) { Result result = previousResult; result.setResult(false); // starttime (in seconds) long timeStart = System.currentTimeMillis() / 1000; if (filename != null) { FileObject fileObject = null; String realFilename = getRealFilename(); try { fileObject = KettleVFS.getFileObject(realFilename, this); long iMaximumTimeout = Const.toInt(getRealMaximumTimeout(), Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0)); long iCycleTime = Const.toInt(getRealCheckCycleTime(), Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 0)); // // Sanity check on some values, and complain on insanity // if (iMaximumTimeout < 0) { iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0); if (log.isBasic()) logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout); } if (iCycleTime < 1) { // If lower than 1 set to the default iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1); if (log.isBasic()) logBasic("Check cycle time invalid, reset to " + iCycleTime); } if (iMaximumTimeout == 0) { if (log.isBasic()) logBasic("Waiting indefinitely for file [" + realFilename + "]"); } else { if (log.isBasic()) logBasic("Waiting " + iMaximumTimeout + " seconds for file [" + realFilename + "]"); } boolean continueLoop = true; while (continueLoop && !parentJob.isStopped()) { fileObject = KettleVFS.getFileObject(realFilename, this); if (fileObject.exists()) { // file exists, we're happy to exit if (log.isBasic()) logBasic("Detected file [" + realFilename + "] within timeout"); result.setResult(true); continueLoop = false; // add filename to result filenames if (addFilenameToResult && fileObject.getType() == FileType.FILE) { ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString()); resultFile.setComment(BaseMessages.getString(PKG, "JobWaitForFile.FilenameAdded")); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } } else { long now = System.currentTimeMillis() / 1000; if ((iMaximumTimeout 
> 0) && (now > (timeStart + iMaximumTimeout))) { continueLoop = false; // file doesn't exist after timeout, either true or false if (isSuccessOnTimeout()) { if (log.isBasic()) logBasic("Didn't detect file [" + realFilename + "] before timeout, success"); result.setResult(true); } else { if (log.isBasic()) logBasic("Didn't detect file [" + realFilename + "] before timeout, failure"); result.setResult(false); } } // sleep algorithm long sleepTime = 0; if (iMaximumTimeout == 0) { sleepTime = iCycleTime; } else { if ((now + iCycleTime) < (timeStart + iMaximumTimeout)) { sleepTime = iCycleTime; } else { sleepTime = iCycleTime - ((now + iCycleTime) - (timeStart + iMaximumTimeout)); } } try { if (sleepTime > 0) { if (log.isDetailed()) { logDetailed( "Sleeping " + sleepTime + " seconds before next check for file [" + realFilename + "]"); } Thread.sleep(sleepTime * 1000); } } catch (InterruptedException e) { // something strange happened result.setResult(false); continueLoop = false; } } } if (!parentJob.isStopped() && fileObject.exists() && isFileSizeCheck()) { long oldSize = -1; long newSize = fileObject.getContent().getSize(); if (log.isDetailed()) logDetailed("File [" + realFilename + "] is " + newSize + " bytes long"); if (log.isBasic()) logBasic( "Waiting until file [" + realFilename + "] stops growing for " + iCycleTime + " seconds"); while (oldSize != newSize && !parentJob.isStopped()) { try { if (log.isDetailed()) { logDetailed( "Sleeping " + iCycleTime + " seconds, waiting for file [" + realFilename + "] to stop growing"); } Thread.sleep(iCycleTime * 1000); } catch (InterruptedException e) { // something strange happened result.setResult(false); continueLoop = false; } oldSize = newSize; newSize = fileObject.getContent().getSize(); if (log.isDetailed()) { logDetailed("File [" + realFilename + "] is " + newSize + " bytes long"); } } if (log.isBasic()) logBasic("Stopped waiting for file [" + realFilename + "] to stop growing"); } if (parentJob.isStopped()) { 
result.setResult(false); } } catch (Exception e) { logBasic("Exception while waiting for file [" + realFilename + "] to stop growing", e); } finally { if (fileObject != null) { try { fileObject.close(); } catch (Exception e) { } } } } else { logError("No filename is defined."); } return result; }
public Result execute(Result previousResult, int nr) { String LimitNbrLignes = ""; String ListOfColumn = "*"; String strHighPriority = ""; String OutDumpText = ""; String OptionEnclosed = ""; String FieldSeparator = ""; String LinesTerminated = ""; Result result = previousResult; result.setResult(false); // Let's check the filename ... if (filename != null) { // User has specified a file, We can continue ... String realFilename = getRealFilename(); File file = new File(realFilename); if (file.exists() && iffileexists == 2) { // the file exists and user want to Fail result.setResult(false); result.setNrErrors(1); logError( BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label")); } else if (file.exists() && iffileexists == 1) { // the file exists and user want to do nothing result.setResult(true); if (log.isDetailed()) logDetailed( BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label")); } else { if (file.exists() && iffileexists == 0) { // File exists and user want to renamme it with unique name // Format Date // Try to clean filename (without wildcard) String wildcard = realFilename.substring(realFilename.length() - 4, realFilename.length()); if (wildcard.substring(0, 1).equals(".")) { // Find wildcard realFilename = realFilename.substring(0, realFilename.length() - 4) + "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard; } else { // did not find wildcard realFilename = realFilename + "_" + StringUtil.getFormattedDateTimeNow(true); } logDebug( BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label")); } // User has specified an existing file, We can continue ... 
if (log.isDetailed()) logDetailed( BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label")); if (connection != null) { // User has specified a connection, We can continue ... Database db = new Database(this, connection); db.shareVariablesWith(this); try { db.connect(parentJob.getTransactionId(), null); // Get schemaname String realSchemaname = environmentSubstitute(schemaname); // Get tablename String realTablename = environmentSubstitute(tablename); if (db.checkTableExists(realTablename)) { // The table existe, We can continue ... if (log.isDetailed()) logDetailed( BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists2.Label")); // Add schemaname (Most the time Schemaname.Tablename) if (schemaname != null) { realTablename = realSchemaname + "." + realTablename; } // Set the Limit lines if (Const.toInt(getRealLimitlines(), 0) > 0) { LimitNbrLignes = "LIMIT " + getRealLimitlines(); } // Set list of Column, if null get all columns (*) if (getRealListColumn() != null) { ListOfColumn = MysqlString(getRealListColumn()); } // Fields separator if (getRealSeparator() != null && outdumpvalue == 0) { FieldSeparator = "FIELDS TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'"; } // Lines Terminated by if (getRealLineterminated() != null && outdumpvalue == 0) { LinesTerminated = "LINES TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'"; } // High Priority ? 
if (isHighPriority()) { strHighPriority = "HIGH_PRIORITY"; } if (getRealEnclosed() != null && outdumpvalue == 0) { if (isOptionEnclosed()) { OptionEnclosed = "OPTIONALLY "; } OptionEnclosed = OptionEnclosed + "ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'"; } // OutFile or Dumpfile if (outdumpvalue == 0) { OutDumpText = "INTO OUTFILE"; } else { OutDumpText = "INTO DUMPFILE"; } String FILEBulkFile = "SELECT " + strHighPriority + " " + ListOfColumn + " " + OutDumpText + " '" + realFilename + "' " + FieldSeparator + " " + OptionEnclosed + " " + LinesTerminated + " FROM " + realTablename + " " + LimitNbrLignes + " LOCK IN SHARE MODE"; try { if (log.isDetailed()) logDetailed(FILEBulkFile); // Run the SQL PreparedStatement ps = db.prepareSQL(FILEBulkFile); ps.execute(); // Everything is OK...we can disconnect now db.disconnect(); if (isAddFileToResult()) { // Add filename to output files ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } result.setResult(true); } catch (SQLException je) { db.disconnect(); result.setNrErrors(1); logError( BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + je.getMessage()); } catch (KettleFileException e) { logError( BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + e.getMessage()); result.setNrErrors(1); } } else { // Of course, the table should have been created already before the bulk load // operation db.disconnect(); result.setNrErrors(1); if (log.isDetailed()) logDetailed( BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists2.Label")); } } catch (KettleDatabaseException dbe) { db.disconnect(); result.setNrErrors(1); logError( BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + dbe.getMessage()); } } 
else { // No database connection is defined result.setNrErrors(1); logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nodatabase.Label")); } } } else { // No file was specified result.setNrErrors(1); logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nofilename.Label")); } return result; }
public boolean openNewFile() { boolean retval = false; data.writer = null; try { FileObject file = KettleVFS.getFileObject(buildFilename(true), getTransMeta()); if (meta.isAddToResultFiles()) { // Add this to the result file names... ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname()); resultFile.setComment("This file was created with a xml output step"); // $NON-NLS-1$ addResultFile(resultFile); } OutputStream outputStream; if (meta.isZipped()) { OutputStream fos = KettleVFS.getOutputStream(file, false); data.zip = new ZipOutputStream(fos); File entry = new File(buildFilename(false)); ZipEntry zipentry = new ZipEntry(entry.getName()); zipentry.setComment("Compressed by Kettle"); // $NON-NLS-1$ data.zip.putNextEntry(zipentry); outputStream = data.zip; } else { OutputStream fos = KettleVFS.getOutputStream(file, false); outputStream = fos; } if (meta.getEncoding() != null && meta.getEncoding().length() > 0) { logBasic("Opening output stream in encoding: " + meta.getEncoding()); // $NON-NLS-1$ data.writer = new OutputStreamWriter(outputStream, meta.getEncoding()); data.writer.write(XMLHandler.getXMLHeader(meta.getEncoding()).toCharArray()); } else { logBasic( "Opening output stream in default encoding : " + Const.XML_ENCODING); // $NON-NLS-1$ data.writer = new OutputStreamWriter(outputStream); data.writer.write(XMLHandler.getXMLHeader(Const.XML_ENCODING).toCharArray()); } // Add the name space if defined StringBuffer nameSpace = new StringBuffer(); if ((meta.getNameSpace() != null) && (!"".equals(meta.getNameSpace()))) { // $NON-NLS-1$ nameSpace.append(" xmlns=\""); // $NON-NLS-1$ nameSpace.append(meta.getNameSpace()); nameSpace.append("\""); // $NON-NLS-1$ } // OK, write the header & the parent element: data.writer.write( ("<" + meta.getMainElement() + nameSpace.toString() + ">" + Const.CR) .toCharArray()); //$NON-NLS-1$//$NON-NLS-2$ retval = true; } catch (Exception e) { logError("Error opening new 
file : " + e.toString()); // $NON-NLS-1$ } // System.out.println("end of newFile(), splitnr="+splitnr); data.splitnr++; return retval; }
// Executes the configured shell script/command, optionally once per incoming row,
// optionally redirecting the entry's log output to a file that is then registered
// as a result file. Returns the (mutated) result.
public Result execute(Result result, int nr) throws KettleException {
  FileLoggingEventListener loggingEventListener = null;
  LogLevel shellLogLevel = parentJob.getLogLevel();
  if (setLogfile) {
    String realLogFilename = environmentSubstitute(getLogFilename());
    // We need to check here the log filename
    // if we do not have one, we must fail
    if (Const.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "JobEntryShell.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }

    try {
      // Route this entry's log channel into the configured log file.
      loggingEventListener =
          new FileLoggingEventListener(getLogChannelId(), realLogFilename, setAppendLogfile);
      KettleLogStore.getAppender().addLoggingEventListener(loggingEventListener);
    } catch (KettleException e) {
      logError(
          BaseMessages.getString(
              PKG, "JobEntryShell.Error.UnableopenAppenderFile", getLogFilename(), e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    shellLogLevel = logFileLevel;
  }

  log.setLogLevel(shellLogLevel);

  result.setEntryNr(nr);

  // "Translate" the arguments for later
  String[] substArgs = null;
  if (arguments != null) {
    substArgs = new String[arguments.length];
    for (int idx = 0; idx < arguments.length; idx++) {
      substArgs[idx] = environmentSubstitute(arguments[idx]);
    }
  }

  int iteration = 0;
  String[] args = substArgs;
  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = result.getRows();

  if (log.isDetailed()) {
    logDetailed(
        BaseMessages.getString(
            PKG, "JobEntryShell.Log.FoundPreviousRows", "" + (rows != null ? rows.size() : 0)));
  }

  // Run once when not executing per row; otherwise once per previous-result row,
  // stopping early on the first error.
  while ((first && !execPerRow)
      || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
    first = false;
    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }

    List<RowMetaAndData> cmdRows = null;

    if (execPerRow) {
      // Execute for each input row

      if (argFromPrevious) {
        // Copy the input row to the (command line) arguments

        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        }
      } else {
        // Just pass a single row
        List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
        newList.add(resultRow);
        cmdRows = newList;
      }
    } else {
      if (argFromPrevious) {
        // Only put the first Row on the arguments

        args = null;
        if (resultRow != null) {
          args = new String[resultRow.size()];
          for (int i = 0; i < resultRow.size(); i++) {
            args[i] = resultRow.getString(i, null);
          }
        } else {
          cmdRows = rows;
        }
      } else {
        // Keep it as it was...
        cmdRows = rows;
      }
    }

    executeShell(result, cmdRows, args);

    iteration++;
  }

  if (setLogfile) {
    if (loggingEventListener != null) {
      // Detach the file appender and register the log file as a result file.
      KettleLogStore.getAppender().removeLoggingEventListener(loggingEventListener);
      loggingEventListener.close();

      ResultFile resultFile =
          new ResultFile(
              ResultFile.FILE_TYPE_LOG,
              loggingEventListener.getFile(),
              parentJob.getJobname(),
              getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
    }
  }

  return result;
}