  /** Load the transformation from an XML file or from the repository, depending on the location. */
  private TransMeta loadTransformation(ReportSubjectLocation location) throws KettleException {
    TransMeta transMeta;
    if (!Const.isEmpty(location.getFilename())) {
      // A file name is set: load the transformation from the XML file.
      transMeta = new TransMeta(location.getFilename());
    } else {
      // Otherwise load it from the repository by name and directory.
      transMeta =
          repository.loadTransformation(
              location.getName(), location.getDirectory(), null, true, null);
    }
    return transMeta;
  }
Example 2
  public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (AutoDocMeta) smi;
    data = (AutoDocData) sdi;

    Object[] row = getRow();
    if (row == null) {
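      // No more input rows: render the documentation for the file locations collected so far.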

      if (data.filenames.isEmpty()) {
        // Nothing to see here, move along!
        //
        setOutputDone();
        return false;
      }

      // End of the line, create the documentation...
      //
      FileObject targetFile =
          KettleVFS.getFileObject(environmentSubstitute(meta.getTargetFilename()));
      String targetFilename = KettleVFS.getFilename(targetFile);

      // Create the report builder
      //
      KettleReportBuilder kettleReportBuilder =
          new KettleReportBuilder(this, data.filenames, targetFilename, meta);

      try {
        // Try to get the Classic Reporting Engine to boot inside of the plugin class loader...
        //
        if (!ClassicEngineBoot.getInstance().isBootDone()) {

          ObjectUtilities.setClassLoader(getClass().getClassLoader());
          ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);

          LibLoaderBoot.getInstance().start();
          LibFontBoot.getInstance().start();
          ClassicEngineBoot.getInstance().start();
        }

        // Do the reporting thing...
        //
        kettleReportBuilder.createReport();
        kettleReportBuilder.render();

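        // Build the output row carrying the generated documentation file name.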
        Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
        int outputIndex = 0;
        outputRowData[outputIndex++] = targetFilename;

        // Pass along the data to the next steps...
        //
        putRow(data.outputRowMeta, outputRowData);

        // Add the target file to the result file list
        //
        ResultFile resultFile =
            new ResultFile(
                ResultFile.FILE_TYPE_GENERAL, targetFile, getTransMeta().getName(), toString());
        resultFile.setComment("This file was generated by the 'Auto Documentation Output' step");
        addResultFile(resultFile);
      } catch (Exception e) {
        throw new KettleException(
            BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToRenderReport"), e);
      }

      setOutputDone();
      return false;
    }

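    // First row: build the output row layout and resolve the filename / file type field indexes.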
    if (first) {
      first = false;

      data.outputRowMeta = getInputRowMeta().clone();
      meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

      // Get the filename field index...
      //
      String filenameField = environmentSubstitute(meta.getFilenameField());
      data.fileNameFieldIndex = getInputRowMeta().indexOfValue(filenameField);
      if (data.fileNameFieldIndex < 0) {
        throw new KettleException(
            BaseMessages.getString(PKG, "AutoDoc.Exception.FilenameFieldNotFound", filenameField));
      }

      // Get the file type field index...
      //
      String fileTypeField = environmentSubstitute(meta.getFileTypeField());
      data.fileTypeFieldIndex = getInputRowMeta().indexOfValue(fileTypeField);
      if (data.fileTypeFieldIndex < 0) {
        throw new KettleException(
            BaseMessages.getString(PKG, "AutoDoc.Exception.FileTypeFieldNotFound", fileTypeField));
      }

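      // Keep a reference to the repository (if connected) and load its directory tree for lookups.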
      data.repository = getTrans().getRepository();
      if (data.repository != null) {
        data.tree = data.repository.loadRepositoryDirectoryTree();
      }

      // Initialize the repository information handlers (images, metadata, loading, etc)
      //
      TransformationInformation.init(getTrans().getRepository());
      JobInformation.init(getTrans().getRepository());
    }

    // One more transformation or job to place in the documentation.
    //
    String fileName = getInputRowMeta().getString(row, data.fileNameFieldIndex);
    String fileType = getInputRowMeta().getString(row, data.fileTypeFieldIndex);

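    // Map the file type field value ("Transformation" or "Job") onto a repository object type.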
    RepositoryObjectType objectType;
    if ("Transformation".equalsIgnoreCase(fileType)) {
      objectType = RepositoryObjectType.TRANSFORMATION;
    } else if ("Job".equalsIgnoreCase(fileType)) {
      objectType = RepositoryObjectType.JOB;
    } else {
      throw new KettleException(
          BaseMessages.getString(PKG, "AutoDoc.Exception.UnknownFileTypeValue", fileType));
    }

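    // Determine the report subject location: a plain file name when there is no repository,
    // otherwise a repository directory plus object name parsed from the path.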
    ReportSubjectLocation location = null;
    if (getTrans().getRepository() == null) {
      switch (objectType) {
        case TRANSFORMATION:
          location = new ReportSubjectLocation(fileName, null, null, objectType);
          break;
        case JOB:
          location = new ReportSubjectLocation(fileName, null, null, objectType);
          break;
        default:
          break;
      }
    } else {
      int lastSlashIndex = fileName.lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR);
      if (lastSlashIndex < 0) {
        fileName = RepositoryDirectory.DIRECTORY_SEPARATOR + fileName;
        lastSlashIndex = 0;
      }

      String directoryName = fileName.substring(0, lastSlashIndex + 1);
      String objectName = fileName.substring(lastSlashIndex + 1);

      RepositoryDirectoryInterface directory = data.tree.findDirectory(directoryName);
      if (directory == null) {
        throw new KettleException(
            BaseMessages.getString(
                PKG, "AutoDoc.Exception.RepositoryDirectoryNotFound", directoryName));
      }

      location = new ReportSubjectLocation(null, directory, objectName, objectType);
    }

    if (location == null) {
      throw new KettleException(
          BaseMessages.getString(
              PKG, "AutoDoc.Exception.UnableToDetermineLocation", fileName, fileType));
    }

    if (meta.getOutputType() != OutputType.METADATA) {
      // Add the file location to the list for later processing in one output report
      //
      data.filenames.add(location);
    } else {
      // Load the metadata from the transformation / job...
      // Output it in one row for each input row
      //
      Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());
      int outputIndex = getInputRowMeta().size();

      List<AreaOwner> imageAreaList = null;

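      // Load the metadata (TransMeta or JobMeta) and the image area list for this location.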
      switch (location.getObjectType()) {
        case TRANSFORMATION:
          TransformationInformation ti = TransformationInformation.getInstance();
          TransMeta transMeta = ti.getTransMeta(location);
          imageAreaList = ti.getImageAreaList(location);

          // TransMeta
          outputRow[outputIndex++] = transMeta;
          break;

        case JOB:
          JobInformation ji = JobInformation.getInstance();
          JobMeta jobMeta = ji.getJobMeta(location);
          imageAreaList = ji.getImageAreaList(location);

          // JobMeta
          outputRow[outputIndex++] = jobMeta;
          break;
        default:
          break;
      }

      // Name
      if (meta.isIncludingName()) {
        outputRow[outputIndex++] = KettleFileTableModel.getName(location);
      }

      // Description
      if (meta.isIncludingDescription()) {
        outputRow[outputIndex++] = KettleFileTableModel.getDescription(location);
      }

      // Extended Description
      if (meta.isIncludingExtendedDescription()) {
        outputRow[outputIndex++] = KettleFileTableModel.getExtendedDescription(location);
      }

      // created
      if (meta.isIncludingCreated()) {
        outputRow[outputIndex++] = KettleFileTableModel.getCreation(location);
      }

      // modified
      if (meta.isIncludingModified()) {
        outputRow[outputIndex++] = KettleFileTableModel.getModification(location);
      }

      // image
      if (meta.isIncludingImage()) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try {
          BufferedImage image = KettleFileTableModel.getImage(location);
          ImageIO.write(image, "png", outputStream);

          outputRow[outputIndex++] = outputStream.toByteArray();
        } catch (Exception e) {
          throw new KettleException("Unable to serialize image to PNG", e);
        } finally {
          try {
            outputStream.close();
          } catch (IOException e) {
            throw new KettleException("Unable to serialize image to PNG", e);
          }
        }
      }

      // logging configuration
      if (meta.isIncludingLoggingConfiguration()) {
        outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
      }

      // last execution result
      if (meta.isIncludingLastExecutionResult()) {
        outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
      }

      if (meta.isIncludingImageAreaList()) {
        outputRow[outputIndex++] = imageAreaList;
      }

      putRow(data.outputRowMeta, outputRow);
    }

    return true;
  }