// Listen to the Variable... button
  public static final String getVariableName(Shell shell, VariableSpace space) {
    String keys[] = space.listVariables();
    Arrays.sort(keys);

    int size = keys.length;
    String key[] = new String[size];
    String val[] = new String[size];
    String str[] = new String[size];

    for (int i = 0; i < keys.length; i++) {
      key[i] = keys[i];
      val[i] = space.getVariable(key[i]);
      str[i] = key[i] + "  [" + val[i] + "]";
    }

    EnterSelectionDialog esd =
        new EnterSelectionDialog(
            shell,
            str,
            Messages.getString("System.Dialog.SelectEnvironmentVar.Title"),
            Messages.getString("System.Dialog.SelectEnvironmentVar.Message"));
    esd.clearModal();
    if (esd.open() != null) {
      int nr = esd.getSelectionNr();
      String var = key[nr];

      return var;
    } else {
      return null;
    }
  }
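
// A minimal usage sketch for the method above, assuming it is exposed as a static helper on a
// dialog utility class; "VariableDialogHelper" is an invented stand-in for that class. A button
// listener opens the selection dialog and inserts the chosen variable into a text widget as ${NAME}.
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.variables.VariableSpace;

class VariableButtonListener extends SelectionAdapter {
  private final Shell shell;
  private final VariableSpace space;
  private final Text targetText;

  VariableButtonListener(Shell shell, VariableSpace space, Text targetText) {
    this.shell = shell;
    this.space = space;
    this.targetText = targetText;
  }

  @Override
  public void widgetSelected(SelectionEvent e) {
    // null means the user cancelled the selection dialog.
    String variableName = VariableDialogHelper.getVariableName(shell, space);
    if (variableName != null) {
      // Insert the variable reference at the current caret position.
      targetText.insert("${" + variableName + "}");
    }
  }
}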
  protected String getLogBuffer(
      VariableSpace space, String logChannelId, LogStatus status, String limit) {

    StringBuffer buffer = KettleLogStore.getAppender().getBuffer(logChannelId, true);

    if (Const.isEmpty(limit)) {
      String defaultLimit = space.getVariable(Const.KETTLE_LOG_SIZE_LIMIT, null);
      if (!Const.isEmpty(defaultLimit)) {
        limit = defaultLimit;
      }
    }

    // See if we need to limit the amount of rows
    //
    int nrLines = Const.isEmpty(limit) ? -1 : Const.toInt(space.environmentSubstitute(limit), -1);

    if (nrLines > 0) {
      int start = buffer.length() - 1;
      for (int i = 0; i < nrLines && start > 0; i++) {
        start = buffer.lastIndexOf(Const.CR, start - 1);
      }
      if (start > 0) {
        buffer.delete(0, start + Const.CR.length());
      }
    }

    return buffer.append(Const.CR + status.getStatus().toUpperCase() + Const.CR).toString();
  }
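
// A self-contained sketch (plain Java, no Kettle classes) of the tail-trimming logic above: keep
// only the last nrLines lines of a buffer by walking backwards with lastIndexOf(), one separator
// per requested line, then deleting everything before the last separator found. Const.CR plays
// the role of "\n" in the real method.
class LogTailExample {
  static String tail(String text, int nrLines) {
    StringBuffer buffer = new StringBuffer(text);
    String cr = "\n";
    int start = buffer.length() - 1;
    // Walk backwards one line separator per requested line.
    for (int i = 0; i < nrLines && start > 0; i++) {
      start = buffer.lastIndexOf(cr, start - 1);
    }
    // Everything before the found separator is older than the requested tail.
    if (start > 0) {
      buffer.delete(0, start + cr.length());
    }
    return buffer.toString();
  }

  public static void main(String[] args) {
    String log = "line 1\nline 2\nline 3\nline 4\n";
    System.out.print(tail(log, 2)); // prints "line 3" and "line 4"
  }
}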
  public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
    String realTableName = space.environmentSubstitute(tableName);
    String realSchemaName = space.environmentSubstitute(schemaName);

    if (databaseMeta != null) {
      Database db = new Database(loggingObject, databaseMeta);
      try {
        db.connect();

        if (!Const.isEmpty(realTableName)) {
          String schemaTable =
              databaseMeta.getQuotedSchemaTableCombination(realSchemaName, realTableName);

          // Check if this table exists...
          if (db.checkTableExists(schemaTable)) {
            return db.getTableFields(schemaTable);
          } else {
            throw new KettleException(
                BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.TableNotFound"));
          }
        } else {
          throw new KettleException(
              BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.TableNotSpecified"));
        }
      } catch (Exception e) {
        throw new KettleException(
            BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.ErrorGettingFields"), e);
      } finally {
        db.disconnect();
      }
    } else {
      throw new KettleException(
          BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.ConnectionNotDefined"));
    }
  }
  public void setVariables(VariableSpace space) {
    this.variables = new HashMap<String, String>();

    for (String name : space.listVariables()) {
      String value = space.getVariable(name);
      this.variables.put(name, value);
    }
  }
Example #5
 /** @return the connectionName */
 public String getActualConnectionName() {
   String name = space.environmentSubstitute(connectionName);
   if (Const.isEmpty(name)) {
     name = space.getVariable(getConnectionNameVariable());
   }
   if (Const.isEmpty(name)) return null;
   else return name;
 }
Example #6
  /** @return the tableName */
  public String getActualTableName() {
    if (!Const.isEmpty(tableName)) return space.environmentSubstitute(tableName);

    String name = space.getVariable(getTableNameVariable());
    if (Const.isEmpty(name)) {
      return null;
    } else {
      return name;
    }
  }
  /** @return the schemaName */
  public String getActualSchemaName() {
    if (!Const.isEmpty(schemaName)) {
      return space.environmentSubstitute(schemaName);
    }

    String name = space.getVariable(getSchemaNameVariable());
    if (Const.isEmpty(name)) {
      return null;
    } else {
      return name;
    }
  }
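
// A minimal sketch of the lookup pattern shared by getActualConnectionName(), getActualTableName()
// and getActualSchemaName(): prefer the value configured on the step (after variable substitution),
// otherwise fall back to a dedicated variable. The variable name "MY_TABLE_VARIABLE" is invented
// for this example.
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

class FallbackLookupExample {
  public static void main(String[] args) {
    VariableSpace space = Variables.getADefaultVariableSpace();
    space.setVariable("MY_TABLE_VARIABLE", "fact_sales");

    String configuredTableName = null; // nothing configured on the step itself
    String actual =
        (configuredTableName == null || configuredTableName.length() == 0)
            ? space.getVariable("MY_TABLE_VARIABLE")
            : space.environmentSubstitute(configuredTableName);

    System.out.println(actual); // prints: fact_sales
  }
}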
  public void open(VariableSpace space, Process sqlldrProcess) throws KettleException {
    String loadMethod = meta.getLoadMethod();
    try {
      OutputStream os = null;

      if (OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals(loadMethod)) {
        os = sqlldrProcess.getOutputStream();
      } else {
        // Else open the data file filled in.
        String dataFile = meta.getDataFile();
        dataFile = space.environmentSubstitute(dataFile);

        os = new FileOutputStream(dataFile, false);
      }

      String encoding = meta.getEncoding();
      if (Const.isEmpty(encoding)) {
        // Use the default encoding.
        output = new BufferedWriter(new OutputStreamWriter(os));
      } else {
        // Use the specified encoding
        output = new BufferedWriter(new OutputStreamWriter(os, encoding));
      }
    } catch (IOException e) {
      throw new KettleException("IO exception occured: " + e.getMessage(), e);
    }
  }
  /**
   * Since the exported transformation that runs this will reside in a ZIP file, we can't reference
   * files relatively. So what this does is turn the name of the base path into an absolute path.
   *
   * @param space the variable space to use
   * @param definitions
   * @param resourceNamingInterface
   * @param repository The repository to optionally load other resources from (to be converted to
   *     XML)
   * @param metaStore the metaStore in which non-kettle metadata could reside.
   * @return the filename of the exported resource
   */
  public String exportResources(
      VariableSpace space,
      Map<String, ResourceDefinition> definitions,
      ResourceNamingInterface resourceNamingInterface,
      Repository repository,
      IMetaStore metaStore)
      throws KettleException {
    try {
      // The object that we're modifying here is a copy of the original!
      // So let's change the filename from relative to absolute by grabbing the file object...
      // In case the name of the file comes from previous steps, forget about this!
      //
      if (!fileNameInField) {

        if (!Const.isEmpty(fileName)) {
          FileObject fileObject =
              KettleVFS.getFileObject(space.environmentSubstitute(fileName), space);
          fileName = resourceNamingInterface.nameResource(fileObject, space, true);
        }
      }

      return null;
    } catch (Exception e) {
      throw new KettleException(e);
    }
  }
Example #10
  public void getFields(
      RowMetaInterface r,
      String name,
      RowMetaInterface info[],
      StepMeta nextStep,
      VariableSpace space,
      Repository repository,
      IMetaStore metaStore)
      throws KettleStepException {
    int i;
    for (i = 0; i < inputFields.length; i++) {
      RssInputField field = inputFields[i];

      int type = field.getType();
      if (type == ValueMeta.TYPE_NONE) type = ValueMeta.TYPE_STRING;
      try {
        ValueMetaInterface v =
            ValueMetaFactory.createValueMeta(space.environmentSubstitute(field.getName()), type);
        v.setLength(field.getLength(), field.getPrecision());
        v.setOrigin(name);
        r.addValueMeta(v);
      } catch (Exception e) {
        throw new KettleStepException(e);
      }
    }

    if (includeUrl) {
      ValueMetaInterface v =
          new ValueMeta(space.environmentSubstitute(urlField), ValueMeta.TYPE_STRING);
      v.setLength(100, -1);
      v.setOrigin(name);
      r.addValueMeta(v);
    }

    if (includeRowNumber) {
      ValueMetaInterface v =
          new ValueMeta(space.environmentSubstitute(rowNumberField), ValueMeta.TYPE_INTEGER);
      v.setLength(ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0);
      v.setOrigin(name);
      r.addValueMeta(v);
    }
  }
  /**
   * Reset this field. Should be called prior to processing a new field value from the avro file.
   *
   * @param space the variable space used to resolve environment variables (the values that
   *     variables resolve to cannot contain ".")
   */
  public void reset(VariableSpace space) {
    m_tempParts.clear();

    for (String part : m_pathParts) {
      m_tempParts.add(space.environmentSubstitute(part));
    }

    // reset sub fields
    for (MongoField f : m_subFields) {
      f.reset(space);
    }
  }
  public void getUsedVariables(JobMeta jobMeta) {
    Properties sp = new Properties();
    VariableSpace space = Variables.getADefaultVariableSpace();

    String[] keys = space.listVariables();
    for (int i = 0; i < keys.length; i++) {
      sp.put(keys[i], space.getVariable(keys[i]));
    }

    List<String> vars = jobMeta.getUsedVariables();
    if (vars != null && vars.size() > 0) {
      HashMap<String, String> newVariables = new HashMap<String, String>();

      for (int i = 0; i < vars.size(); i++) {
        String varname = vars.get(i);
        if (!varname.startsWith(Const.INTERNAL_VARIABLE_PREFIX)) {
          newVariables.put(varname, Const.NVL(variables.get(varname), sp.getProperty(varname, "")));
        }
      }
      // variables.clear();
      variables.putAll(newVariables);
    }
  }
  /**
   * Since the exported transformation that runs this will reside in a ZIP file, we can't reference
   * files relatively. So what this does is turn the name of files into absolute paths OR it simply
   * includes the resource in the ZIP file. For now, we'll simply turn it into an absolute path and
   * pray that the file is on a shared drive or something like that.
   *
   * <p>TODO: create options to configure this behavior
   */
  public String exportResources(
      VariableSpace space,
      Map<String, ResourceDefinition> definitions,
      ResourceNamingInterface resourceNamingInterface,
      Repository repository)
      throws KettleException {
    try {
      // The object that we're modifying here is a copy of the original!
      // So let's change the filename from relative to absolute by grabbing the file object...
      // In case the name of the file comes from previous steps, forget about this!
      //
      if (Const.isEmpty(filenameField)) {
        // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.csv
        // To   : /home/matt/test/files/foo/bar.csv
        //
        FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(filename));

        // If the file doesn't exist, forget about this effort too!
        //
        if (fileObject.exists()) {
          // Convert to an absolute path...
          //
          filename =
              resourceNamingInterface.nameResource(
                  fileObject.getName().getBaseName(),
                  fileObject.getParent().getName().getPath(),
                  space.toString(),
                  FileNamingType.DATA_FILE);

          return filename;
        }
      }
      return null;
    } catch (Exception e) {
      throw new KettleException(e); // $NON-NLS-1$
    }
  }
  public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
    String realFilename = space.environmentSubstitute(filename);
    File file = new File(realFilename);
    Database db = null;
    try {
      if (!file.exists() || !file.isFile()) {
        throw new KettleException(
            BaseMessages.getString(
                PKG, "AccessOutputMeta.Exception.FileDoesNotExist", realFilename));
      }

      // open the database and get the table
      db = Database.open(file);
      String realTablename = space.environmentSubstitute(tablename);
      Table table = db.getTable(realTablename);
      if (table == null) {
        throw new KettleException(
            BaseMessages.getString(
                PKG, "AccessOutputMeta.Exception.TableDoesNotExist", realTablename));
      }

      RowMetaInterface layout = getLayout(table);
      return layout;
    } catch (Exception e) {
      throw new KettleException(
          BaseMessages.getString(PKG, "AccessOutputMeta.Exception.ErrorGettingFields"), e);
    } finally {
      try {
        if (db != null) {
          db.close();
        }
      } catch (IOException e) {
        throw new KettleException(
            BaseMessages.getString(PKG, "AccessOutputMeta.Exception.ErrorClosingDatabase"), e);
      }
    }
  }
 /* This function adds meta data to the rows being pushed out */
 public void getFields(
     RowMetaInterface r,
     String name,
     RowMetaInterface[] info,
     StepMeta nextStep,
     VariableSpace space,
     Repository repository,
     IMetaStore metaStore)
     throws KettleStepException {
   String realfieldname = space.environmentSubstitute(getSalesforceIDFieldName());
   if (!Const.isEmpty(realfieldname)) {
     ValueMetaInterface v = new ValueMetaString(realfieldname);
     v.setLength(18);
     v.setOrigin(name);
     r.addValueMeta(v);
   }
 }
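
// A small sketch of what the getFields() above contributes downstream: it appends one
// 18-character string field to the outgoing row metadata. The field name "SalesforceID" and the
// step name "Salesforce Insert" are invented stand-ins for the resolved values.
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

class SalesforceIdFieldExample {
  public static void main(String[] args) {
    RowMetaInterface r = new RowMeta();

    ValueMetaInterface v = new ValueMetaString("SalesforceID");
    v.setLength(18);
    v.setOrigin("Salesforce Insert"); // the "name" argument passed to getFields()
    r.addValueMeta(v);

    System.out.println(r); // e.g. [SalesforceID String(18)]
  }
}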
Example #16
 public void getFields(
     RowMetaInterface inputRowMeta,
     String name,
     RowMetaInterface info[],
     StepMeta nextStep,
     VariableSpace space)
     throws KettleStepException {
   for (int i = 0; i < fieldOutStream.length; i++) {
     if (!Const.isEmpty(fieldOutStream[i])) {
       ValueMetaInterface v =
           new ValueMeta(space.environmentSubstitute(fieldOutStream[i]), ValueMeta.TYPE_STRING);
       v.setLength(100, -1);
       v.setOrigin(name);
       inputRowMeta.addValueMeta(v);
     }
   }
 }
 /* This function adds meta data to the rows being pushed out */
 public void getFields(
     RowMetaInterface r,
     String name,
     RowMetaInterface[] info,
     StepMeta nextStep,
     VariableSpace space,
     Repository repository,
     IMetaStore metaStore)
     throws KettleStepException {
   if (StringUtils.isNotBlank(this.getIdOutField())) {
     ValueMetaInterface valueMeta =
         new ValueMeta(
             space.environmentSubstitute(this.getIdOutField()), ValueMetaInterface.TYPE_STRING);
     valueMeta.setOrigin(name);
     // add if doesn't exist
     if (!r.exists(valueMeta)) {
       r.addValueMeta(valueMeta);
     }
   }
 }
  public void getFields(
      RowMetaInterface row,
      String name,
      RowMetaInterface[] info,
      StepMeta nextStep,
      VariableSpace space,
      Repository repository,
      IMetaStore metaStore)
      throws KettleStepException {

    ValueMetaInterface v = new ValueMetaString(newFieldname);
    v.setOrigin(name);
    row.addValueMeta(v);

    // include row number
    if (includeRowNumber) {
      v = new ValueMetaInteger(space.environmentSubstitute(rowNumberField));
      v.setLength(ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0);
      v.setOrigin(name);
      row.addValueMeta(v);
    }
  }
  /**
   * This factory returns a FileSystemConfigBuilder. Custom FileSystemConfigBuilders can be created
   * by implementing the {@link IKettleFileSystemConfigBuilder} or overriding the {@link
   * KettleGenericFileSystemConfigBuilder}
   *
   * @see FileSystemConfigBuilder
   * @param varSpace A Kettle variable space for resolving VFS config parameters
   * @param scheme The VFS scheme (FILE, HTTP, SFTP, etc...)
   * @return A FileSystemConfigBuilder that can translate Kettle variables into VFS config
   *     parameters
   * @throws IOException
   */
  public static IKettleFileSystemConfigBuilder getConfigBuilder(
      VariableSpace varSpace, String scheme) throws IOException {
    IKettleFileSystemConfigBuilder result = null;

    // Attempt to load the Config Builder from a variable: vfs.config.parser = class
    String parserClass =
        varSpace.getVariable("vfs." + scheme + ".config.parser"); // $NON-NLS-1$ //$NON-NLS-2$

    if (parserClass != null) {
      try {
        Class<?> configBuilderClass =
            KettleFileSystemConfigBuilderFactory.class.getClassLoader().loadClass(parserClass);
        Method mGetInstance = configBuilderClass.getMethod("getInstance"); // $NON-NLS-1$
        if ((mGetInstance != null)
            && (IKettleFileSystemConfigBuilder.class.isAssignableFrom(
                mGetInstance.getReturnType()))) {
          result = (IKettleFileSystemConfigBuilder) mGetInstance.invoke(null);
        } else {
          result = (IKettleFileSystemConfigBuilder) configBuilderClass.newInstance();
        }
      } catch (Exception e) {
        // Failed to load custom parser. Throw exception.
        throw new IOException(
            BaseMessages.getString(PKG, "CustomVfsSettingsParser.Log.FailedToLoad")); // $NON-NLS-1$
      }
    } else {
      // No custom parser requested, load default
      if (scheme.equalsIgnoreCase("sftp")) { // $NON-NLS-1$
        result = KettleSftpFileSystemConfigBuilder.getInstance();
      } else {
        result = KettleGenericFileSystemConfigBuilder.getInstance();
      }
    }

    return result;
  }
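
// A usage sketch for the factory above, assuming this code sits in the same package as the config
// builder classes (so they need no extra imports). Without a "vfs.<scheme>.config.parser" variable,
// "sftp" resolves to the SFTP-specific builder and every other scheme to the generic one; a custom
// builder class such as "com.example.MyS3ConfigBuilder" (made up here) can be plugged in per
// scheme via that variable.
import java.io.IOException;

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

class ConfigBuilderLookupExample {
  public static void main(String[] args) throws IOException {
    VariableSpace space = Variables.getADefaultVariableSpace();

    IKettleFileSystemConfigBuilder sftpBuilder =
        KettleFileSystemConfigBuilderFactory.getConfigBuilder(space, "sftp");
    IKettleFileSystemConfigBuilder httpBuilder =
        KettleFileSystemConfigBuilderFactory.getConfigBuilder(space, "http");

    System.out.println(sftpBuilder.getClass().getSimpleName()); // the SFTP-specific builder
    System.out.println(httpBuilder.getClass().getSimpleName()); // the generic builder

    // Register a custom builder for a scheme (the class name is illustrative only).
    space.setVariable("vfs.s3.config.parser", "com.example.MyS3ConfigBuilder");
  }
}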
  public String buildFilename(
      String filename,
      String extension,
      VariableSpace space,
      int stepnr,
      String partnr,
      int splitnr,
      boolean ziparchive,
      TextFileOutputMeta meta) {
    SimpleDateFormat daf = new SimpleDateFormat();

    // Replace possible environment variables...
    String retval = space.environmentSubstitute(filename);
    String realextension = space.environmentSubstitute(extension);

    if (meta.isFileAsCommand()) {
      return retval;
    }

    Date now = new Date();

    if (meta.isSpecifyingFormat() && !Const.isEmpty(meta.getDateTimeFormat())) {
      daf.applyPattern(meta.getDateTimeFormat());
      String dt = daf.format(now);
      retval += dt;
    } else {
      if (meta.isDateInFilename()) {
        daf.applyPattern("yyyMMdd");
        String d = daf.format(now);
        retval += "_" + d;
      }
      if (meta.isTimeInFilename()) {
        daf.applyPattern("HHmmss");
        String t = daf.format(now);
        retval += "_" + t;
      }
    }
    if (meta.isStepNrInFilename()) {
      retval += "_" + stepnr;
    }
    if (meta.isPartNrInFilename()) {
      retval += "_" + partnr;
    }
    if (meta.getSplitEvery() > 0) {
      retval += "_" + splitnr;
    }

    if (meta.getFileCompression().equals("Zip")) {
      if (ziparchive) {
        retval += ".zip";
      } else {
        if (realextension != null && realextension.length() != 0) {
          retval += "." + realextension;
        }
      }
    } else {
      if (realextension != null && realextension.length() != 0) {
        retval += "." + realextension;
      }
      if (meta.getFileCompression().equals("GZip")) {
        retval += ".gz";
      }
    }
    return retval;
  }
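
// A self-contained illustration of the suffix scheme implemented above: optional date/time parts,
// then the step, partition and split numbers, then the extension. The values below are hard-coded
// stand-ins for the meta and step settings.
import java.text.SimpleDateFormat;
import java.util.Date;

class FilenameSuffixExample {
  public static void main(String[] args) {
    String retval = "/tmp/output/result";
    Date now = new Date();
    SimpleDateFormat daf = new SimpleDateFormat();

    daf.applyPattern("yyyyMMdd");
    retval += "_" + daf.format(now); // date in filename
    daf.applyPattern("HHmmss");
    retval += "_" + daf.format(now); // time in filename
    retval += "_" + 0;               // step number
    retval += "_" + "000";           // partition number
    retval += "_" + 3;               // split number
    retval += ".txt";                // extension (".zip"/".gz" when compression is enabled)

    // e.g. /tmp/output/result_20240102_173000_0_000_3.txt
    System.out.println(retval);
  }
}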
Example #21
  public static final synchronized TransMeta loadMappingMeta(
      MappingMeta mappingMeta, Repository rep, VariableSpace space) throws KettleException {
    TransMeta mappingTransMeta = null;

    switch (mappingMeta.getSpecificationMethod()) {
      case FILENAME:
        String realFilename = space.environmentSubstitute(mappingMeta.getFileName());
        try {
          // OK, load the meta-data from file...
          //
          // Don't set internal variables: they belong to the parent thread!
          //
          mappingTransMeta = new TransMeta(realFilename, space);
          mappingTransMeta
              .getLogChannel()
              .logDetailed(
                  "Loading Mapping from repository",
                  "Mapping transformation was loaded from XML file [" + realFilename + "]");
        } catch (Exception e) {
          throw new KettleException(
              BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToLoadMapping"), e);
        }
        break;

      case REPOSITORY_BY_NAME:
        String realTransname = space.environmentSubstitute(mappingMeta.getTransName());
        String realDirectory = space.environmentSubstitute(mappingMeta.getDirectoryPath());

        if (!Const.isEmpty(realTransname) && !Const.isEmpty(realDirectory) && rep != null) {
          RepositoryDirectoryInterface repdir = rep.findDirectory(realDirectory);
          if (repdir != null) {
            try {
              // reads the last revision in the repository...
              //
              mappingTransMeta = rep.loadTransformation(realTransname, repdir, null, true, null);
              mappingTransMeta
                  .getLogChannel()
                  .logDetailed(
                      "Loading Mapping from repository",
                      "Mapping transformation ["
                          + realTransname
                          + "] was loaded from the repository");
            } catch (Exception e) {
              throw new KettleException("Unable to load transformation [" + realTransname + "]", e);
            }
          } else {
            throw new KettleException(
                BaseMessages.getString(
                        PKG, "MappingMeta.Exception.UnableToLoadTransformation", realTransname)
                    + realDirectory); //$NON-NLS-1$ //$NON-NLS-2$
          }
        }
        break;

      case REPOSITORY_BY_REFERENCE:
        // Read the last revision by reference...
        mappingTransMeta = rep.loadTransformation(mappingMeta.getTransObjectId(), null);
        break;
    }

    // Pass some important information to the mapping transformation metadata:
    //
    mappingTransMeta.copyVariablesFrom(space);
    mappingTransMeta.setRepository(rep);
    mappingTransMeta.setFilename(mappingTransMeta.getFilename());

    return mappingTransMeta;
  }
Example #22
  public void getFields(
      RowMetaInterface row,
      String origin,
      RowMetaInterface info[],
      StepMeta nextStep,
      VariableSpace space)
      throws KettleStepException {
    // First load some interesting data...

    // Then see which fields get added to the row.
    //
    TransMeta mappingTransMeta = null;
    try {
      mappingTransMeta = loadMappingMeta(this, repository, space);
    } catch (KettleException e) {
      throw new KettleStepException(
          BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToLoadMappingTransformation"),
          e);
    }

    // The field structure may depend on the input parameters as well (think of parameter
    // replacements in MDX queries for instance)
    if (mappingParameters != null) {

      // See if we need to pass all variables from the parent or not...
      //
      if (mappingParameters.isInheritingAllVariables()) {
        mappingTransMeta.copyVariablesFrom(space);
      }

      // Just set the variables in the transformation statically.
      // This just means: set a number of variables or parameter values:
      //
      List<String> subParams = Arrays.asList(mappingTransMeta.listParameters());

      for (int i = 0; i < mappingParameters.getVariable().length; i++) {
        String name = mappingParameters.getVariable()[i];
        String value = space.environmentSubstitute(mappingParameters.getInputField()[i]);
        if (!Const.isEmpty(name) && !Const.isEmpty(value)) {
          if (subParams.contains(name)) {
            try {
              mappingTransMeta.setParameterValue(name, value);
            } catch (UnknownParamException e) {
              // this is explicitly checked for up front
            }
          }
          mappingTransMeta.setVariable(name, value);
        }
      }
    }

    // Keep track of all the fields that need renaming...
    //
    List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

    /*
     * Before we ask the mapping outputs anything, we should teach the mapping
     * input steps in the sub-transformation about the data coming in...
     */
    for (MappingIODefinition definition : inputMappings) {

      RowMetaInterface inputRowMeta;

      if (definition.isMainDataPath() || Const.isEmpty(definition.getInputStepname())) {
        // The row metadata, what we pass to the mapping input step
        // definition.getOutputStep(), is "row"
        // However, we do need to re-map some fields...
        //
        inputRowMeta = row.clone();
        if (!inputRowMeta.isEmpty()) {
          for (MappingValueRename valueRename : definition.getValueRenames()) {
            ValueMetaInterface valueMeta =
                inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
            if (valueMeta == null) {
              throw new KettleStepException(
                  BaseMessages.getString(
                      PKG,
                      "MappingMeta.Exception.UnableToFindField",
                      valueRename.getSourceValueName()));
            }
            valueMeta.setName(valueRename.getTargetValueName());
          }
        }
      } else {
        // The row metadata that goes to the info mapping input comes from the
        // specified step
        // In fact, it's one of the info steps that is going to contain this
        // information...
        //
        String[] infoSteps = getInfoSteps();
        int infoStepIndex = Const.indexOfString(definition.getInputStepname(), infoSteps);
        if (infoStepIndex < 0) {
          throw new KettleStepException(
              BaseMessages.getString(
                  PKG,
                  "MappingMeta.Exception.UnableToFindMetadataInfo",
                  definition.getInputStepname()));
        }
        if (info[infoStepIndex] != null) {
          inputRowMeta = info[infoStepIndex].clone();
        } else {
          inputRowMeta = null;
        }
      }

      // What is this mapping input step?
      //
      StepMeta mappingInputStep =
          mappingTransMeta.findMappingInputStep(definition.getOutputStepname());

      // We're certain it's a MappingInput step...
      //
      MappingInputMeta mappingInputMeta =
          (MappingInputMeta) mappingInputStep.getStepMetaInterface();

      // Inform the mapping input step about what it's going to receive...
      //
      mappingInputMeta.setInputRowMeta(inputRowMeta);

      // What values are we changing names for?
      //
      mappingInputMeta.setValueRenames(definition.getValueRenames());

      // Keep a list of the input rename values that need to be changed back at
      // the output
      //
      if (definition.isRenamingOnOutput())
        Mapping.addInputRenames(inputRenameList, definition.getValueRenames());
    }

    // All the mapping steps now know what they will be receiving.
    // That also means that the sub-transformation / mapping has everything it
    // needs.
    // So that means that the MappingOutput steps know exactly what the output
    // is going to be.
    // That could basically be anything.
    // It also could have absolutely no resemblance to what came in on the
    // input.
    // The relative old approach is therefore no longer suited.
    //
    // OK, but what we *can* do is have the MappingOutput step rename the
    // appropriate fields.
    // The mapping step will tell this step how it's done.
    //
    // Let's look for the mapping output step that is relevant for this actual
    // call...
    //
    MappingIODefinition mappingOutputDefinition = null;
    if (nextStep == null) {
      // This is the main step we read from...
      // Look up the main step to write to.
      // This is the output mapping definition with "main path" enabled.
      //
      for (MappingIODefinition definition : outputMappings) {
        if (definition.isMainDataPath() || Const.isEmpty(definition.getOutputStepname())) {
          // This is the definition to use...
          //
          mappingOutputDefinition = definition;
        }
      }
    } else {
      // Is there an output mapping definition for this step?
      // If so, we can look up the Mapping output step to see what has changed.
      //

      for (MappingIODefinition definition : outputMappings) {
        if (nextStep.getName().equals(definition.getOutputStepname())
            || definition.isMainDataPath()
            || Const.isEmpty(definition.getOutputStepname())) {
          mappingOutputDefinition = definition;
        }
      }
    }

    if (mappingOutputDefinition == null) {
      throw new KettleStepException(
          BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToFindMappingDefinition"));
    }

    // OK, now find the mapping output step in the mapping...
    // This method in TransMeta takes into account a number of things, such as
    // the step not specified, etc.
    // The method never returns null but throws an exception.
    //
    StepMeta mappingOutputStep =
        mappingTransMeta.findMappingOutputStep(mappingOutputDefinition.getInputStepname());

    // We know it's a mapping output step...
    MappingOutputMeta mappingOutputMeta =
        (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

    // Change a few columns.
    mappingOutputMeta.setOutputValueRenames(mappingOutputDefinition.getValueRenames());

    // Perhaps we need to change a few input columns back to the original?
    //
    mappingOutputMeta.setInputValueRenames(inputRenameList);

    // Now we know what's going to come out of there...
    // This is going to be the full row, including all the remapping, etc.
    //
    RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields(mappingOutputStep);

    row.clear();
    row.addRowMeta(mappingOutputRowMeta);
  }
 public int getBatchSizeInt(VariableSpace vars) {
   return Const.toInt(vars.environmentSubstitute(this.batchSize), DEFAULT_BATCH_SIZE);
 }
 public String[] getFilePaths(VariableSpace space) {
   return new String[] {
     space.environmentSubstitute(filename),
   };
 }
Example #25
 public String fieldSubstitute(String aString, RowMetaInterface rowMeta, Object[] rowData)
     throws KettleValueException {
   return variables.fieldSubstitute(aString, rowMeta, rowData);
 }
  public void getFields(
      RowMetaInterface rowMeta,
      String origin,
      RowMetaInterface[] info,
      StepMeta nextStep,
      VariableSpace space)
      throws KettleStepException {
    rowMeta.clear(); // Start with a clean slate, eats the input

    for (int i = 0; i < inputFields.length; i++) {
      TextFileInputField field = inputFields[i];

      ValueMetaInterface valueMeta = new ValueMeta(field.getName(), field.getType());
      valueMeta.setConversionMask(field.getFormat());
      valueMeta.setLength(field.getLength());
      valueMeta.setPrecision(field.getPrecision());
      valueMeta.setDecimalSymbol(field.getDecimalSymbol());
      valueMeta.setGroupingSymbol(field.getGroupSymbol());
      valueMeta.setCurrencySymbol(field.getCurrencySymbol());
      valueMeta.setTrimType(field.getTrimType());
      if (lazyConversionActive)
        valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
      valueMeta.setStringEncoding(space.environmentSubstitute(encoding));

      // In case we want to convert Strings...
      // Using a copy of the valueMeta object means that the inner and outer representation format
      // is the same.
      // Preview will show the data the same way as we read it.
      // This layout is then taken further down the road by the metadata through the transformation.
      //
      ValueMetaInterface storageMetadata = valueMeta.clone();
      storageMetadata.setType(ValueMetaInterface.TYPE_STRING);
      storageMetadata.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
      storageMetadata.setLength(
          -1, -1); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata(storageMetadata);

      valueMeta.setOrigin(origin);

      rowMeta.addValueMeta(valueMeta);
    }

    if (!Const.isEmpty(filenameField) && includingFilename) {
      ValueMetaInterface filenameMeta =
          new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING);
      filenameMeta.setOrigin(origin);
      if (lazyConversionActive) {
        filenameMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
        filenameMeta.setStorageMetadata(
            new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING));
      }
      rowMeta.addValueMeta(filenameMeta);
    }

    if (!Const.isEmpty(rowNumField)) {
      ValueMetaInterface rowNumMeta = new ValueMeta(rowNumField, ValueMetaInterface.TYPE_INTEGER);
      rowNumMeta.setLength(10);
      rowNumMeta.setOrigin(origin);
      rowMeta.addValueMeta(rowNumMeta);
    }
  }
Example #27
 /**
  * @param vs the variable space used to resolve the variable value; usually "this" for the
  *     calling step
  * @return the commitSize
  */
 public int getCommitSize(VariableSpace vs) {
   return Integer.parseInt(vs.environmentSubstitute(commitSize));
 }
Example #28
 /**
  * @param vs the variable space used to resolve the variable value; usually "this" for the
  *     calling step
  * @return the commitSize
  */
 public int getCommitSize(VariableSpace vs) {
   // this happens when the step is created via API and no setDefaults was called
   commitSize = (commitSize == null) ? "0" : commitSize;
   return Integer.parseInt(vs.environmentSubstitute(commitSize));
 }
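
// A sketch of the difference between the two parsing styles used in the getCommitSize() and
// getBatchSizeInt() examples above: Const.toInt() falls back to a default on blank or non-numeric
// input, while Integer.parseInt() throws. "COMMIT_SIZE" and "UNSET_VAR" are invented variable names.
import org.pentaho.di.core.Const;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

class CommitSizeParsingExample {
  public static void main(String[] args) {
    VariableSpace space = Variables.getADefaultVariableSpace();
    space.setVariable("COMMIT_SIZE", "1000");

    System.out.println(Integer.parseInt(space.environmentSubstitute("${COMMIT_SIZE}"))); // 1000
    System.out.println(Const.toInt(space.environmentSubstitute("${UNSET_VAR}"), 100));   // 100

    try {
      Integer.parseInt(space.environmentSubstitute("${UNSET_VAR}"));
    } catch (NumberFormatException e) {
      // An unresolved variable is left as the literal "${UNSET_VAR}" and fails to parse.
      System.out.println("parseInt failed: " + e.getMessage());
    }
  }
}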
Example #29
 public String[] environmentSubstitute(String[] aString) {
   return variables.environmentSubstitute(aString);
 }
Example #30
 public void copyVariablesFrom(VariableSpace space) {
   variables.copyVariablesFrom(space);
 }