@Override
  public void eventAdded(KettleLoggingEvent event) {

    try {
      Object messageObject = event.getMessage();
      if (messageObject instanceof LogMessage) {
        boolean logToFile = false;

        if (logChannelId == null) {
          logToFile = true;
        } else {
          LogMessage message = (LogMessage) messageObject;
          // This should be fast enough because the registry caches it.
          List<String> logChannelChildren =
              LoggingRegistry.getInstance().getLogChannelChildren(logChannelId);
          // This linear scan could be suboptimal; consider keeping the list sorted in the logging registry
          logToFile = Const.indexOfString(message.getLogChannelId(), logChannelChildren) >= 0;
        }

        if (logToFile) {
          String logText = layout.format(event);
          outputStream.write(logText.getBytes());
          outputStream.write(Const.CR.getBytes());
        }
      }
    } catch (Exception e) {
      exception =
          new KettleException("Unable to write logging event to file '" + filename + "'", e);
    }
  }
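
Every example on this page hinges on Const.indexOfString. As the comments above note, it is a linear scan; the following is a minimal standalone sketch of the presumed semantics (null-safe, first match wins, -1 when absent). The real Kettle implementation may differ in details.

import java.util.List;

// Minimal sketch of the presumed Const.indexOfString semantics: a null-safe
// linear scan returning the index of the first match, or -1 when absent.
// The real Kettle implementation may differ in details.
public final class IndexOfStringSketch {

  public static int indexOfString(String lookup, String[] haystack) {
    if (lookup == null || haystack == null) {
      return -1;
    }
    for (int i = 0; i < haystack.length; i++) {
      if (lookup.equals(haystack[i])) {
        return i;
      }
    }
    return -1;
  }

  public static int indexOfString(String lookup, List<String> haystack) {
    if (lookup == null || haystack == null) {
      return -1;
    }
    return haystack.indexOf(lookup); // List.indexOf is the same linear scan
  }

  public static void main(String[] args) {
    String[] conditions = {"=", "<>", "LIKE"};
    System.out.println(indexOfString("LIKE", conditions)); // 2
    System.out.println(indexOfString("IN", conditions)); // -1
  }
}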
  private void getParameters(TransMeta inputTransMeta) {
    try {
      if (inputTransMeta == null) {
        JobEntryTrans jet = new JobEntryTrans();
        getInfo(jet);
        inputTransMeta = jet.getTransMeta(rep, metaStore, jobMeta);
      }
      String[] parameters = inputTransMeta.listParameters();

      String[] existing = wParameters.getItems(1);

      for (int i = 0; i < parameters.length; i++) {
        if (Const.indexOfString(parameters[i], existing) < 0) {
          TableItem item = new TableItem(wParameters.table, SWT.NONE);
          item.setText(1, parameters[i]);
        }
      }
      wParameters.removeEmptyRows();
      wParameters.setRowNums();
      wParameters.optWidth(true);
    } catch (Exception e) {
      new ErrorDialog(
          shell,
          BaseMessages.getString(
              PKG, "JobEntryTransDialog.Exception.UnableToLoadTransformation.Title"),
          BaseMessages.getString(
              PKG, "JobEntryTransDialog.Exception.UnableToLoadTransformation.Message"),
          e);
    }
  }
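
Each Const.indexOfString call against the existing items is a linear scan, so adding many parameters is quadratic. Below is a hypothetical standalone sketch of the same "append only missing entries" pattern using a HashSet for O(1) containment checks; the names are illustrative, not Kettle API.

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

// Hypothetical sketch of the "append only missing entries" pattern above,
// using a HashSet so each containment check is O(1) instead of a linear scan.
public final class MissingEntriesSketch {
  public static Set<String> missingEntries(String[] candidates, String[] existing) {
    Set<String> existingSet = new HashSet<>(Arrays.asList(existing));
    Set<String> missing = new LinkedHashSet<>(); // preserves candidate order
    for (String candidate : candidates) {
      if (!existingSet.contains(candidate)) {
        missing.add(candidate);
      }
    }
    return missing;
  }

  public static void main(String[] args) {
    String[] params = {"YEAR", "MONTH", "DAY"};
    String[] existing = {"MONTH"};
    System.out.println(missingEntries(params, existing)); // [YEAR, DAY]
  }
}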
Example #3
  public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (DatabaseLookupMeta) smi;
    data = (DatabaseLookupData) sdi;

    if (super.init(smi, sdi)) {
      if (meta.getDatabaseMeta() == null) {
        logError(
            BaseMessages.getString(PKG, "DatabaseLookup.Init.ConnectionMissing", getStepname()));
        return false;
      }
      data.db = new Database(this, meta.getDatabaseMeta());
      data.db.shareVariablesWith(this);
      try {
        if (getTransMeta().isUsingUniqueConnections()) {
          synchronized (getTrans()) {
            data.db.connect(getTrans().getTransactionId(), getPartitionID());
          }
        } else {
          data.db.connect(getPartitionID());
        }

        data.db.setCommit(100); // we never get a commit, but it just turns off auto-commit.

        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "DatabaseLookup.Log.ConnectedToDatabase"));
        }

        // See if all the lookup conditions are "equal".
        // This can speed things up when we load all the data into the cache.
        //
        data.allEquals = true;
        data.hasDBCondition = false;
        data.conditions = new int[meta.getKeyCondition().length];
        for (int i = 0; i < meta.getKeyCondition().length; i++) {
          data.conditions[i] =
              Const.indexOfString(meta.getKeyCondition()[i], DatabaseLookupMeta.conditionStrings);
          if (!("=".equals(meta.getKeyCondition()[i]))) {
            data.allEquals = false;
          }
          if (data.conditions[i] == DatabaseLookupMeta.CONDITION_LIKE) {
            data.hasDBCondition = true;
          }
        }

        return true;
      } catch (Exception e) {
        logError(
            BaseMessages.getString(PKG, "DatabaseLookup.ERROR0004.UnexpectedErrorDuringInit")
                + e.toString());
        if (data.db != null) {
          data.db.disconnect();
        }
      }
    }
    return false;
  }
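
The loop above maps each configured operator onto its index in DatabaseLookupMeta.conditionStrings and derives two flags: allEquals (the lookup may be fully cacheable) and hasDBCondition (a LIKE forces the database to evaluate the condition). A standalone sketch follows; the condition table and CONDITION_LIKE index here are assumptions modeled on DatabaseLookupMeta, not its actual values.

public final class ConditionScanSketch {
  // Assumed condition table; the real one is DatabaseLookupMeta.conditionStrings.
  static final String[] CONDITION_STRINGS = {
    "=", "<>", "<", "<=", ">", ">=", "LIKE", "BETWEEN", "IS NULL", "IS NOT NULL"
  };
  static final int CONDITION_LIKE = 6; // index of "LIKE" above; an assumption

  public static void main(String[] args) {
    String[] keyConditions = {"=", "LIKE"};
    int[] conditions = new int[keyConditions.length];
    boolean allEquals = true;
    boolean hasDBCondition = false;
    for (int i = 0; i < keyConditions.length; i++) {
      conditions[i] = java.util.Arrays.asList(CONDITION_STRINGS).indexOf(keyConditions[i]);
      if (!"=".equals(keyConditions[i])) {
        allEquals = false;
      }
      if (conditions[i] == CONDITION_LIKE) {
        hasDBCondition = true;
      }
    }
    System.out.println("allEquals=" + allEquals + ", hasDBCondition=" + hasDBCondition);
    // prints: allEquals=false, hasDBCondition=true
  }
}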
  // List of available encodings
  private void loadEncodings() {

    wEncoding.removeAll();
    List<Charset> values = new ArrayList<Charset>(Charset.availableCharsets().values());
    for (int i = 0; i < values.size(); i++) {
      Charset charSet = values.get(i);
      wEncoding.add(charSet.displayName());
    }

    // Select the platform default from the environment
    String defEncoding = Const.getEnvironmentVariable("file.encoding", "UTF-8");
    int idx = Const.indexOfString(defEncoding, wEncoding.getItems());
    if (idx >= 0) {
      wEncoding.select(idx);
    }
  }
Example #5
  private void setEncodings() {
    // Encoding of the text file:
    if (!gotEncodings) {
      gotEncodings = true;

      wEncoding.removeAll();
      List<Charset> values = new ArrayList<Charset>(Charset.availableCharsets().values());
      for (int i = 0; i < values.size(); i++) {
        Charset charSet = values.get(i);
        wEncoding.add(charSet.displayName());
      }

      // Now select the default!
      String defEncoding = Const.getEnvironmentVariable("file.encoding", "UTF-8");
      int idx = Const.indexOfString(defEncoding, wEncoding.getItems());
      if (idx >= 0) {
        wEncoding.select(idx);
      }
    }
  }
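
Both encoding loaders above do the same thing: enumerate the JVM's charsets and pre-select the platform default. Here is a widget-free sketch of that logic, using System.getProperty as a stand-in for Const.getEnvironmentVariable and List.indexOf for Const.indexOfString.

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the encoding-combo logic, without the SWT widget.
public final class EncodingListSketch {
  public static void main(String[] args) {
    List<String> names = new ArrayList<>();
    for (Charset charset : Charset.availableCharsets().values()) {
      names.add(charset.displayName());
    }
    // Platform default from the environment, falling back to UTF-8.
    String defEncoding = System.getProperty("file.encoding", "UTF-8");
    int idx = names.indexOf(defEncoding); // the Const.indexOfString equivalent
    System.out.println(idx >= 0 ? "default at index " + idx : "default not listed");
  }
}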
  /**
   * Verifies on the specified database connection if an index exists on the fields with the
   * specified name.
   *
   * @param database a connected database
   * @param schemaName the schema to look in
   * @param tableName the table to check
   * @param idx_fields the names of the fields the index should cover
   * @return true if the index exists, false if it doesn't.
   * @throws KettleDatabaseException if the data dictionary cannot be queried
   */
  @Override
  public boolean checkIndexExists(
      Database database, String schemaName, String tableName, String[] idx_fields)
      throws KettleDatabaseException {

    String tablename =
        database.getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);

    boolean[] exists = new boolean[idx_fields.length]; // Java initializes these to false

    try {
      //
      // Get the info from the data dictionary...
      //
      StringBuilder sql = new StringBuilder(128);
      sql.append("select i.name table_name, c.name column_name ");
      sql.append("from     sysindexes i, sysindexkeys k, syscolumns c ");
      sql.append("where    i.name = '" + tablename + "' ");
      sql.append("AND      i.id = k.id ");
      sql.append("AND      i.id = c.id ");
      sql.append("AND      k.colid = c.colid ");

      ResultSet res = null;
      try {
        res = database.openQuery(sql.toString());
        if (res != null) {
          Object[] row = database.getRow(res);
          while (row != null) {
            String column = database.getReturnRowMeta().getString(row, "column_name", "");
            int idx = Const.indexOfString(column, idx_fields);
            if (idx >= 0) {
              exists[idx] = true;
            }

            row = database.getRow(res);
          }
        } else {
          return false;
        }
      } finally {
        if (res != null) {
          database.closeQuery(res);
        }
      }

      // See if all the fields are indexed...
      boolean all = true;
      for (int i = 0; i < exists.length && all; i++) {
        if (!exists[i]) {
          all = false;
        }
      }

      return all;
    } catch (Exception e) {
      throw new KettleDatabaseException(
          "Unable to determine if indexes exist on table [" + tablename + "]", e);
    }
  }
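
Note that the data-dictionary query above inlines tablename directly into the SQL string. The following is a hedged alternative using a plain JDBC PreparedStatement, bypassing Kettle's Database wrapper entirely, so the name is bound rather than concatenated; it mirrors the original query's join logic.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Sketch: same dictionary lookup as above, with the table name bound as a
// parameter instead of concatenated into the SQL text.
public final class IndexColumnsSketch {
  public static boolean[] columnsIndexed(Connection conn, String tableName, String[] idxFields)
      throws SQLException {
    boolean[] exists = new boolean[idxFields.length];
    String sql =
        "select i.name table_name, c.name column_name "
            + "from sysindexes i, sysindexkeys k, syscolumns c "
            + "where i.name = ? and i.id = k.id and i.id = c.id and k.colid = c.colid";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, tableName);
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          String column = rs.getString("column_name");
          for (int i = 0; i < idxFields.length; i++) {
            if (idxFields[i].equals(column)) {
              exists[i] = true; // this field is covered by the index
            }
          }
        }
      }
    }
    return exists;
  }
}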
Example #7
  public void getFields(
      RowMetaInterface row,
      String origin,
      RowMetaInterface[] info,
      StepMeta nextStep,
      VariableSpace space)
      throws KettleStepException {
    // First load some interesting data...

    // Then see which fields get added to the row.
    //
    TransMeta mappingTransMeta = null;
    try {
      mappingTransMeta = loadMappingMeta(this, repository, space);
    } catch (KettleException e) {
      throw new KettleStepException(
          BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToLoadMappingTransformation"),
          e);
    }

    // The field structure may depend on the input parameters as well (think of parameter
    // replacements in MDX queries for instance)
    if (mappingParameters != null) {

      // See if we need to pass all variables from the parent or not...
      //
      if (mappingParameters.isInheritingAllVariables()) {
        mappingTransMeta.copyVariablesFrom(space);
      }

      // Just set the variables in the transformation statically.
      // This just means: set a number of variables or parameter values:
      //
      List<String> subParams = Arrays.asList(mappingTransMeta.listParameters());

      for (int i = 0; i < mappingParameters.getVariable().length; i++) {
        String name = mappingParameters.getVariable()[i];
        String value = space.environmentSubstitute(mappingParameters.getInputField()[i]);
        if (!Const.isEmpty(name) && !Const.isEmpty(value)) {
          if (subParams.contains(name)) {
            try {
              mappingTransMeta.setParameterValue(name, value);
            } catch (UnknownParamException e) {
              // this is explicitly checked for up front
            }
          }
          mappingTransMeta.setVariable(name, value);
        }
      }
    }

    // Keep track of all the fields that need renaming...
    //
    List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

    /*
     * Before we ask the mapping outputs anything, we should teach the mapping
     * input steps in the sub-transformation about the data coming in...
     */
    for (MappingIODefinition definition : inputMappings) {

      RowMetaInterface inputRowMeta;

      if (definition.isMainDataPath() || Const.isEmpty(definition.getInputStepname())) {
        // The row metadata, what we pass to the mapping input step
        // definition.getOutputStep(), is "row"
        // However, we do need to re-map some fields...
        //
        inputRowMeta = row.clone();
        if (!inputRowMeta.isEmpty()) {
          for (MappingValueRename valueRename : definition.getValueRenames()) {
            ValueMetaInterface valueMeta =
                inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
            if (valueMeta == null) {
              throw new KettleStepException(
                  BaseMessages.getString(
                      PKG,
                      "MappingMeta.Exception.UnableToFindField",
                      valueRename.getSourceValueName()));
            }
            valueMeta.setName(valueRename.getTargetValueName());
          }
        }
      } else {
        // The row metadata that goes to the info mapping input comes from the
        // specified step
        // In fact, it's one of the info steps that is going to contain this
        // information...
        //
        String[] infoSteps = getInfoSteps();
        int infoStepIndex = Const.indexOfString(definition.getInputStepname(), infoSteps);
        if (infoStepIndex < 0) {
          throw new KettleStepException(
              BaseMessages.getString(
                  PKG,
                  "MappingMeta.Exception.UnableToFindMetadataInfo",
                  definition.getInputStepname()));
        }
        if (info[infoStepIndex] != null) {
          inputRowMeta = info[infoStepIndex].clone();
        } else {
          inputRowMeta = null;
        }
      }

      // What is this mapping input step?
      //
      StepMeta mappingInputStep =
          mappingTransMeta.findMappingInputStep(definition.getOutputStepname());

      // We're certain it's a MappingInput step...
      //
      MappingInputMeta mappingInputMeta =
          (MappingInputMeta) mappingInputStep.getStepMetaInterface();

      // Inform the mapping input step about what it's going to receive...
      //
      mappingInputMeta.setInputRowMeta(inputRowMeta);

      // What values are we changing names for?
      //
      mappingInputMeta.setValueRenames(definition.getValueRenames());

      // Keep a list of the input rename values that need to be changed back at
      // the output
      //
      if (definition.isRenamingOnOutput()) {
        Mapping.addInputRenames(inputRenameList, definition.getValueRenames());
      }
    }

    // All the mapping steps now know what they will be receiving.
    // That also means that the sub-transformation / mapping has everything it
    // needs.
    // So that means that the MappingOutput steps know exactly what the output
    // is going to be.
    // That could basically be anything.
    // It also could have absolutely no resemblance to what came in on the
    // input.
    // The relatively old approach is therefore no longer suited.
    //
    // OK, but what we *can* do is have the MappingOutput step rename the
    // appropriate fields.
    // The mapping step will tell this step how it's done.
    //
    // Let's look for the mapping output step that is relevant for this actual
    // call...
    //
    MappingIODefinition mappingOutputDefinition = null;
    if (nextStep == null) {
      // This is the main step we read from...
      // Look up the main step to write to.
      // This is the output mapping definition with "main path" enabled.
      //
      for (MappingIODefinition definition : outputMappings) {
        if (definition.isMainDataPath() || Const.isEmpty(definition.getOutputStepname())) {
          // This is the definition to use...
          //
          mappingOutputDefinition = definition;
        }
      }
    } else {
      // Is there an output mapping definition for this step?
      // If so, we can look up the Mapping output step to see what has changed.
      //

      for (MappingIODefinition definition : outputMappings) {
        if (nextStep.getName().equals(definition.getOutputStepname())
            || definition.isMainDataPath()
            || Const.isEmpty(definition.getOutputStepname())) {
          mappingOutputDefinition = definition;
        }
      }
    }

    if (mappingOutputDefinition == null) {
      throw new KettleStepException(
          BaseMessages.getString(PKG, "MappingMeta.Exception.UnableToFindMappingDefinition"));
    }

    // OK, now find the mapping output step in the mapping...
    // This method in TransMeta takes into account a number of things, such as
    // the step not specified, etc.
    // The method never returns null but throws an exception.
    //
    StepMeta mappingOutputStep =
        mappingTransMeta.findMappingOutputStep(mappingOutputDefinition.getInputStepname());

    // We know it's a mapping output step...
    MappingOutputMeta mappingOutputMeta =
        (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

    // Change a few columns.
    mappingOutputMeta.setOutputValueRenames(mappingOutputDefinition.getValueRenames());

    // Perhaps we need to change a few input columns back to the original?
    //
    mappingOutputMeta.setInputValueRenames(inputRenameList);

    // Now we know what's going to come out of there...
    // This is going to be the full row, including all the remapping, etc.
    //
    RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields(mappingOutputStep);

    row.clear();
    row.addRowMeta(mappingOutputRowMeta);
  }
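
The rename bookkeeping in getFields is symmetrical: renames applied on the way into the mapping are recorded in inputRenameList so they can be undone on the way out. A tiny illustrative sketch follows, with a plain Map standing in for Kettle's List<MappingValueRename>.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative sketch of the rename bookkeeping: renames applied on input are
// recorded so they can be inverted on output. The Map is a hypothetical
// stand-in for Kettle's MappingValueRename list.
public final class RenameSketch {
  public static void main(String[] args) {
    Map<String, String> inputRenames = new LinkedHashMap<>(); // source -> target
    inputRenames.put("customer_id", "id"); // renamed on the way into the mapping

    // ... the sub-transformation only ever sees the field as "id" ...

    // On output, invert each recorded rename to restore the original field name.
    for (Map.Entry<String, String> rename : inputRenames.entrySet()) {
      System.out.println("rename back: " + rename.getValue() + " -> " + rename.getKey());
    }
  }
}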
  public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    Object[] r = getRow(); // Get row from input rowset & set row busy!
    if (r == null) {
      // no more input to be expected...
      setOutputDone();
      return false;
    }

    if (first) {
      first = false;

      data.outputRowMeta = getInputRowMeta().clone();
      meta.getFields(data.outputRowMeta, getStepname(), null, null, this);

      data.schemaTable =
          meta.getDatabaseMeta()
              .getQuotedSchemaTableCombination(data.realSchemaName, data.realTableName);

      determineTechKeyCreation();

      // The indexes of the key values...
      //
      data.keynrs = new int[meta.getKeyField().length];

      for (int i = 0; i < meta.getKeyField().length; i++) {
        data.keynrs[i] = getInputRowMeta().indexOfValue(meta.getKeyField()[i]);
        if (data.keynrs[i] < 0) {
          // couldn't find the field!
          throw new KettleStepException(
              BaseMessages.getString(
                  PKG,
                  "CombinationLookup.Exception.FieldNotFound",
                  meta.getKeyField()[i]));
        }
      }

      // Determine for each input field if we want it removed or not.
      //
      data.removeField = new boolean[getInputRowMeta().size()];

      // Flag the input fields that are used as lookup keys so they can be
      // removed from the output later.
      //
      for (int i = 0; i < getInputRowMeta().size(); i++) {
        ValueMetaInterface valueMeta = getInputRowMeta().getValueMeta(i);
        // Is this one of the keys?
        int idx = Const.indexOfString(valueMeta.getName(), meta.getKeyField());
        data.removeField[i] = idx >= 0;
      }

      // Determine the metadata row to calculate hashcodes.
      //
      data.hashRowMeta = new RowMeta();
      for (int i = 0; i < meta.getKeyField().length; i++) {
        data.hashRowMeta.addValueMeta(getInputRowMeta().getValueMeta(data.keynrs[i])); // KEYi = ?
      }

      setCombiLookup(getInputRowMeta());
    }

    try {
      Object[] outputRow =
          lookupValues(getInputRowMeta(), r); // add new values to the row in rowset[0].
      putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s);

      if (checkFeedback(getLinesRead())) {
        if (log.isBasic()) {
          logBasic(
              BaseMessages.getString(PKG, "CombinationLookup.Log.LineNumber") + getLinesRead());
        }
      }
    } catch (KettleException e) {
      if (getStepMeta().isDoingErrorHandling()) {
        putError(getInputRowMeta(), r, 1L, Const.getStackTracker(e), null, "CBL001");
      } else {
        logError(
            BaseMessages.getString(PKG, "CombinationLookup.Log.ErrorInStepRunning")
                + e.getMessage());
        setErrors(1);
        stopAll();
        setOutputDone(); // signal end to receiver(s)
        return false;
      }
    }

    return true;
  }
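
The first-row block above resolves configured key field names to row indices once and fails fast on a missing field. The same idea as a standalone sketch, with a List<String> of field names standing in for Kettle's RowMetaInterface:

import java.util.Arrays;
import java.util.List;

// Sketch of the key-index resolution: translate configured key field names
// into row indices up front, failing fast on the first missing field.
public final class KeyIndexSketch {
  public static int[] resolveKeyIndexes(List<String> rowFields, String[] keyFields) {
    int[] keynrs = new int[keyFields.length];
    for (int i = 0; i < keyFields.length; i++) {
      keynrs[i] = rowFields.indexOf(keyFields[i]);
      if (keynrs[i] < 0) {
        throw new IllegalArgumentException("Field not found: " + keyFields[i]);
      }
    }
    return keynrs;
  }

  public static void main(String[] args) {
    int[] keynrs =
        resolveKeyIndexes(Arrays.asList("id", "name", "dt"), new String[] {"dt", "id"});
    System.out.println(Arrays.toString(keynrs)); // [2, 0]
  }
}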
Example #9
  @Override
  public String encode(AbstractMeta meta) throws Exception {
    TransMeta transMeta = (TransMeta) meta;

    mxGraph graph = new mxGraph();
    graph.getModel().beginUpdate();
    mxCell parent = (mxCell) graph.getDefaultParent();
    Document doc = mxUtils.createDocument();

    try {
      Element e = super.encodeCommRootAttr(transMeta, doc);
      e.setAttribute("trans_version", transMeta.getTransversion());
      e.setAttribute("trans_type", transMeta.getTransformationType().getCode());
      e.setAttribute("trans_status", String.valueOf(transMeta.getTransstatus()));

      // variables
      Properties sp = new Properties();
      JSONArray jsonArray = new JSONArray();

      String[] keys = Variables.getADefaultVariableSpace().listVariables();
      for (int i = 0; i < keys.length; i++) {
        sp.put(keys[i], Variables.getADefaultVariableSpace().getVariable(keys[i]));
      }

      List<String> vars = transMeta.getUsedVariables();
      for (int i = 0; i < vars.size(); i++) {
        String varname = vars.get(i);
        if (!varname.startsWith(Const.INTERNAL_VARIABLE_PREFIX)
            && Const.indexOfString(varname, transMeta.listParameters()) < 0) {
          JSONObject param = new JSONObject();
          param.put("var_name", varname);
          param.put("var_value", sp.getProperty(varname, ""));
          jsonArray.add(param);
        }
      }

      for (String varname : Const.INTERNAL_JOB_VARIABLES) {
        String value = transMeta.getVariable(varname);
        if (!Const.isEmpty(value)) {

          JSONObject param = new JSONObject();
          param.put("var_name", varname);
          param.put("var_value", value);
          jsonArray.add(param);
        }
      }
      e.setAttribute("variables", jsonArray.toString());

      TransLogTable transLogTable = transMeta.getTransLogTable();
      JSONObject jsonObject = new JSONObject();
      jsonObject.put("connection", transLogTable.getConnectionName());
      jsonObject.put("schema", transLogTable.getSchemaName());
      jsonObject.put("table", transLogTable.getTableName());
      jsonObject.put("size_limit_lines", transLogTable.getLogSizeLimit());
      jsonObject.put("interval", transLogTable.getLogInterval());
      jsonObject.put("timeout_days", transLogTable.getTimeoutInDays());
      JSONArray fields = new JSONArray();
      for (LogTableField field : transLogTable.getFields()) {
        JSONObject jsonField = new JSONObject();
        jsonField.put("id", field.getId());
        jsonField.put("enabled", field.isEnabled());
        jsonField.put("name", field.getFieldName());
        jsonField.put("subjectAllowed", field.isSubjectAllowed());
        if (field.isSubjectAllowed()) {
          jsonField.put("subject", field.getSubject() == null ? "" : field.getSubject().toString());
        } else {
          jsonField.put("subject", "-");
        }
        jsonField.put("description", StringEscapeHelper.encode(field.getDescription()));
        fields.add(jsonField);
      }
      jsonObject.put("fields", fields);
      e.setAttribute("transLogTable", jsonObject.toString());

      StepLogTable stepLogTable = transMeta.getStepLogTable();
      jsonObject = new JSONObject();
      jsonObject.put("connection", stepLogTable.getConnectionName());
      jsonObject.put("schema", stepLogTable.getSchemaName());
      jsonObject.put("table", stepLogTable.getTableName());
      jsonObject.put("timeout_days", stepLogTable.getTimeoutInDays());
      fields = new JSONArray();
      for (LogTableField field : stepLogTable.getFields()) {
        JSONObject jsonField = new JSONObject();
        jsonField.put("id", field.getId());
        jsonField.put("enabled", field.isEnabled());
        jsonField.put("name", field.getFieldName());
        jsonField.put("description", StringEscapeHelper.encode(field.getDescription()));
        fields.add(jsonField);
      }
      jsonObject.put("fields", fields);
      e.setAttribute("stepLogTable", jsonObject.toString());

      PerformanceLogTable performanceLogTable = transMeta.getPerformanceLogTable();
      jsonObject = new JSONObject();
      jsonObject.put("connection", performanceLogTable.getConnectionName());
      jsonObject.put("schema", performanceLogTable.getSchemaName());
      jsonObject.put("table", performanceLogTable.getTableName());
      jsonObject.put("interval", performanceLogTable.getLogInterval());
      jsonObject.put("timeout_days", performanceLogTable.getTimeoutInDays());
      fields = new JSONArray();
      for (LogTableField field : performanceLogTable.getFields()) {
        JSONObject jsonField = new JSONObject();
        jsonField.put("id", field.getId());
        jsonField.put("enabled", field.isEnabled());
        jsonField.put("name", field.getFieldName());
        jsonField.put("description", StringEscapeHelper.encode(field.getDescription()));
        fields.add(jsonField);
      }
      jsonObject.put("fields", fields);
      e.setAttribute("performanceLogTable", jsonObject.toString());

      MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable();
      jsonObject = new JSONObject();
      jsonObject.put("connection", metricsLogTable.getConnectionName());
      jsonObject.put("schema", metricsLogTable.getSchemaName());
      jsonObject.put("table", metricsLogTable.getTableName());
      jsonObject.put("timeout_days", metricsLogTable.getTimeoutInDays());
      fields = new JSONArray();
      for (LogTableField field : metricsLogTable.getFields()) {
        JSONObject jsonField = new JSONObject();
        jsonField.put("id", field.getId());
        jsonField.put("enabled", field.isEnabled());
        jsonField.put("name", field.getFieldName());
        jsonField.put("description", StringEscapeHelper.encode(field.getDescription()));
        fields.add(jsonField);
      }
      jsonObject.put("fields", fields);
      e.setAttribute("metricsLogTable", jsonObject.toString());

      jsonObject = new JSONObject();
      jsonObject.put(
          "connection",
          transMeta.getMaxDateConnection() == null
              ? ""
              : transMeta.getMaxDateConnection().getName());
      jsonObject.put("table", transMeta.getMaxDateTable());
      jsonObject.put("field", transMeta.getMaxDateField());
      jsonObject.put("offset", transMeta.getMaxDateOffset());
      jsonObject.put("maxdiff", transMeta.getMaxDateDifference());
      e.setAttribute("maxdate", jsonObject.toString());

      e.setAttribute("size_rowset", String.valueOf(transMeta.getSizeRowset()));
      e.setAttribute("sleep_time_empty", String.valueOf(transMeta.getSleepTimeEmpty()));
      e.setAttribute("sleep_time_full", String.valueOf(transMeta.getSleepTimeFull()));
      e.setAttribute("unique_connections", transMeta.isUsingUniqueConnections() ? "Y" : "N");
      e.setAttribute("feedback_shown", transMeta.isFeedbackShown() ? "Y" : "N");
      e.setAttribute("feedback_size", String.valueOf(transMeta.getFeedbackSize()));
      e.setAttribute(
          "using_thread_priorities", transMeta.isUsingThreadPriorityManagment() ? "Y" : "N");
      e.setAttribute(
          "capture_step_performance", transMeta.isCapturingStepPerformanceSnapShots() ? "Y" : "N");
      e.setAttribute(
          "step_performance_capturing_delay",
          String.valueOf(transMeta.getStepPerformanceCapturingDelay()));
      e.setAttribute(
          "step_performance_capturing_size_limit",
          transMeta.getStepPerformanceCapturingSizeLimit());

      super.encodeSlaveServers(e, transMeta);
      encodeClusterSchema(e, transMeta);
      encodePartitionSchema(e, transMeta);

      try {
        if (transMeta.getKey() != null) {
          e.setAttribute("key_for_session_key", XMLHandler.encodeBinaryData(transMeta.getKey()));
        } else {
          e.setAttribute("key_for_session_key", "");
        }
      } catch (Exception e1) {
        e1.printStackTrace();
        e.setAttribute("key_for_session_key", "");
      }
      e.setAttribute("is_key_private", transMeta.isPrivateKey() ? "Y" : "N");

      super.encodeNote(doc, graph, transMeta);

      super.encodeDatabases(e, transMeta);
      parent.setValue(e);

      // encode steps and hops
      HashMap<StepMeta, Object> cells = new HashMap<StepMeta, Object>();
      List<StepMeta> list = transMeta.getSteps();
      for (int i = 0; i < list.size(); i++) {
        StepMeta step = list.get(i);
        Point p = step.getLocation();
        StepEncoder stepEncoder = (StepEncoder) PluginFactory.getBean(step.getStepID());

        PluginInterface plugin =
            PluginRegistry.getInstance().getPlugin(StepPluginType.class, step.getStepID());
        Object cell =
            graph.insertVertex(
                parent,
                null,
                stepEncoder.encodeStep(step),
                p.x,
                p.y,
                40,
                40,
                "icon;image=" + SvgImageUrl.getUrl(plugin));
        cells.put(step, cell);
      }

      for (int i = 0; i < transMeta.nrTransHops(); i++) {
        TransHopMeta transHopMeta = transMeta.getTransHop(i);

        Object v1 = cells.get(transHopMeta.getFromStep());
        Object v2 = cells.get(transHopMeta.getToStep());

        graph.insertEdge(parent, null, TransHopMetaCodec.encode(transHopMeta), v1, v2);
      }
    } finally {
      graph.getModel().endUpdate();
    }

    mxCodec codec = new mxCodec();
    return mxUtils.getPrettyXml(codec.encode(graph.getModel()));
  }
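
The variable export above keeps only used variables that are neither internal (a prefix check) nor declared transformation parameters. A standalone sketch of that filter; the prefix constant here is an assumption standing in for Const.INTERNAL_VARIABLE_PREFIX.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Sketch of the variable filter: keep used variables that are neither internal
// nor declared parameters. The prefix value is an assumption.
public final class VariableFilterSketch {
  static final String INTERNAL_VARIABLE_PREFIX = "Internal."; // assumed value

  public static List<String> exportableVariables(List<String> used, String[] parameters) {
    List<String> params = Arrays.asList(parameters);
    List<String> result = new ArrayList<>();
    for (String name : used) {
      if (!name.startsWith(INTERNAL_VARIABLE_PREFIX) && !params.contains(name)) {
        result.add(name);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    System.out.println(
        exportableVariables(
            Arrays.asList("Internal.Transformation.Name", "TARGET_DB", "YEAR"),
            new String[] {"YEAR"})); // [TARGET_DB]
  }
}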
  protected boolean importJob(Node jobnode, RepositoryImportFeedbackInterface feedback)
      throws KettleException {
    // Load the job from the XML node.
    //
    JobMeta jobMeta = createJobMetaForNode(jobnode);
    feedback.setLabel(
        BaseMessages.getString(
            PKG,
            "RepositoryImporter.ImportJob.Label",
            Integer.toString(jobNumber),
            jobMeta.getName()));
    validateImportedElement(importRules, jobMeta);

    // What's the directory path?
    String directoryPath =
        Const.NVL(XMLHandler.getTagValue(jobnode, "directory"), Const.FILE_SEPARATOR);

    if (jobDirOverride != null) {
      directoryPath = jobDirOverride;
    }

    if (directoryPath.startsWith("/")) {
      // remove the leading root, we don't need it.
      directoryPath = directoryPath.substring(1);
    }

    // If we have a set of source directories to limit ourselves to, consider this.
    //
    if (limitDirs.size() > 0 && Const.indexOfString(directoryPath, limitDirs) < 0) {
      // Not in the limiting set of source directories, skip the import of this job...
      //
      feedback.addLog(
          BaseMessages.getString(
              PKG,
              "RepositoryImporter.SkippedJobNotPartOfLimitingDirectories.Log",
              jobMeta.getName()));
      return true;
    }

    RepositoryDirectoryInterface targetDirectory =
        getTargetDirectory(directoryPath, jobDirOverride, feedback);

    // OK, we loaded the job from XML and all went well...
    // See if the job already exists!
    ObjectId existingId = rep.getJobId(jobMeta.getName(), targetDirectory);
    if (existingId != null && askOverwrite) {
      overwrite = feedback.jobOverwritePrompt(jobMeta);
      askOverwrite = feedback.isAskingOverwriteConfirmation();
    } else {
      updateDisplay();
    }

    if (existingId == null || overwrite) {
      replaceSharedObjects(jobMeta);
      jobMeta.setRepositoryDirectory(targetDirectory);
      jobMeta.setObjectId(existingId);
      patchJobEntries(jobMeta);
      try {
        saveJobMeta(jobMeta);

        if (jobMeta.hasRepositoryReferences()) {
          referencingObjects.add(
              new RepositoryObject(
                  jobMeta.getObjectId(),
                  jobMeta.getName(),
                  jobMeta.getRepositoryDirectory(),
                  null,
                  null,
                  RepositoryObjectType.JOB,
                  null,
                  false));
        }

        feedback.addLog(
            BaseMessages.getString(
                PKG,
                "RepositoryImporter.JobSaved.Log",
                Integer.toString(jobNumber),
                jobMeta.getName()));
      } catch (Exception e) {
        feedback.addLog(
            BaseMessages.getString(
                PKG,
                "RepositoryImporter.ErrorSavingJob.Log",
                Integer.toString(jobNumber),
                jobMeta.getName(),
                Const.getStackTracker(e)));

        if (!feedback.askContinueOnErrorQuestion(
            BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Title"),
            BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Message"))) {
          return false;
        }
      }
    } else {
      feedback.addLog(
          BaseMessages.getString(
              PKG, "RepositoryImporter.SkippedExistingJob.Log", jobMeta.getName()));
    }
    return true;
  }
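
importJob and importTransformation share the same directory gate: strip the leading root from the path, then skip the element when a limiting set of source directories is configured and the path is not in it. A standalone sketch of that shared check:

import java.util.Arrays;
import java.util.List;

// Sketch of the shared directory gate used by both import methods.
public final class ImportDirGateSketch {
  public static boolean shouldImport(String directoryPath, List<String> limitDirs) {
    if (directoryPath.startsWith("/")) {
      // remove the leading root, we don't need it
      directoryPath = directoryPath.substring(1);
    }
    // No limiting set configured means everything is importable.
    return limitDirs.isEmpty() || limitDirs.contains(directoryPath);
  }

  public static void main(String[] args) {
    List<String> limitDirs = Arrays.asList("etl/jobs");
    System.out.println(shouldImport("/etl/jobs", limitDirs)); // true
    System.out.println(shouldImport("/etl/sandbox", limitDirs)); // false
  }
}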
  /**
   * @param transnode The XML DOM node to read the transformation from
   * @param feedback the feedback interface used to report progress and prompt the user
   * @return false if the import should be canceled.
   * @throws KettleException in case there is an unexpected error
   */
  protected boolean importTransformation(Node transnode, RepositoryImportFeedbackInterface feedback)
      throws KettleException {
    //
    // Load transformation from XML into a directory, possibly created!
    //
    TransMeta transMeta = createTransMetaForNode(transnode); // ignore shared objects
    feedback.setLabel(
        BaseMessages.getString(
            PKG,
            "RepositoryImporter.ImportTrans.Label",
            Integer.toString(transformationNumber),
            transMeta.getName()));

    validateImportedElement(importRules, transMeta);

    // What's the directory path?
    String directoryPath =
        Const.NVL(XMLHandler.getTagValue(transnode, "info", "directory"), Const.FILE_SEPARATOR);
    if (transDirOverride != null) {
      directoryPath = transDirOverride;
    }

    if (directoryPath.startsWith("/")) {
      // remove the leading root, we don't need it.
      directoryPath = directoryPath.substring(1);
    }

    // If we have a set of source directories to limit ourselves to, consider this.
    //
    if (limitDirs.size() > 0 && Const.indexOfString(directoryPath, limitDirs) < 0) {
      // Not in the limiting set of source directories, skip the import of this transformation...
      //
      feedback.addLog(
          BaseMessages.getString(
              PKG,
              "RepositoryImporter.SkippedTransformationNotPartOfLimitingDirectories.Log",
              transMeta.getName()));
      return true;
    }

    RepositoryDirectoryInterface targetDirectory =
        getTargetDirectory(directoryPath, transDirOverride, feedback);

    // OK, we loaded the transformation from XML and all went well...
    // See if the transformation already exists!
    ObjectId existingId = rep.getTransformationID(transMeta.getName(), targetDirectory);
    if (existingId != null && askOverwrite) {
      overwrite = feedback.transOverwritePrompt(transMeta);
      askOverwrite = feedback.isAskingOverwriteConfirmation();
    } else {
      updateDisplay();
    }

    if (existingId == null || overwrite) {
      replaceSharedObjects(transMeta);
      transMeta.setObjectId(existingId);
      transMeta.setRepositoryDirectory(targetDirectory);
      patchMappingSteps(transMeta);

      try {
        // Keep info on who & when this transformation was created...
        if (transMeta.getCreatedUser() == null || transMeta.getCreatedUser().equals("-")) {
          transMeta.setCreatedDate(new Date());
          if (rep.getUserInfo() != null) {
            transMeta.setCreatedUser(rep.getUserInfo().getLogin());
          } else {
            transMeta.setCreatedUser(null);
          }
        }
        saveTransMeta(transMeta);
        feedback.addLog(
            BaseMessages.getString(
                PKG,
                "RepositoryImporter.TransSaved.Log",
                Integer.toString(transformationNumber),
                transMeta.getName()));

        if (transMeta.hasRepositoryReferences()) {
          referencingObjects.add(
              new RepositoryObject(
                  transMeta.getObjectId(),
                  transMeta.getName(),
                  transMeta.getRepositoryDirectory(),
                  null,
                  null,
                  RepositoryObjectType.TRANSFORMATION,
                  null,
                  false));
        }

      } catch (Exception e) {
        feedback.addLog(
            BaseMessages.getString(
                PKG,
                "RepositoryImporter.ErrorSavingTrans.Log",
                Integer.toString(transformationNumber),
                transMeta.getName(),
                Const.getStackTracker(e)));

        if (!feedback.askContinueOnErrorQuestion(
            BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Title"),
            BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Message"))) {
          return false;
        }
      }
    } else {
      feedback.addLog(
          BaseMessages.getString(
              PKG, "RepositoryImporter.SkippedExistingTransformation.Log", transMeta.getName()));
    }
    return true;
  }