private void saveJobToRepo(JobMeta jobMeta, RepositoryImportFeedbackInterface feedback)
    throws KettleException {
  try {
    jobMeta.lookupRepositoryReferences(rep);
  } catch (LookupReferencesException e) {
    // Reference lookup may fail on exports produced before PDI-5294; surface the
    // problem in the feedback log but still go ahead and save the job.
    String errorLine =
        BaseMessages.getString(
            PKG,
            "RepositoryImporter.LookupRepoRefsError.Log",
            jobMeta.getName(),
            RepositoryObjectType.JOB);
    String causeLine =
        BaseMessages.getString(
            PKG,
            "RepositoryImporter.LookupRepoRefsError.Log.Cause",
            e.objectTypePairsToString());
    feedback.addLog(errorLine);
    feedback.addLog(causeLine);
  }
  rep.save(jobMeta, "import object reference specification", null);
}
  /**
   * Adds lineage metadata for the sub-job referenced by a Job Executor step.
   *
   * <p>Resolves the sub-job according to the step's specification method (file name,
   * repository path + name, or repository object reference), creates a metaverse node
   * for it, links the executor step node to it with an EXECUTES relationship, and
   * records result-target step properties on the step node.
   *
   * @param meta the Job Executor step metadata being analyzed
   * @param node the metaverse node representing the executor step
   * @throws MetaverseAnalyzerException if the sub-job cannot be located or loaded,
   *     or if the specification method is not handled
   */
  @Override
  protected void customAnalyze(JobExecutorMeta meta, IMetaverseNode node)
      throws MetaverseAnalyzerException {

    String jobPath = meta.getFileName();
    JobMeta subJobMeta = null;
    Repository repo = parentTransMeta.getRepository();

    switch (meta.getSpecificationMethod()) {
      case FILENAME:
        // Variables in the configured file name are resolved against the parent transformation.
        jobPath = parentTransMeta.environmentSubstitute(meta.getFileName());
        try {
          String normalized = KettleAnalyzerUtil.normalizeFilePath(jobPath);

          subJobMeta = getSubJobMeta(parentTransMeta, normalized);
          jobPath = normalized;

        } catch (Exception e) {
          // NOTE(review): message says "transformation" but this resolves a sub-job;
          // left unchanged in case the exact text is relied upon elsewhere.
          throw new MetaverseAnalyzerException(
              "Sub transformation can not be found - " + jobPath, e);
        }
        break;
      case REPOSITORY_BY_NAME:
        if (repo != null) {
          String dir = parentTransMeta.environmentSubstitute(meta.getDirectoryPath());
          String file = parentTransMeta.environmentSubstitute(meta.getJobName());
          try {
            RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
            subJobMeta = repo.loadJob(file, rdi, null, null);
            // Repository-loaded jobs may have no file name; fall back to toString().
            String filename =
                subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
            jobPath = filename + "." + subJobMeta.getDefaultExtension();
          } catch (KettleException e) {
            throw new MetaverseAnalyzerException(
                "Sub transformation can not be found in repository - " + file, e);
          }
        } else {
          throw new MetaverseAnalyzerException(
              "Not connected to a repository, can't get the transformation");
        }
        break;
      case REPOSITORY_BY_REFERENCE:
        if (repo != null) {
          try {
            subJobMeta = repo.loadJob(meta.getJobObjectId(), null);
            String filename =
                subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
            jobPath = filename + "." + subJobMeta.getDefaultExtension();
          } catch (KettleException e) {
            throw new MetaverseAnalyzerException(
                "Sub transformation can not be found by reference - " + meta.getJobObjectId(), e);
          }
        } else {
          throw new MetaverseAnalyzerException(
              "Not connected to a repository, can't get the transformation");
        }
        break;
      default:
        // Unhandled specification method: leave subJobMeta null and fail explicitly below.
        break;
    }

    // Guard against an unhandled specification method so we raise a meaningful
    // analyzer error here rather than an NPE on subJobMeta.getName() below.
    if (subJobMeta == null) {
      throw new MetaverseAnalyzerException(
          "Sub job could not be loaded for specification method - "
              + meta.getSpecificationMethod());
    }

    // Create a document-level node for the sub-job and link the executor step to it.
    IComponentDescriptor ds =
        new MetaverseComponentDescriptor(
            subJobMeta.getName(),
            DictionaryConst.NODE_TYPE_JOB,
            descriptor.getNamespace().getParentNamespace());

    IMetaverseNode jobNode = createNodeFromDescriptor(ds);
    jobNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
    jobNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    jobNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);

    metaverseBuilder.addLink(node, DictionaryConst.LINK_EXECUTES, jobNode);

    connectToSubJobOutputFields(meta, subJobMeta, jobNode, descriptor);

    node.setProperty(JOB_TO_EXECUTE, jobPath);

    // Record the target steps that receive the sub-job's execution results, if configured.
    if (StringUtils.isNotEmpty(meta.getExecutionResultTargetStep())) {
      node.setProperty(EXECUTION_RESULTS_TARGET, meta.getExecutionResultTargetStep());
    }

    /* TODO remove? if ( StringUtils.isNotEmpty( meta.getOutputRowsSourceStep() ) ) {
      node.setProperty( OUTPUT_ROWS_TARGET, meta.getOutputRowsSourceStep() );
    }*/

    if (StringUtils.isNotEmpty(meta.getResultFilesTargetStep())) {
      node.setProperty(RESULT_FILES_TARGET, meta.getResultFilesTargetStep());
    }
  }
  protected boolean importJob(Node jobnode, RepositoryImportFeedbackInterface feedback)
      throws KettleException {
    // Parse the job definition out of the export XML node and announce it.
    JobMeta jobMeta = createJobMetaForNode(jobnode);
    feedback.setLabel(
        BaseMessages.getString(
            PKG,
            "RepositoryImporter.ImportJob.Label",
            Integer.toString(jobNumber),
            jobMeta.getName()));
    validateImportedElement(importRules, jobMeta);

    // Destination path: the directory recorded in the export (root if absent),
    // unless an explicit override was supplied.
    String directoryPath =
        Const.NVL(XMLHandler.getTagValue(jobnode, "directory"), Const.FILE_SEPARATOR);

    if (jobDirOverride != null) {
      directoryPath = jobDirOverride;
    }

    // Directory lookups are relative to the root, so drop a leading separator.
    directoryPath = directoryPath.startsWith("/") ? directoryPath.substring(1) : directoryPath;

    // Honor the optional whitelist of source directories: anything outside it is skipped.
    if (!limitDirs.isEmpty() && Const.indexOfString(directoryPath, limitDirs) < 0) {
      feedback.addLog(
          BaseMessages.getString(
              PKG,
              "RepositoryImporter.SkippedJobNotPartOfLimitingDirectories.Log",
              jobMeta.getName()));
      return true;
    }

    RepositoryDirectoryInterface targetDirectory =
        getTargetDirectory(directoryPath, jobDirOverride, feedback);

    // The job parsed cleanly; check whether one with the same name already exists there.
    ObjectId existingId = rep.getJobId(jobMeta.getName(), targetDirectory);
    if (existingId != null && askOverwrite) {
      // Ask the user; they may also opt out of being asked again.
      overwrite = feedback.jobOverwritePrompt(jobMeta);
      askOverwrite = feedback.isAskingOverwriteConfirmation();
    } else {
      updateDisplay();
    }

    // Existing job and no permission to overwrite: log the skip and move on.
    if (existingId != null && !overwrite) {
      feedback.addLog(
          BaseMessages.getString(
              PKG, "RepositoryImporter.SkippedExistingJob.Log", jobMeta.getName()));
      return true;
    }

    replaceSharedObjects(jobMeta);
    jobMeta.setRepositoryDirectory(targetDirectory);
    jobMeta.setObjectId(existingId);
    patchJobEntries(jobMeta);
    try {
      saveJobMeta(jobMeta);

      // Jobs that reference other repository objects are remembered so those
      // references can be resolved after everything has been imported.
      if (jobMeta.hasRepositoryReferences()) {
        referencingObjects.add(
            new RepositoryObject(
                jobMeta.getObjectId(),
                jobMeta.getName(),
                jobMeta.getRepositoryDirectory(),
                null,
                null,
                RepositoryObjectType.JOB,
                null,
                false));
      }

      feedback.addLog(
          BaseMessages.getString(
              PKG,
              "RepositoryImporter.JobSaved.Log",
              Integer.toString(jobNumber),
              jobMeta.getName()));
    } catch (Exception e) {
      // Saving failed: log the stack trace and let the user decide whether to continue.
      feedback.addLog(
          BaseMessages.getString(
              PKG,
              "RepositoryImporter.ErrorSavingJob.Log",
              Integer.toString(jobNumber),
              jobMeta.getName(),
              Const.getStackTracker(e)));

      if (!feedback.askContinueOnErrorQuestion(
          BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Title"),
          BaseMessages.getString(PKG, "RepositoryImporter.DoYouWantToContinue.Message"))) {
        return false;
      }
    }
    return true;
  }