Example #1
  public void analyseImpact(
      List<DatabaseImpact> impact,
      TransMeta transMeta,
      StepMeta stepMeta,
      RowMetaInterface prev,
      String[] input,
      String[] output,
      RowMetaInterface info,
      Repository repository,
      IMetaStore metaStore)
      throws KettleStepException {
    if (prev != null) {
      // Lookup: we do a lookup on the natural keys
      for (int i = 0; i < keyLookup.length; i++) {
        ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);

        DatabaseImpact ii =
            new DatabaseImpact(
                DatabaseImpact.TYPE_IMPACT_READ,
                transMeta.getName(),
                stepMeta.getName(),
                databaseMeta.getDatabaseName(),
                tableName,
                keyLookup[i],
                keyStream[i],
                v != null ? v.getOrigin() : "?",
                "",
                v != null ? "Type = " + v.toStringMeta() : "");
        impact.add(ii);
      }

      // Insert update fields : read/write
      for (int i = 0; i < updateLookup.length; i++) {
        ValueMetaInterface v = prev.searchValueMeta(updateStream[i]);

        DatabaseImpact ii =
            new DatabaseImpact(
                DatabaseImpact.TYPE_IMPACT_READ_WRITE,
                transMeta.getName(),
                stepMeta.getName(),
                databaseMeta.getDatabaseName(),
                tableName,
                updateLookup[i],
                updateStream[i],
                v != null ? v.getOrigin() : "?",
                "",
                v != null ? "Type = " + v.toStringMeta() : "");
        impact.add(ii);
      }
    }
  }
Example #2
  public void analyseImpact(
      List<DatabaseImpact> impact,
      TransMeta transMeta,
      StepMeta stepMeta,
      RowMetaInterface prev,
      String[] input,
      String[] output,
      RowMetaInterface info) {
    // The keys are read-only...
    for (int i = 0; i < keyField.length; i++) {
      ValueMetaInterface v = prev.searchValueMeta(keyField[i]);
      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE,
              transMeta.getName(),
              stepMeta.getName(),
              databaseWriteMeta.getDatabaseName(),
              tablename,
              keyLookup[i],
              keyField[i],
              v != null ? v.getOrigin() : "?", //$NON-NLS-1$
              "", //$NON-NLS-1$
              useHash
                  ? Messages.getString("ConcurrentCombinationLookupMeta.ReadAndInsert.Label") //$NON-NLS-1$
                  : Messages.getString("ConcurrentCombinationLookupMeta.LookupAndInsert.Label")); //$NON-NLS-1$
      impact.add(ii);
    }

    // Do we lookup-on the hash-field?
    if (useHash) {
      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE,
              transMeta.getName(),
              stepMeta.getName(),
              databaseWriteMeta.getDatabaseName(),
              tablename,
              hashField,
              "", //$NON-NLS-1$
              "", //$NON-NLS-1$
              "", //$NON-NLS-1$
              Messages.getString("ConcurrentCombinationLookupMeta.KeyLookup.Label") // $NON-NLS-1$
              );
      impact.add(ii);
    }
  }
Example #3
  public void analyseImpact(
      List<DatabaseImpact> impact,
      TransMeta transMeta,
      StepMeta stepMeta,
      RowMetaInterface prev,
      String[] input,
      String[] output,
      RowMetaInterface info)
      throws KettleStepException {
    if (prev != null) {
      /* DEBUG CHECK THIS */
      // Insert dateMask fields : read/write
      for (int i = 0; i < fieldTable.length; i++) {
        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);

        DatabaseImpact ii =
            new DatabaseImpact(
                DatabaseImpact.TYPE_IMPACT_READ_WRITE,
                transMeta.getName(),
                stepMeta.getName(),
                databaseMeta.getDatabaseName(),
                transMeta.environmentSubstitute(tableName),
                fieldTable[i],
                fieldStream[i],
                v != null ? v.getOrigin() : "?",
                "",
                v != null ? "Type = " + v.toStringMeta() : ""); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        impact.add(ii);
      }
    }
  }
Example #4
  public void analyseImpact(
      List<DatabaseImpact> impact,
      TransMeta transMeta,
      StepMeta stepMeta,
      RowMetaInterface prev,
      String[] input,
      String[] output,
      RowMetaInterface info)
      throws KettleStepException {
    if (prev != null) {
      // Lookup: we do a lookup on the natural keys
      for (int i = 0; i < keyLookup.length; i++) {
        ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);

        DatabaseImpact ii =
            new DatabaseImpact(
                DatabaseImpact.TYPE_IMPACT_DELETE,
                transMeta.getName(),
                stepMeta.getName(),
                databaseMeta.getDatabaseName(),
                tableName,
                keyLookup[i],
                keyStream[i],
                v != null ? v.getOrigin() : "?",
                "",
                v != null ? "Type = " + v.toStringMeta() : ""); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        impact.add(ii);
      }
    }
  }
Example #5
  public void analyseImpact(
      List<DatabaseImpact> impact,
      TransMeta transMeta,
      StepMeta stepinfo,
      RowMetaInterface prev,
      String[] input,
      String[] output,
      RowMetaInterface info) {
    // The keys are read-only...
    for (int i = 0; i < streamKeyField1.length; i++) {
      ValueMetaInterface v = prev.searchValueMeta(streamKeyField1[i]);
      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ,
              transMeta.getName(),
              stepinfo.getName(),
              databaseMeta.getDatabaseName(),
              tablename,
              tableKeyField[i],
              streamKeyField1[i],
              v != null ? v.getOrigin() : "?", // $NON-NLS-1$
              "", //$NON-NLS-1$
              Messages.getString("DatabaseLookupMeta.Impact.Key") // $NON-NLS-1$
              );
      impact.add(ii);
    }

    // The Return fields are read-only too...
    for (int i = 0; i < returnValueField.length; i++) {
      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ,
              transMeta.getName(),
              stepinfo.getName(),
              databaseMeta.getDatabaseName(),
              tablename,
              returnValueField[i],
              "", //$NON-NLS-1$
              "", //$NON-NLS-1$
              "", //$NON-NLS-1$
              Messages.getString("DatabaseLookupMeta.Impact.ReturnValue") // $NON-NLS-1$
              );
      impact.add(ii);
    }
  }
Example #6
 public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) {
   List<ResourceReference> references = super.getResourceDependencies(jobMeta);
   if (connection != null) {
     ResourceReference reference = new ResourceReference(this);
     reference.getEntries().add(new ResourceEntry(connection.getHostname(), ResourceType.SERVER));
     reference
         .getEntries()
         .add(new ResourceEntry(connection.getDatabaseName(), ResourceType.DATABASENAME));
     references.add(reference);
   }
   return references;
 }
Example #7
 /** Adapted from KettleDatabaseRepositoryDatabaseDelegate.saveDatabaseMeta */
 protected boolean equals(DatabaseMeta databaseMeta, DatabaseMeta databaseMeta2) {
   if (!equals(databaseMeta.getName(), databaseMeta2.getName())) {
     return false;
   } else if (!equals(databaseMeta.getPluginId(), databaseMeta2.getPluginId())) {
     return false;
   } else if (!equals(databaseMeta.getAccessType(), databaseMeta2.getAccessType())) {
     return false;
   } else if (!equals(databaseMeta.getHostname(), databaseMeta2.getHostname())) {
     return false;
   } else if (!equals(databaseMeta.getDatabaseName(), databaseMeta2.getDatabaseName())) {
     return false;
   } else if (!equals(
       databaseMeta.getDatabasePortNumberString(), databaseMeta2.getDatabasePortNumberString())) {
     return false;
   } else if (!equals(databaseMeta.getUsername(), databaseMeta2.getUsername())) {
     return false;
   } else if (!equals(databaseMeta.getPassword(), databaseMeta2.getPassword())) {
     return false;
   } else if (!equals(databaseMeta.getServername(), databaseMeta2.getServername())) {
     return false;
   } else if (!equals(databaseMeta.getDataTablespace(), databaseMeta2.getDataTablespace())) {
     return false;
   } else if (!equals(databaseMeta.getIndexTablespace(), databaseMeta2.getIndexTablespace())) {
     return false;
   }
   Map<Object, Object> databaseMeta2Attributes =
       new HashMap<Object, Object>(databaseMeta2.getAttributes());
   for (Entry<Object, Object> databaseMetaEntry :
       new HashMap<Object, Object>(databaseMeta.getAttributes()).entrySet()) {
     Object value = databaseMeta2Attributes.remove(databaseMetaEntry.getKey());
     if (!equals(value, databaseMetaEntry.getValue())) {
       return false;
     }
   }
   if (databaseMeta2Attributes.size() > 0) {
     return false;
   }
   return true;
 }
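
The field-by-field comparison above delegates to an equals(Object, Object) overload that is not part of this excerpt. A minimal null-safe sketch of what such a helper could look like (the name, visibility, and placement are assumptions, not the repository's actual implementation):

  /** Null-safe equality check used by the comparison above (illustrative sketch only). */
  protected boolean equals(Object value, Object value2) {
    // Two nulls compare as equal; a single null never matches a non-null value.
    if (value == null) {
      return value2 == null;
    }
    return value.equals(value2);
  }
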
Example #8
  /**
   * Get the contents of the control file as specified in the meta object
   *
   * @param meta the meta object to model the control file after
   * @return a string containing the control file contents
   */
  public String getControlFileContents(GPLoadMeta meta, RowMetaInterface rm, Object[] r)
      throws KettleException {
    DatabaseMeta dm = meta.getDatabaseMeta();

    StringBuffer contents = new StringBuffer(500);

    // Source: GP Admin Guide 3.3.6, page 635:
    //
    contents.append("VERSION: 1.0.0.1").append(Const.CR);
    contents
        .append("DATABASE: ")
        .append(environmentSubstitute(dm.getDatabaseName()))
        .append(Const.CR);
    contents.append("USER: "******"HOST: ").append(environmentSubstitute(dm.getHostname())).append(Const.CR);
    contents
        .append("PORT: ")
        .append(environmentSubstitute(dm.getDatabasePortNumberString()))
        .append(Const.CR);
    contents.append("GPLOAD:").append(Const.CR);
    contents.append("   INPUT:").append(Const.CR);

    contents.append("    - SOURCE: ").append(Const.CR);

    // TODO: Stream to a temporary file and then bulk load OR optionally stream to a named pipe
    // (like MySQL bulk loader)
    // TODO: allow LOCAL_HOSTNAME/PORT/PORT_RANGE to be specified
    //
    String inputName = "'" + environmentSubstitute(meta.getDataFile()) + "'";
    contents.append("        FILE: ").append('[').append(inputName).append(']').append(Const.CR);

    // COLUMNS is optional, takes the existing fields in the table
    // contents.append("    - COLUMNS:").append(Const.CR);

    // See also page 155 for formatting information & escaping
    //
    contents.append("    - FORMAT: TEXT").append(Const.CR);
    contents
        .append("    - DELIMITER: '")
        .append(environmentSubstitute(meta.getDelimiter()))
        .append("'")
        .append(Const.CR);

    // TODO: implement escape character, null_as
    //
    // contents.append("    - ESCAPE:
    // '").append(environmentSubstitute(meta.getEscapeCharacter)).append("'").append(Const.CR);

    contents
        .append("    - QUOTE: '")
        .append(environmentSubstitute(meta.getEnclosure()))
        .append("'")
        .append(Const.CR);
    contents.append("    - HEADER: FALSE").append(Const.CR);

    // TODO: implement database encoding support
    // contents.append("    - ENCODING: ").append(Const.CR);

    contents.append("    - ERROR_LIMIT: ").append(meta.getMaxErrors()).append(Const.CR);

    if (!Const.isEmpty(meta.getErrorTableName())) {
      contents.append("    - ERROR_TABLE: ").append(meta.getErrorTableName()).append(Const.CR);
    }

    contents.append("   OUTPUT:").append(Const.CR);

    String tableName =
        dm.getQuotedSchemaTableCombination(
            environmentSubstitute(meta.getSchemaName()),
            environmentSubstitute(meta.getTableName()));

    contents.append("    - TABLE: ").append(tableName).append(Const.CR);
    contents.append("    - MODE: ").append(meta.getLoadAction()).append(Const.CR);

    // TODO: add support for MATCH_COLUMNS, UPDATE_COLUMN, UPDATE_CONDITION, MAPPING
    // TODO: add support for BEFORE and AFTER SQL

    /*
    String[] streamFields = meta.getFieldStream();
    String[] tableFields = meta.getFieldTable();

    if (streamFields == null || streamFields.length == 0) {
      throw new KettleException("No fields defined to load to database");
    }

    for (int i = 0; i < streamFields.length; i++) {
      if (i != 0) {
        contents.append(", ");
      }
      contents.append(dm.quoteField(tableFields[i]));
    }
    */

    return contents.toString();
  }
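
For orientation, the method above emits a YAML-style control file shaped roughly like the following. Every value shown here is an illustrative placeholder, not output captured from a real transformation:

VERSION: 1.0.0.1
DATABASE: mydb
USER: gpadmin
HOST: localhost
PORT: 5432
GPLOAD:
   INPUT:
    - SOURCE:
        FILE: ['/tmp/gpload_data.txt']
    - FORMAT: TEXT
    - DELIMITER: ';'
    - QUOTE: '"'
    - HEADER: FALSE
    - ERROR_LIMIT: 50
   OUTPUT:
    - TABLE: public.my_table
    - MODE: INSERT
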
Example #9
  /**
   * Saves the database information into a given repository.
   *
   * @param databaseMeta The database metadata object to store
   * @throws KettleException if an error occurs.
   */
  public void saveDatabaseMeta(DatabaseMeta databaseMeta) throws KettleException {
    try {
      // If we don't have an ID, we don't know which entry in the database we need to update.
      // See if a database with the same name is already available...
      if (databaseMeta.getObjectId() == null) {
        databaseMeta.setObjectId(getDatabaseID(databaseMeta.getName()));
      }

      // Still not found? --> Insert
      if (databaseMeta.getObjectId() == null) {
        // Insert new Note in repository
        //
        databaseMeta.setObjectId(
            insertDatabase(
                databaseMeta.getName(),
                databaseMeta.getPluginId(),
                DatabaseMeta.getAccessTypeDesc(databaseMeta.getAccessType()),
                databaseMeta.getHostname(),
                databaseMeta.getDatabaseName(),
                databaseMeta.getDatabasePortNumberString(),
                databaseMeta.getUsername(),
                databaseMeta.getPassword(),
                databaseMeta.getServername(),
                databaseMeta.getDataTablespace(),
                databaseMeta.getIndexTablespace()));
      } else {
        // --> found entry with the same name...

        // Update the note...
        updateDatabase(
            databaseMeta.getObjectId(),
            databaseMeta.getName(),
            databaseMeta.getPluginId(),
            DatabaseMeta.getAccessTypeDesc(databaseMeta.getAccessType()),
            databaseMeta.getHostname(),
            databaseMeta.getDatabaseName(),
            databaseMeta.getDatabasePortNumberString(),
            databaseMeta.getUsername(),
            databaseMeta.getPassword(),
            databaseMeta.getServername(),
            databaseMeta.getDataTablespace(),
            databaseMeta.getIndexTablespace());
      }

      // For the extra attributes, just delete them and re-add them.
      delDatabaseAttributes(databaseMeta.getObjectId());

      // OK, now get a list of all the attributes set on the database connection...
      //
      Properties attributes = databaseMeta.getAttributes();
      Enumeration<Object> keys = databaseMeta.getAttributes().keys();
      while (keys.hasMoreElements()) {
        String code = (String) keys.nextElement();
        String attribute = (String) attributes.get(code);

        // Save this attribute
        //
        insertDatabaseAttribute(databaseMeta.getObjectId(), code, attribute);
      }
    } catch (KettleDatabaseException dbe) {
      throw new KettleException(
          "Error saving database connection or one of its attributes to the repository.", dbe);
    }
  }
Example #10
  /**
   * Create the command line for an sqlldr process depending on the meta information supplied.
   *
   * @param meta The meta data to create the command line from
   * @param password Use the real password or not
   * @return The string to execute.
   * @throws KettleException Upon any exception
   */
  public String createCommandLine(OraBulkLoaderMeta meta, boolean password) throws KettleException {
    StringBuilder sb = new StringBuilder(300);

    if (meta.getSqlldr() != null) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getSqlldr()), getTransMeta());
        String sqlldr = KettleVFS.getFilename(fileObject);
        sb.append(sqlldr);
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving sqlldr string", ex);
      }
    } else {
      throw new KettleException("No sqlldr application specified");
    }

    if (meta.getControlFile() != null) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getControlFile()), getTransMeta());

        sb.append(" control=\'");
        sb.append(KettleVFS.getFilename(fileObject));
        sb.append("\'");
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving controlfile string", ex);
      }
    } else {
      throw new KettleException("No control file specified");
    }

    if (OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals(meta.getLoadMethod())) {
      sb.append(" data=\'-\'");
    }

    if (meta.getLogFile() != null) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getLogFile()), getTransMeta());

        sb.append(" log=\'");
        sb.append(KettleVFS.getFilename(fileObject));
        sb.append("\'");
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving logfile string", ex);
      }
    }

    if (meta.getBadFile() != null) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getBadFile()), getTransMeta());

        sb.append(" bad=\'");
        sb.append(KettleVFS.getFilename(fileObject));
        sb.append("\'");
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving badfile string", ex);
      }
    }

    if (meta.getDiscardFile() != null) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getDiscardFile()), getTransMeta());

        sb.append(" discard=\'");
        sb.append(KettleVFS.getFilename(fileObject));
        sb.append("\'");
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving discardfile string", ex);
      }
    }

    DatabaseMeta dm = meta.getDatabaseMeta();
    if (dm != null) {
      String user = Const.NVL(dm.getUsername(), "");
      String pass =
          Const.NVL(
              Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(dm.getPassword())), "");
      if (!password) {
        pass = "******";
      }
      String dns = Const.NVL(dm.getDatabaseName(), "");
      sb.append(" userid=")
          .append(environmentSubstitute(user))
          .append("/")
          .append(environmentSubstitute(pass))
          .append("@");

      String overrideName = meta.getDbNameOverride();
      if (Utils.isEmpty(Const.rtrim(overrideName))) {
        sb.append(environmentSubstitute(dns));
      } else {
        // if the database name override is filled in, do that one.
        sb.append(environmentSubstitute(overrideName));
      }
    } else {
      throw new KettleException("No connection specified");
    }

    if (meta.isDirectPath()) {
      sb.append(" DIRECT=TRUE");

      if (getStepMeta().getCopies() > 1 || meta.isParallel()) {
        sb.append(" PARALLEL=TRUE");
      }
    }

    return sb.toString();
  }
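
As a concrete illustration, a command line assembled by this method looks roughly like the line below. The sqlldr path, file names, user, and TNS name are made-up placeholders, and the password shows as asterisks because the method was called with password == false:

sqlldr control='/tmp/load.ctl' log='/tmp/load.log' bad='/tmp/load.bad' userid=scott/******@ORCL DIRECT=TRUE
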
Example #11
  /**
   * Create the command line for a sql process depending on the meta information supplied.
   *
   * @param meta The meta data to create the command line from
   * @return The string to execute.
   * @throws KettleException Upon any exception
   */
  public String createCommandLine(IngresVectorwiseLoaderMeta meta) throws KettleException {
    StringBuffer sb = new StringBuffer(300);

    if (!Const.isEmpty(meta.getSqlPath())) {
      try {
        FileObject fileObject =
            KettleVFS.getFileObject(environmentSubstitute(meta.getSqlPath()), getTransMeta());
        String sqlexec = Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject));
        sb.append(sqlexec);
        // sql @tc-dwh-test.timocom.net,tcp_ip,VW[ingres,pwd]::dwh
      } catch (KettleFileException ex) {
        throw new KettleException("Error retrieving command string", ex);
      }
    } else {
      if (meta.isUsingVwload()) {
        if (isDetailed()) logDetailed("vwload defaults to system path");
        sb.append("vwload");
      } else {
        if (isDetailed()) logDetailed("sql defaults to system path");
        sb.append("sql");
      }
    }

    DatabaseMeta dm = meta.getDatabaseMeta();
    if (dm != null) {
      String databaseName = environmentSubstitute(Const.NVL(dm.getDatabaseName(), ""));
      String password =
          Encr.decryptPasswordOptionallyEncrypted(
              environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getPassword(), "")));
      String port =
          environmentSubstitute(Const.NVL(dm.getDatabasePortNumberString(), "")).replace("7", "");
      String username =
          environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getUsername(), ""));
      String hostname =
          environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getHostname(), ""));
      String schemaTable =
          dm.getQuotedSchemaTableCombination(null, environmentSubstitute(meta.getTablename()));
      String encoding = environmentSubstitute(Const.NVL(meta.getEncoding(), ""));
      String fifoFile =
          Const.optionallyQuoteStringByOS(
              environmentSubstitute(Const.NVL(meta.getFifoFileName(), "")));
      String errorFile =
          Const.optionallyQuoteStringByOS(
              environmentSubstitute(Const.NVL(meta.getErrorFileName(), "")));
      int maxNrErrors =
          Const.toInt(environmentSubstitute(Const.NVL(meta.getMaxNrErrors(), "0")), 0);

      if (meta.isUsingVwload()) {
        sb.append(" -u ").append(username);
        sb.append(" -P ").append(password);
        sb.append(" -f ").append(meta.getDelimiter()).append("");
        sb.append(" -t ").append(schemaTable);

        if (!Const.isEmpty(encoding)) {
          sb.append(" -C ").append(encoding);
        }
        if (!Const.isEmpty(errorFile)) {
          sb.append(" -l ").append(errorFile);
        }
        if (maxNrErrors > 0) {
          sb.append(" -x ").append(maxNrErrors);
        }
        sb.append(" ").append(databaseName);
        sb.append(" ").append(fifoFile);

      } else if (meta.isUseDynamicVNode()) {
        // logical portname in JDBC use a 7

        sb.append(" @")
            .append(hostname)
            .append(",")
            .append(port)
            .append("[")
            .append(username)
            .append(",")
            .append(password)
            .append("]::")
            .append(databaseName);
      } else {
        // Database Name
        //
        sb.append(" ").append(databaseName);
        if (meta.isUseAuthentication()) {
          sb.append("-P").append(password);
        }
      }
    } else {
      throw new KettleException("No connection specified");
    }

    return sb.toString();
  }
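
As a rough illustration, the vwload branch above assembles a command along these lines; every value is a placeholder rather than something taken from a real configuration:

vwload -u ingres -P secret -f , -t mytable -l /tmp/load_errors.log -x 50 dwh /tmp/fifo0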