Example #1
  private void closeOutput() throws Exception {

    // Flush the rest of the buffer to disk!
    //
    if (data.byteBuffer.position() > 0) {
      data.byteBuffer.flip();
      data.fileChannel.write(data.byteBuffer);
    }

    // Close the fifo file...
    //
    data.fifoOpener.close();
    data.fileChannel = null;

    // wait for the load statement to finish and check for any
    // error and/or warning...
    //
    data.sqlRunner.join();
    SqlRunner sqlRunner = data.sqlRunner;
    data.sqlRunner = null;
    sqlRunner.checkExcn();

    data.sqlOutputStream.close();
    data.sqlOutputStream = null;
  }
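
The FifoOpener used above is not listed among these examples. Below is a minimal sketch consistent with the calls in Examples #1, #4 and #8 (start(), close(), getFileChannel()); the body is an assumption, not the actual Kettle class:

  import java.io.FileOutputStream;
  import java.io.IOException;
  import java.nio.channels.FileChannel;

  // Sketch of the FifoOpener helper; opens the FIFO for writing on its
  // own thread, because that open() blocks until a reader attaches.
  public class FifoOpener extends Thread {
    private FileOutputStream fileOutputStream = null;
    private FileChannel fileChannel = null;
    private final String fifoName;

    public FifoOpener(String fifoName) {
      this.fifoName = fifoName;
    }

    public void run() {
      try {
        fileOutputStream = new FileOutputStream(fifoName);
        fileChannel = fileOutputStream.getChannel();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    public FileChannel getFileChannel() {
      return fileChannel;
    }

    public void close() throws IOException {
      if (fileChannel != null && fileOutputStream != null) {
        fileChannel.force(true);
        fileOutputStream.close();
      }
    }
  }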
Example #2
  public boolean closeClientConnections(IngresVectorwiseLoaderData data) {
    // Close the output streams if still needed.
    //
    try {
      if (data.fifoOpener != null) {
        data.fifoOpener.close();
      }

      // Stop the SQL execution thread
      //
      if (data.sqlRunner != null) {
        data.sqlRunner.join();
        data.sqlRunner = null;
      }

      // remove the fifo file...
      //
      try {
        if (data.fifoFilename != null) {
          new File(data.fifoFilename).delete();
        }
      } catch (Exception e) {
        logError("Unable to delete FIFO file : " + data.fifoFilename, e);
      }
    } catch (Exception e) {
      setErrors(1L);
      logError("Unexpected error encountered while closing the client connection", e);
      return false;
    }
    return true;
  }
Example #3
 public void run() {
   try {
     data.sqlOutputStream.write(data.getBytes(loadCommand));
     data.sqlOutputStream.flush();
   } catch (Exception ex) {
     this.ex = ex;
   }
 }
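
This run() method belongs to the SqlRunner thread started in Example #5 and joined in Examples #1 and #2. A minimal sketch of the enclosing class, inferred from those call sites; the declaration details are assumptions:

  // Sketch of the SqlRunner thread; only the members exercised by the
  // other examples are shown.
  private class SqlRunner extends Thread {
    private final IngresVectorwiseLoaderData data;
    private final String loadCommand;
    private Exception ex;

    SqlRunner(IngresVectorwiseLoaderData data, String loadCommand) {
      this.data = data;
      this.loadCommand = loadCommand;
    }

    public void run() {
      try {
        // Push the load statement into the 'sql' process (Example #3).
        data.sqlOutputStream.write(data.getBytes(loadCommand));
        data.sqlOutputStream.flush();
      } catch (Exception ex) {
        this.ex = ex;
      }
    }

    // Re-throws any exception captured on this thread, so the caller
    // (closeOutput / writeRowToBulk) sees the real load error.
    void checkExcn() throws Exception {
      if (ex != null) {
        throw ex;
      }
    }
  }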
Example #4
  private void openFifoFile() throws Exception {

    // Ready to start writing rows to the FIFO file now...
    //
    logDetailed("Opening fifo file " + data.fifoFilename + " for writing.");
    data.fifoOpener = new FifoOpener(data.fifoFilename);
    data.fifoOpener.start();
  }
Example #5
  public boolean execute(IngresVectorwiseLoaderMeta meta) throws KettleException {
    Runtime rt = Runtime.getRuntime();

    try {
      // 1) Create the FIFO file using the "mkfifo" command...
      // Make sure to log all the possible output, also from STDERR
      //
      data.fifoFilename = environmentSubstitute(meta.getFifoFileName());

      File fifoFile = new File(data.fifoFilename);
      if (!fifoFile.exists()) {
        // MKFIFO!
        //
        String mkFifoCmd = "mkfifo " + data.fifoFilename;
        logDetailed("Creating FIFO file using this command : " + mkFifoCmd);
        Process mkFifoProcess = rt.exec(mkFifoCmd);
        StreamLogger errorLogger =
            new StreamLogger(log, mkFifoProcess.getErrorStream(), "mkFifoError");
        StreamLogger outputLogger =
            new StreamLogger(log, mkFifoProcess.getInputStream(), "mkFifoOutput");
        new Thread(errorLogger).start();
        new Thread(outputLogger).start();
        int result = mkFifoProcess.waitFor();
        if (result != 0) {
          throw new Exception("Return code " + result + " received from statement : " + mkFifoCmd);
        }

        String chmodCmd = "chmod 666 " + data.fifoFilename;
        logDetailed("Setting FIFO file permissings using this command : " + chmodCmd);
        Process chmodProcess = rt.exec(chmodCmd);
        errorLogger = new StreamLogger(log, chmodProcess.getErrorStream(), "chmodError");
        outputLogger = new StreamLogger(log, chmodProcess.getInputStream(), "chmodOutput");
        new Thread(errorLogger).start();
        new Thread(outputLogger).start();
        result = chmodProcess.waitFor();
        if (result != 0) {
          throw new Exception("Return code " + result + " received from statement : " + chmodCmd);
        }
      }

      // 2) Execute the Ingres "sql" command...
      //

      String cmd = createCommandLine(meta);

      try {
        // masquerading the password for log
        if (meta.isUseDynamicVNode()) {
          logDetailed(
              "Executing command: "
                  + cmd.substring(0, cmd.indexOf("["))
                  + "[username,password]"
                  + cmd.substring(cmd.indexOf("]") + 1));
        } else {
          logDetailed("Executing command: " + cmd);
        }
        data.sqlProcess = rt.exec(cmd);

        // any error message?
        //
        data.errorLogger = new StreamLogger(log, data.sqlProcess.getErrorStream(), "ERR_SQL");

        // any output?
        data.outputLogger = new StreamLogger(log, data.sqlProcess.getInputStream(), "OUT_SQL");

        // Where do we send the data to? --> To STDIN of the sql process
        //
        data.sqlOutputStream = data.sqlProcess.getOutputStream();

        // kick them off
        new Thread(data.errorLogger).start();
        new Thread(data.outputLogger).start();

      } catch (Exception ex) {
        throw new KettleException("Error while executing psql : " + cmd, ex);
      }

      logDetailed("Connected to VectorWise with the 'sql' command.");

      // OK, from here on, we need to feed the COPY command, followed by the
      // data, into the sqlOutputStream
      //
      String loadCommand = createLoadCommand();
      logDetailed("Executing command: " + loadCommand);
      data.sqlRunner = new SqlRunner(data, loadCommand);
      data.sqlRunner.start();

      logDetailed("LOAD TABLE command started");

      // Open a new fifo output stream, buffered.
      //
      openFifoFile();

      logDetailed("Fifo stream opened");

      // Wait until it all hooks up in the FIFO
      //
      waitForAConnection();

      logDetailed("Ready to start bulk loading!");
    } catch (Exception ex) {
      throw new KettleException(ex);
    }

    return true;
  }
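
The password-masking branch in step 2 relies on the credentials sitting between square brackets in the dynamic vnode connect string. Here is a standalone illustration of that substring surgery (the command string itself is hypothetical):

  // Hypothetical dynamic-vnode command; only the bracketed part matters here.
  String cmd = "sql @localhost,tcp_ip,VW[ingres,secret]::mydb";
  String masked = cmd.substring(0, cmd.indexOf("["))
      + "[username,password]"
      + cmd.substring(cmd.indexOf("]") + 1);
  System.out.println(masked);
  // Prints: sql @localhost,tcp_ip,VW[username,password]::mydb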
Example #6
  public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (IngresVectorwiseLoaderMeta) smi;
    data = (IngresVectorwiseLoaderData) sdi;

    if (super.init(smi, sdi)) {
      if (Const.isEmpty(meta.getDelimiter())) {
        data.separator = data.getBytes("|");
      } else {
        data.separator = data.getBytes(meta.getDelimiter());
      }

      data.newline = data.getBytes("\n");
      data.semicolon = data.getBytes(";");
      data.doubleQuote = data.getBytes("\"");

      // Schema-table combination...
      data.schemaTable =
          meta.getDatabaseMeta()
              .getQuotedSchemaTableCombination(null, environmentSubstitute(meta.getTablename()));

      data.encoding = environmentSubstitute(meta.getEncoding());
      data.isEncoding = !Const.isEmpty(environmentSubstitute(meta.getEncoding()));

      data.byteBuffer = null;

      String bufferSizeString = environmentSubstitute(meta.getBufferSize());
      data.bufferSize =
          Const.isEmpty(bufferSizeString) ? 5000 : Const.toInt(bufferSizeString, 5000);

      if (meta.isTruncatingTable() && meta.getDatabaseMeta() != null) {

        // Connect to Vectorwise over standard JDBC and truncate the table
        //
        Database db = new Database(this, meta.getDatabaseMeta());
        try {
          db.connect();
          db.execStatement(
              "CALL VECTORWISE( COMBINE '" + data.schemaTable + " - " + data.schemaTable + "' )");

          // Just to make sure VW gets the message
          //
          db.execStatement(
              "CALL VECTORWISE( COMBINE '" + data.schemaTable + " - " + data.schemaTable + "' )");
          log.logDetailed(
              "Table " + data.schemaTable + " was truncated using a 'combine' statement.");
        } catch (Exception e) {
          log.logError("Error truncating table", e);
          return false;
        } finally {
          db.disconnect();
        }
      }

      return true;
    }
    return false;
  }
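
The "COMBINE 'table - table'" call above is VectorWise's fast way of emptying a table without a row-by-row DELETE. For reference, a minimal standalone JDBC sketch issuing the same statement; URL, credentials and table name are placeholders:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.Statement;

  // Standalone sketch; connection details are placeholders, not a
  // verified configuration.
  public class TruncateViaCombine {
    public static void main(String[] args) throws Exception {
      try (Connection con = DriverManager.getConnection(
              "jdbc:ingres://localhost:VW7/mydb", "user", "password");
          Statement st = con.createStatement()) {
        // 'COMBINE table - table' removes every row of the table.
        st.execute("CALL VECTORWISE( COMBINE 'myschema.mytable - myschema.mytable' )");
      }
    }
  }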
Example #7
  private void writeRowToBulk(RowMetaInterface rowMeta, Object[] r) throws KettleException {

    try {
      // We have an output stream to which we can write the CSV data,
      // converting each field from its string form to bytes.
      //
      // The data format required is essentially "value|value|value|value".
      // With the SSV option enabled, the required format becomes
      // '"value";"value";"value"' instead.
      byte[] delimiter;
      if (meta.isUseSSV()) {
        delimiter = data.semicolon;
      } else {
        delimiter = data.separator;
      }

      for (int i = 0; i < data.keynrs.length; i++) {
        if (i > 0) {
          // Write a separator
          //
          write(delimiter);
        }

        int index = data.keynrs[i];
        ValueMetaInterface valueMeta = rowMeta.getValueMeta(index);
        Object valueData = r[index];

        if (valueData == null) {
          // Don't output anything for null
          //
        } else {
          if (valueMeta.isStorageBinaryString()) {
            byte[] value = valueMeta.getBinaryString(valueData);
            write(value);
          } else {
            // We're using the bulk row metadata so dates and numerics should be in the correct
            // format now...
            //
            String string = valueMeta.getString(valueData);
            if (string != null) {
              // support of SSV feature
              //
              if (meta.isUseSSV()) {

                // replace " in string fields
                //
                if (meta.isEscapingSpecialCharacters() && valueMeta.isString()) {

                  StringBuilder builder = new StringBuilder(string);
                  String[] escapeStrings =
                      new String[] {
                        "\"", "\n", "\r",
                      };
                  String[] replaceStrings =
                      new String[] {
                        "\\\"", "\\n", "\\r",
                      };
                  for (int e = 0; e < escapeStrings.length; e++) {
                    String chr = escapeStrings[e];
                    String rep = replaceStrings[e];
                    int idx = builder.indexOf(chr, 0);
                    // use >= 0 so a match at position 0 is escaped as well
                    while (idx >= 0) {
                      builder.replace(idx, idx + chr.length(), rep);
                      idx = builder.indexOf(chr, idx + rep.length());
                    }
                  }
                  string = builder.toString();
                }
                write(data.doubleQuote);
                write(data.getBytes(string));
                write(data.doubleQuote);
              } else {
                write(data.getBytes(string));
              }
            }
          }
        }
      }

      // finally write a newline
      //
      write(data.newline);
    } catch (Exception e) {
      // If something went wrong with the import,
      // report that error instead of a generic "Broken pipe"
      try {
        data.sqlRunner.checkExcn();
      } catch (Exception loadEx) {
        throw new KettleException("Error serializing rows of data to the fifo file", loadEx);
      }

      throw new KettleException("Error serializing rows of data to the fifo file", e);
    }
  }
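
The write(byte[]) helper used throughout this method is not listed; judging by Example #1, which flushes the leftover buffer on close, it fills data.byteBuffer and flushes to the FIFO channel when the next chunk would not fit. A sketch under that assumption:

  import java.io.IOException;

  // Sketch of the buffered write helper; flushes the ByteBuffer to the
  // FIFO's FileChannel whenever the next chunk would overflow it.
  private void write(byte[] content) throws IOException {
    if (content == null || content.length == 0) {
      return;
    }
    if (data.byteBuffer.remaining() < content.length) {
      data.byteBuffer.flip();
      data.fileChannel.write(data.byteBuffer);
      data.byteBuffer.clear();
    }
    // Assumes a single value never exceeds the buffer size itself.
    data.byteBuffer.put(content);
  }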
Example #8
  public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (IngresVectorwiseLoaderMeta) smi;
    data = (IngresVectorwiseLoaderData) sdi;

    try {
      Object[] r = getRow(); // Get row from input rowset & set row busy!
      if (r == null) // no more input to be expected...
      {
        setOutputDone();
        // only close the output after at least one row was processed,
        // to prevent an error (NPE) on an empty row set
        if (!first) {
          closeOutput();
        }

        return false;
      }

      if (first) {
        first = false;

        // Cache field indexes.
        //
        data.keynrs = new int[meta.getFieldStream().length];
        for (int i = 0; i < data.keynrs.length; i++) {
          data.keynrs[i] = getInputRowMeta().indexOfValue(meta.getFieldStream()[i]);
        }
        data.bulkRowMeta = getInputRowMeta().clone();
        if (meta.isUseStandardConversion()) {
          for (int i = 0; i < data.bulkRowMeta.size(); i++) {
            ValueMetaInterface valueMeta = data.bulkRowMeta.getValueMeta(i);
            if (valueMeta.isStorageNormal()) {
              if (valueMeta.isDate()) {
                valueMeta.setConversionMask("yyyy-MM-dd HH:mm:ss");
              } else if (valueMeta.isNumeric()) {
                valueMeta.setDecimalSymbol(".");
                valueMeta.setGroupingSymbol("");
              }
            }
          }
        }

        // execute the client statement...
        //
        execute(meta);

        // Allocate a buffer
        //
        data.fileChannel = data.fifoOpener.getFileChannel();
        data.byteBuffer = ByteBuffer.allocate(data.bufferSize);
      }

      // check if SQL process is still running before processing row
      if (!checkSqlProcessRunning(data.sqlProcess)) {
        throw new Exception("Ingres SQL process has stopped");
      }

      writeRowToBulk(data.bulkRowMeta, r);
      putRow(getInputRowMeta(), r);
      incrementLinesOutput();

      if (checkFeedback(getLinesOutput()))
        logBasic(
            BaseMessages.getString(PKG, "IngresVectorwiseLoader.Log.LineNumber")
                + getLinesOutput()); //$NON-NLS-1$

      return true;

    } catch (Exception e) {
      logError(
          BaseMessages.getString(PKG, "IngresVectorwiseLoader.Log.ErrorInStep"), e); // $NON-NLS-1$
      setErrors(1);
      stopAll();
      setOutputDone(); // signal end to receiver(s)
      return false;
    }
  }
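
checkSqlProcessRunning(...) is called above but not listed. On JVMs without Process.isAlive() (pre-Java 8), the common idiom is to probe exitValue(), which throws while the process is still alive; a sketch under that assumption:

  // Sketch: Process.exitValue() throws IllegalThreadStateException
  // while the process is still running, so the catch means "alive".
  private boolean checkSqlProcessRunning(Process process) {
    try {
      process.exitValue();
      return false; // the process has already terminated
    } catch (IllegalThreadStateException e) {
      return true; // still running
    }
  }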