/**
  * Creates a database writer based on the given configuration.
  *
  * @param config loader configuration, used to resolve the path to the database configuration file
  * @param dbConfigName name of the database configuration entry to load
  * @throws DataAccessObjectInitializationException if the database configuration file cannot be found
  */
 DatabaseWriter(Config config, String dbConfigName)
     throws DataAccessObjectInitializationException {
   this.config = config;
   String dbConfigFilename =
       config.constructConfigFilePath(DatabaseContext.DEFAULT_CONFIG_FILENAME);
   if (!(new File(dbConfigFilename).exists())) {
     throw new DataAccessObjectInitializationException(
         Messages.getFormattedString(
             "DatabaseDAO.errorConfigFileExists", dbConfigFilename)); // $NON-NLS-1$
   }
   DatabaseConfig dbConfig = DatabaseConfig.getInstance(dbConfigFilename, dbConfigName);
   dataSource = dbConfig.getDataSource();
   sqlConfig = dbConfig.getSqlConfig();
   dbContext = new DatabaseContext(dbConfigName);
 }
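A minimal caller sketch; the configuration name "accountInsert" and the surrounding variables are hypothetical and only illustrate how the constructor's file-existence check surfaces to the caller:

 // Hypothetical usage; "accountInsert" would have to match an entry in the database configuration file.
 try {
   DatabaseWriter writer = new DatabaseWriter(config, "accountInsert");
 } catch (DataAccessObjectInitializationException e) {
   // thrown when the database configuration file cannot be found
   logger.error(e.getMessage(), e);
 }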
Example #2
 /*
  * (non-Javadoc)
  * @see com.salesforce.dataloader.dao.DataAccessObject#open()
  */
 public void open() throws DataAccessObjectInitializationException {
   try {
     if (!useDefaultEncoding) {
       fileOut =
           new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), encoding));
     } else {
       fileOut = new BufferedWriter(new FileWriter(fileName));
     }
     currentRowNumber = 0;
     setOpen(true);
   } catch (IOException e) {
     String errMsg = Messages.getFormattedString("CSVWriter.errorOpening", fileName);
     logger.error(errMsg, e); // $NON-NLS-1$
     throw new DataAccessObjectInitializationException(errMsg, e); // $NON-NLS-1$
   }
 }
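The same encoding-aware setup can be sketched with java.nio; this is an alternative illustration reusing the fileName, encoding, and useDefaultEncoding fields above, not the Data Loader's own code:

 // Alternative sketch using java.nio.file instead of OutputStreamWriter:
 Writer out = useDefaultEncoding
     ? new BufferedWriter(new FileWriter(fileName))
     : Files.newBufferedWriter(Paths.get(fileName), Charset.forName(encoding));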
 /**
  * Rolls back the current transaction after a batch fails.
  *
  * @param batchSize number of rows in the failed batch, used to report the affected row range
  */
 private void endException(int batchSize) {
    // Roll back the current transaction; this only has an effect when auto-commit is disabled
   try {
     dbContext.getDataConnection().rollback();
   } catch (SQLException sqe) {
     logger.error(
         Messages.getFormattedString(
             "DatabaseDAO.sqlExceptionRollback",
             new String[] {
               String.valueOf(currentRowNumber + 1 - batchSize),
               String.valueOf(currentRowNumber + 1),
               dbContext.getDbConfigName(),
               sqe.getMessage()
             }),
         sqe);
   }
 }
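For context, the commit/rollback pattern that this helper supports looks like the following in plain JDBC. The connection URL, table, and SQL below are placeholders for illustration, not values taken from the Data Loader configuration:

 try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
   conn.setAutoCommit(false);
   conn.createStatement().execute("CREATE TABLE t (c INT)");
   try (PreparedStatement stmt = conn.prepareStatement("INSERT INTO t VALUES (?)")) {
     stmt.setInt(1, 42);
     stmt.executeUpdate();
     conn.commit();
   } catch (SQLException e) {
     conn.rollback(); // undo the partial work, mirroring endException()
   }
 }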
Example #4
 /**
  * Validates column names and returns a warning message if validation fails.
  *
  * @param dao data access object whose column names are validated
  * @return a validation warning message, or null if all columns are valid
  */
 public static String validateColumns(DataAccessObject dao) {
   HashSet<String> uniqueHeaders = new HashSet<String>();
   String warning = null;
   for (String header : dao.getColumnNames()) {
     if (header == null || header.length() == 0) {
       warning = Messages.getString("RowUtil.warningEmptyColumn"); // $NON-NLS-1$
       break;
     } else if (uniqueHeaders.contains(header)) {
       warning =
           Messages.getFormattedString("RowUtil.warningDuplicateColumn", header); // $NON-NLS-1$
       break;
     }
     uniqueHeaders.add(header);
   }
   if (warning != null) {
     logger.warn(warning);
   }
   return warning;
 }
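A self-contained illustration of the same header checks against a plain list; the column names are made up for the example:

 List<String> headers = Arrays.asList("Id", "Name", "Name", "Email");
 Set<String> seen = new HashSet<>();
 for (String h : headers) {
   if (h == null || h.isEmpty()) {
     System.out.println("warning: empty column header");
     break;
   } else if (!seen.add(h)) {
     System.out.println("warning: duplicate column header: " + h); // prints for "Name"
     break;
   }
 }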
Example #5
  /**
   * Gets the next row from the current data access object data source. <i>Side effect:</i> Updates
   * the current record number
   */
  @Override
  public Row readRow() throws DataAccessObjectException {
    if (!isOpen) {
      open();
    }

    List<String> record;
    synchronized (lock) {
      try {
        record = csvReader.nextRecord();
      } catch (IOException e) {
        throw new DataAccessObjectException(e);
      }
    }

    if (!DAORowUtil.isValidRow(record)) {
      return null;
    }

    if (record.size() > headerRow.size()) {
      String errMsg =
          Messages.getFormattedString(
              "CSVFileDAO.errorRowTooLarge",
              new String[] {
                String.valueOf(currentRowNumber),
                String.valueOf(record.size()),
                String.valueOf(headerRow.size())
              });
      throw new DataAccessRowException(errMsg);
    }

    Row row = new Row(record.size());

    for (int i = 0; i < headerRow.size(); i++) {
      String value = record.get(i);
      if (value == null) {
        value = "";
      }
      row.put(headerRow.get(i), value);
    }
    currentRowNumber++;
    return row;
  }
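A typical read loop against this method; the reader variable and the process() call are hypothetical. The loop stops at the first null, which readRow() returns once no valid record remains:

 Row row;
 while ((row = reader.readRow()) != null) {
   // each Row maps a header column name to the corresponding cell value
   process(row);
 }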
Example #6
 private void initalizeInput() throws DataAccessObjectInitializationException {
   try {
     input = new FileInputStream(file);
     if (forceUTF8 || isUTF8File(file)) {
       csvReader = new CSVReader(input, "UTF-8", new char[] {',', '\t'});
     } else {
       csvReader = new CSVReader(input, new char[] {',', '\t'});
     }
     csvReader.setMaxRowsInFile(Integer.MAX_VALUE);
     csvReader.setMaxCharsInFile(Integer.MAX_VALUE);
   } catch (FileNotFoundException e) {
     String errMsg = Messages.getFormattedString("CSVFileDAO.errorOpen", file.getAbsolutePath());
     LOGGER.error(errMsg, e);
     throw new DataAccessObjectInitializationException(errMsg, e);
   } catch (UnsupportedEncodingException e) {
     String errMsg = Messages.getString("CSVFileDAO.errorUnsupportedEncoding");
     LOGGER.error(errMsg, e);
     throw new DataAccessObjectInitializationException(errMsg, e);
   } finally {
     if (csvReader == null) {
       IOUtils.closeQuietly(input);
     }
   }
 }
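isUTF8File(file) is referenced above but not shown; one common way to implement such a check is to look for a UTF-8 byte-order mark. The helper below is purely an assumption, not the actual implementation:

 // Hypothetical BOM check; the real isUTF8File() may use different logic.
 private static boolean hasUtf8Bom(File f) throws IOException {
   try (FileInputStream in = new FileInputStream(f)) {
     byte[] bom = new byte[3];
     return in.read(bom) == 3
         && bom[0] == (byte) 0xEF
         && bom[1] == (byte) 0xBB
         && bom[2] == (byte) 0xBF;
   }
 }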
  /*
   * (non-Javadoc)
   * @see com.salesforce.dataloader.dao.DataWriter#writeRowList(java.util.List)
   */
  @Override
  public boolean writeRowList(List<Row> inputRowList) throws DataAccessObjectException {

    // make sure that the update is set up and ready to go; otherwise stop
    if (!dbContext.isOpen()) {
      throw new DataAccessObjectInitializationException(
          Messages.getString("DatabaseDAO.errorUpdateNotOpen"));
    }

    boolean success = true;
    int startingRowNumber = currentRowNumber;

    try {
      // for a batch size of 1, skip batching; this gives much better error output
      if (inputRowList.size() == 1) {
        dbContext.setSqlParamValues(sqlConfig, config, inputRowList.get(0));
        currentRowNumber++;
      } else {
        // for each row set the Sql params in the prepared statement
        dbContext.getDataStatement().clearBatch();
        for (Row inputRow : inputRowList) {
          dbContext.setSqlParamValues(sqlConfig, config, inputRow);
          dbContext.getDataStatement().addBatch();
          currentRowNumber++;
        }
      }
    } catch (ParameterLoadException e) {
      throw new DataAccessObjectException(e.getMessage(), e);
    } catch (SQLException sqe) {
      String errMsg =
          Messages.getFormattedString(
              "DatabaseDAO.sqlExceptionPrepareRow",
              new String[] {
                String.valueOf(currentRowNumber + 1),
                String.valueOf(startingRowNumber + 1),
                String.valueOf(startingRowNumber + inputRowList.size() + 1),
                dbContext.getDbConfigName(),
                sqe.getMessage()
              });
      logger.error(errMsg, sqe);
      // batch failed: set current row number to the end of the batch
      currentRowNumber = startingRowNumber + inputRowList.size();
      throw new DataAccessObjectException(errMsg, sqe);
    } catch (Exception e) {
      String errMsg =
          Messages.getFormattedString(
              "DatabaseDAO.exceptionPrepareRow",
              new String[] {
                String.valueOf(currentRowNumber + 1),
                String.valueOf(startingRowNumber + 1),
                String.valueOf(startingRowNumber + inputRowList.size() + 1),
                dbContext.getDbConfigName(),
                e.getMessage()
              });
      logger.error(errMsg, e);
      // batch failed: set current row number to the end of the batch
      currentRowNumber = startingRowNumber + inputRowList.size();
      throw new DataAccessObjectException(errMsg, e);
    }

    try {
      // for a batch size of 1, skip batching; this gives much better error output
      int totalSuccessRows = 0;
      if (inputRowList.size() == 1) {
        // a non-batched update throws an exception on failure, so reaching this point means success
        dbContext.getDataStatement().executeUpdate();
        success = true;
        totalSuccessRows = 1;
      } else {
        // execute the update SQL in batch
        int[] rowsUpdatedArray = dbContext.getDataStatement().executeBatch();
        for (int rowsUpdated : rowsUpdatedArray) {
          if (rowsUpdated == PreparedStatement.SUCCESS_NO_INFO) {
            totalSuccessRows = rowsUpdatedArray.length;
            success = true;
            break;
          }
        }
      }
      logger.debug(
          Messages.getFormattedString(
              "DatabaseDAO.updatedStatus",
              new String[] {String.valueOf(totalSuccessRows), String.valueOf(currentRowNumber)}));

      // commit the change
      dbContext.getDataConnection().commit();

    } catch (SQLException sqe) {
      if (sqe instanceof BatchUpdateException) {
        int[] updateCountArray = ((BatchUpdateException) sqe).getUpdateCounts();
        for (int i = 0; i < updateCountArray.length; i++) {
          if (updateCountArray[i] == PreparedStatement.EXECUTE_FAILED) {
            // FIXME all results are the same, return
            success = false;
            break;
          }
        }
      }
      String errMsg =
          Messages.getFormattedString(
              "DatabaseDAO.sqlExceptionWriteRow",
              new String[] {
                String.valueOf(currentRowNumber + 1 - inputRowList.size()),
                String.valueOf(currentRowNumber + 1),
                dbContext.getDbConfigName(),
                sqe.getMessage()
              });
      logger.error(errMsg, sqe);

      endException(inputRowList.size());
      throw new DataAccessObjectException(errMsg, sqe);
    } catch (Exception e) {
      String errMsg =
          Messages.getFormattedString(
              "DatabaseDAO.exceptionWriteRow",
              new String[] {
                String.valueOf(currentRowNumber + 1 - inputRowList.size()),
                String.valueOf(currentRowNumber + 1),
                dbContext.getDbConfigName(),
                e.getMessage()
              });
      logger.error(errMsg, e);

      endException(inputRowList.size());
      throw new DataAccessObjectException(errMsg, e);
    }

    return success;
  }
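A hypothetical caller that accumulates rows and flushes them in batches; writer, sourceRows, and batchSize are assumptions for illustration:

 List<Row> batch = new ArrayList<>();
 for (Row row : sourceRows) {
   batch.add(row);
   if (batch.size() == batchSize) {
     writer.writeRowList(batch); // batches of more than one row go through executeBatch()
     batch.clear();
   }
 }
 if (!batch.isEmpty()) {
   writer.writeRowList(batch); // a single remaining row takes the non-batched path
 }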