Example #1
File: DBFUtils.java Project: vigupta/Weave
  /**
   * Reads records from one or more DBF files and inserts them into a single SQL table.
   *
   * @param dbfFiles a list of DBF files to merge
   * @param conn a database connection
   * @param sqlSchema schema in which to store the table
   * @param sqlTable name of the table in which to store the data
   * @param overwriteTables if true, an existing table with the same name is dropped and re-created
   * @param nullValues values which, when matched (ignoring case), are treated as SQL NULL and omitted from the insert
   * @throws IOException if a DBF file cannot be read
   * @throws SQLException if the table already exists and overwriteTables is false, or if a SQL statement fails
   */
  public static void storeAttributes(
      File[] dbfFiles,
      Connection conn,
      String sqlSchema,
      String sqlTable,
      boolean overwriteTables,
      String[] nullValues)
      throws IOException, SQLException {
    if (!overwriteTables && SQLUtils.tableExists(conn, sqlSchema, sqlTable))
      throw new SQLException("SQL Tables already exist and overwriteTables is false.");

    // read records from each file
    List<String> fieldNames = new Vector<String>(); // order corresponds to fieldTypes order
    List<String> fieldTypes = new Vector<String>(); // order corresponds to fieldNames order

    FileInputStream[] inputStreams = new FileInputStream[dbfFiles.length];
    DbaseFileHeader[] headers = new DbaseFileHeader[dbfFiles.length];
    DbaseFileReader[] readers = new DbaseFileReader[dbfFiles.length];

    // open each file, read each header, get the complete list of field names and types
    for (int i = 0; i < dbfFiles.length; i++) {
      inputStreams[i] = new FileInputStream(dbfFiles[i]);
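      // read the DBF through a file channel, decoding text fields as ISO-8859-1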
      readers[i] =
          new DbaseFileReader(inputStreams[i].getChannel(), false, Charset.forName("ISO-8859-1"));
      headers[i] = readers[i].getHeader();

      int numFields = headers[i].getNumFields();
      // keep track of the full set of field names
      for (int col = 0; col < numFields; col++) {
        String newFieldName = headers[i].getFieldName(col);
        if (ListUtils.findString(newFieldName, fieldNames) < 0) {
          fieldNames.add(newFieldName);
          fieldTypes.add(getSQLDataType(conn, headers[i], col));
        }
      }
    }

    // begin SQL code
    try {
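      // disable auto-commit so the table creation and all inserts run in a single transaction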
      conn.setAutoCommit(false);

      // create the table
      if (overwriteTables) SQLUtils.dropTableIfExists(conn, sqlSchema, sqlTable);
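      // prepend an auto-incrementing primary key column so each imported row gets a unique id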
      fieldNames.add(0, "the_geom_id");
      fieldTypes.add(0, SQLUtils.getSerialPrimaryKeyTypeString(conn));
      SQLUtils.createTable(conn, sqlSchema, sqlTable, fieldNames, fieldTypes);

      // import data from each file
      for (int f = 0; f < dbfFiles.length; f++) {
        int numFields = headers[f].getNumFields();
        int numRecords = headers[f].getNumRecords();
        // insert records from this file
        for (int r = 0; r < numRecords; r++) {
          Map<String, Object> record = new HashMap<String, Object>();
          Object[] entry = readers[f].readEntry();
          for (int c = 0; c < numFields; c++) {
            if (ListUtils.findIgnoreCase(entry[c].toString(), nullValues) < 0)
              record.put(headers[f].getFieldName(c), entry[c]);
          }

          // insert the record in the table
          try {
            SQLUtils.insertRow(conn, sqlSchema, sqlTable, record);
          } catch (SQLException e) {
            System.out.println(
                String.format(
                    "Insert failed on row %s of %s: %s", r, dbfFiles[f].getName(), record));
            throw e;
          }
        }
        // close the file
        readers[f].close();
        inputStreams[f].close();
        // clean up pointers
        readers[f] = null;
        inputStreams[f] = null;
        headers[f] = null;
      }
    } finally {
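      // restoring auto-commit implicitly commits any pending transaction (JDBC Connection.setAutoCommit)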
      conn.setAutoCommit(true);
    }
  }
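
A minimal usage sketch follows, assuming DBFUtils is on the classpath and a PostgreSQL JDBC connection is available; the connection URL, credentials, schema, table name, and DBF file paths are hypothetical placeholders, not part of the Weave project.

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;

public class DBFImportExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical JDBC connection; substitute your own URL and credentials.
    Connection conn =
        DriverManager.getConnection("jdbc:postgresql://localhost/weave", "user", "password");
    try {
      // Hypothetical DBF files whose attribute records will be merged into one table.
      File[] dbfFiles = {new File("counties.dbf"), new File("states.dbf")};

      // Treat empty strings as SQL NULL and overwrite the table if it already exists.
      DBFUtils.storeAttributes(
          dbfFiles, conn, "public", "county_attributes", true, new String[] {""});
    } finally {
      conn.close();
    }
  }
}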