private void flushBuffer() throws SQLException
{
    if (rowBuffer.size() == 0)
        return;

    // flatten the buffered rows into a single list of prepared-statement parameters
    Object[] queryParams = new Object[rowBuffer.size() * fieldNames.length];
    int i = 0;
    for (Object[] row : rowBuffer)
        for (Object value : row)
            queryParams[i++] = value;

    // make sure the prepared statement expects exactly this many rows
    setQueryRowCount(rowBuffer.size());
    try
    {
        SQLUtils.setPreparedStatementParams(stmt, queryParams);
        stmt.execute();
    }
    catch (SQLException e)
    {
        // roll back and release resources; this loader is unusable after a failure
        try {
            conn.rollback();
        } catch (SQLException ex) { }
        try {
            finalize();
        } catch (Throwable thrown) { }
        stmt = null;
        conn = null;
        System.err.println("Failed to insert values: " + Arrays.deepToString(rowBuffer.toArray()));
        throw new SQLExceptionWithQuery(baseQuery + "(...)", e);
    }
    finally
    {
        rowBuffer.removeAllElements();
    }
}
public BulkSQLLoader_Direct(Connection conn, String schema, String table, String[] fieldNames) throws RemoteException
{
    super(conn, schema, table, fieldNames);
    try
    {
        // disable auto-commit so all buffered rows go in as one transaction;
        // the previous setting is restored in finalize()
        prevAutoCommit = conn.getAutoCommit();
        if (prevAutoCommit)
            conn.setAutoCommit(false);

        String quotedTable = SQLUtils.quoteSchemaTable(conn, schema, table);
        String[] columns = new String[fieldNames.length];
        for (int i = 0; i < fieldNames.length; i++)
            columns[i] = SQLUtils.quoteSymbol(conn, fieldNames[i]);
        baseQuery = String.format("INSERT INTO %s(%s) VALUES ", quotedTable, StringUtils.join(",", columns));
        rowQuery = "(" + StringUtils.mult(",", "?", fieldNames.length) + ")";

        // setQueryRowCount() is a void statement elsewhere in this class, so the
        // buffer capacity and the statement row count are set in two separate steps
        rowBuffer = new Vector<Object[]>(DEFAULT_BUFFER_SIZE);
        setQueryRowCount(DEFAULT_BUFFER_SIZE);
    }
    catch (SQLException e)
    {
        throw new RemoteException("Error initializing BulkSQLLoader_Direct", e);
    }
}
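// For illustration (a sketch, not from the source; exact quoting depends on
// SQLUtils.quoteSymbol for the connected DBMS): with schema "public", table "points"
// and fieldNames {"x","y"}, the statement prepared for a row count of 3 would be
//
//   INSERT INTO "public"."points"("x","y") VALUES (?,?),(?,?),(?,?)
//
// i.e. baseQuery followed by rowQuery repeated once per buffered row.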
public static void copyCsvToDatabase(Connection conn, String csvPath, String sqlSchema, String sqlTable)
    throws SQLException, IOException
{
    String query = null;
    String formatted_CSV_path = csvPath.replace('\\', '/');
    String dbms = conn.getMetaData().getDatabaseProductName();
    Statement stmt = null;
    String quotedTable = SQLUtils.quoteSchemaTable(conn, sqlSchema, sqlTable);

    try
    {
        if (dbms.equalsIgnoreCase(SQLUtils.MYSQL))
        {
            stmt = conn.createStatement();
            // ignore the 1st line so the column headers don't become the first row of data
            query = String.format(
                "load data local infile '%s' into table %s fields terminated by ',' enclosed by '\"' lines terminated by '\\n' ignore 1 lines",
                formatted_CSV_path, quotedTable);
            stmt.executeUpdate(query);
            stmt.close();
        }
        else if (dbms.equalsIgnoreCase(SQLUtils.POSTGRESQL))
        {
            // the HEADER option makes COPY skip the first line
            query = String.format("COPY %s FROM STDIN WITH CSV HEADER", quotedTable);
            ((PGConnection) conn).getCopyAPI().copyIn(query, new FileInputStream(formatted_CSV_path));
        }
        // note: any other DBMS falls through without importing anything
    }
    catch (SQLException e)
    {
        throw new SQLExceptionWithQuery(query, e);
    }
    finally
    {
        SQLUtils.cleanup(stmt);
    }
}
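// Usage sketch (an assumption, not from the source; the JDBC URL, credentials and
// table names are placeholders):
//
//   Connection conn = DriverManager.getConnection("jdbc:postgresql://localhost/mydb", "user", "pass");
//   copyCsvToDatabase(conn, "/tmp/data.csv", "public", "my_table");
//
// The target table must already exist with columns matching the CSV header;
// neither branch above creates it.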
public static ResultSet getPosts() throws SQLException
{
    Connection conn = SQLUtils.getConnection();
    String query = "SELECT * FROM placowki;";
    Statement st = conn.createStatement();
    ResultSet rs = st.executeQuery(query);
    // the Statement is left open on purpose: closing it here would also close
    // the returned ResultSet, so cleanup is the caller's responsibility
    return rs;
}
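// Consumer sketch (an assumption; the column name "nazwa" is taken from addPost below):
//
//   try (ResultSet rs = getPosts()) {
//       while (rs.next())
//           System.out.println(rs.getString("nazwa"));
//   } // this closes the ResultSet; the Statement behind it still leaks unless tracked separately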
/**
 * Inserts a new facility ("placowka") into the database.
 *
 * @param nazwa   name
 * @param adres   address
 * @param telefon phone number
 * @param email   e-mail address
 * @throws SQLException
 */
public static void addPost(String nazwa, String adres, String telefon, String email) throws SQLException
{
    Connection conn = SQLUtils.getConnection();
    // use a PreparedStatement so quotes in the input cannot break the query (SQL injection)
    String query = "insert into placowki (nazwa, adres, telefon, mail) values (?, ?, ?, ?);";
    try (PreparedStatement st = conn.prepareStatement(query))
    {
        st.setString(1, nazwa);
        st.setString(2, adres);
        st.setString(3, telefon);
        st.setString(4, email);
        st.executeUpdate();
    }
    catch (SQLException e)
    {
        System.out.println(e);
        return;
    }
    System.out.println("Added facility to the database: (nazwa: " + nazwa
        + ", adres: " + adres + ", telefon: " + telefon + ", email: " + email + ")");
}
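// Usage sketch (an assumption; the values are placeholders):
//
//   addPost("Przychodnia Centrum", "ul. Prosta 1, Warszawa", "22 123 45 67", "biuro@example.pl");
//
// Note the table column is named "mail" while the parameter is "email";
// the INSERT above maps the email argument to the "mail" column.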
public ResultSet addRecord(final ProductEntry record) throws SQLException
{
    stmtSaveNewRecord.clearParameters();
    // bind the parameters in the column order of the prepared INSERT statement
    int i = 1;
    if (record.getFile() == null)
        stmtSaveNewRecord.setString(i++, "");
    else
        stmtSaveNewRecord.setString(i++, record.getFile().getAbsolutePath());
    stmtSaveNewRecord.setString(i++, record.getName());
    stmtSaveNewRecord.setString(i++, record.getMission());
    stmtSaveNewRecord.setString(i++, record.getProductType());
    stmtSaveNewRecord.setString(i++, record.getAcquisitionMode());
    stmtSaveNewRecord.setString(i++, record.getPass());
    // corner coordinates of the scene footprint
    stmtSaveNewRecord.setDouble(i++, record.getFirstNearGeoPos().getLat());
    stmtSaveNewRecord.setDouble(i++, record.getFirstNearGeoPos().getLon());
    stmtSaveNewRecord.setDouble(i++, record.getFirstFarGeoPos().getLat());
    stmtSaveNewRecord.setDouble(i++, record.getFirstFarGeoPos().getLon());
    stmtSaveNewRecord.setDouble(i++, record.getLastNearGeoPos().getLat());
    stmtSaveNewRecord.setDouble(i++, record.getLastNearGeoPos().getLon());
    stmtSaveNewRecord.setDouble(i++, record.getLastFarGeoPos().getLat());
    stmtSaveNewRecord.setDouble(i++, record.getLastFarGeoPos().getLon());
    stmtSaveNewRecord.setDouble(i++, record.getRangeSpacing());
    stmtSaveNewRecord.setDouble(i++, record.getAzimuthSpacing());
    stmtSaveNewRecord.setDate(i++, SQLUtils.toSQLDate(record.getFirstLineTime()));
    stmtSaveNewRecord.setDouble(i++, record.getFileSize());
    stmtSaveNewRecord.setDouble(i++, record.getLastModified());
    stmtSaveNewRecord.setString(i++, record.getFileFormat());

    // the geo-boundary column has a fixed capacity; store an empty string if it would overflow
    final String geoStr = record.formatGeoBoundayString();
    if (geoStr.length() > 1200)
    {
        System.out.println("Geoboundary string exceeds 1200");
        stmtSaveNewRecord.setString(i++, "");
    }
    else
    {
        stmtSaveNewRecord.setString(i++, geoStr);
    }

    stmtSaveNewRecord.executeUpdate();
    return stmtSaveNewRecord.getGeneratedKeys();
}
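// Caller sketch (an assumption: stmtSaveNewRecord must have been prepared with
// Statement.RETURN_GENERATED_KEYS for getGeneratedKeys() to return anything):
//
//   try (ResultSet keys = db.addRecord(entry)) {
//       if (keys.next())
//           System.out.println("new record id: " + keys.getInt(1));
//   }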
public BulkSQLLoader_CSV(Connection conn, String schema, String table, String[] fieldNames) throws RemoteException
{
    super(conn, schema, table, fieldNames);
    try
    {
        // only MySQL and PostgreSQL support bulk-loading from a CSV file
        String dbms = conn.getMetaData().getDatabaseProductName();
        if (!dbms.equalsIgnoreCase(SQLUtils.MYSQL) && !dbms.equalsIgnoreCase(SQLUtils.POSTGRESQL))
            throw new RemoteException("BulkSQLLoader_CSV does not support " + dbms);
    }
    catch (SQLException e)
    {
        throw new RemoteException("Unable to initialize bulk loader", e);
    }
    try
    {
        this.tempRows[0] = new Object[fieldNames.length];
        // whether empty strings need quoting depends on how this DBMS represents NULL in CSV
        outputNullValue = SQLUtils.getCSVNullValue(conn);
        quoteEmptyStrings = outputNullValue.length() > 0;
        file = File.createTempFile("Weave", ".csv", temporaryFilesDirectory);
        file.deleteOnExit();
        writer = new FileWriter(file);
        addRow((Object[]) fieldNames); // write the header row first
    }
    catch (Exception e)
    {
        throw new RemoteException("Unable to initialize bulk loader", e);
    }
}
/**
 * Executes an arbitrary SQL update/DDL command. The caller is responsible for
 * ensuring the command comes from a trusted source.
 *
 * @param command the SQL statement to execute
 * @throws SQLException
 */
public static void submit(String command) throws SQLException
{
    Connection conn = SQLUtils.getConnection();
    try (Statement st = conn.createStatement())
    {
        st.executeUpdate(command);
    }
    catch (SQLException e)
    {
        System.out.println(e);
    }
}
// returns the SQL data type string for a DBF column, using getFieldType from DBaseFileHeader
private static String getSQLDataType(Connection conn, DbaseFileHeader dbfHeader, int index)
{
    char dataType = dbfHeader.getFieldType(index);
    String sqlDataType = "";
    if (dataType == 'C')
    {
        // character field -> VARCHAR sized to the DBF field length
        sqlDataType = SQLUtils.getVarcharTypeString(conn, dbfHeader.getFieldLength(index));
    }
    else if (dataType == 'N' || dataType == 'F')
    {
        // numeric field: zero decimal places -> big integer type, otherwise double precision
        if (dbfHeader.getFieldDecimalCount(index) == 0)
            sqlDataType = SQLUtils.getBigIntTypeString(conn);
        else
            sqlDataType = SQLUtils.getDoubleTypeString(conn);
    }
    else if (dataType == 'D')
    {
        sqlDataType = SQLUtils.getDateTimeTypeString(conn);
    }
    else
    {
        throw new RuntimeException("Unknown DBF data type: " + dataType + " in column " + dbfHeader.getFieldName(index));
    }
    return sqlDataType;
}
public static BulkSQLLoader newInstance(Connection conn, String schema, String table, String[] fieldNames)
    throws RemoteException
{
    // BulkSQLLoader_CSV doesn't work for MySQL unless the LOAD DATA LOCAL feature is enabled,
    // so only PostgreSQL gets the CSV loader; everything else uses direct multi-row INSERTs
    String dbms = SQLUtils.getDbmsFromConnection(conn);
    if (dbms.equals(SQLUtils.POSTGRESQL))
        return new BulkSQLLoader_CSV(conn, schema, table, fieldNames);
    else
        return new BulkSQLLoader_Direct(conn, schema, table, fieldNames);
}
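// Usage sketch (partly an assumption: addRow(Object...) is visible from the
// BulkSQLLoader_CSV constructor above, but the flush()-style finalization step
// is assumed to exist on the base class):
//
//   BulkSQLLoader loader = BulkSQLLoader.newInstance(conn, "public", "points", new String[]{"x", "y"});
//   loader.addRow(1.0, 2.0);
//   loader.addRow(3.0, 4.0);
//   loader.flush(); // assumed: forces out any buffered rows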
@Override
protected void finalize() throws Throwable
{
    SQLUtils.cleanup(stmt);
    try {
        conn.setAutoCommit(prevAutoCommit);
    } catch (SQLException ex) { }
    super.finalize();
}
/**
 * Get all acquisition modes for the specified missions.
 *
 * @param missions the selected missions
 * @return list of acquisition modes
 * @throws SQLException if the database query fails
 */
public String[] getAcquisitionModes(final String[] missions) throws SQLException
{
    if (missions == null || missions.length == 0)
        return new String[] {};

    String strMissionAcquisitionModes = "SELECT DISTINCT " + AbstractMetadata.ACQUISITION_MODE
            + " FROM " + TABLE + " WHERE ";
    strMissionAcquisitionModes += SQLUtils.getOrList(AbstractMetadata.MISSION, missions);

    final List<String> listEntries = new ArrayList<>();
    // try-with-resources so the Statement and ResultSet are closed even on error
    try (final Statement queryStatement = dbConnection.createStatement();
         final ResultSet results = queryStatement.executeQuery(strMissionAcquisitionModes))
    {
        while (results.next()) {
            listEntries.add(results.getString(1));
        }
    }
    return listEntries.toArray(new String[listEntries.size()]);
}
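// For illustration (an assumption about what SQLUtils.getOrList produces): for
// missions {"SENTINEL-1A", "SENTINEL-1B"} the query presumably expands to something like
//
//   SELECT DISTINCT ACQUISITION_MODE FROM products
//   WHERE ( MISSION='SENTINEL-1A' OR MISSION='SENTINEL-1B' )
//
// with the actual column and table names taken from AbstractMetadata and TABLE.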
private static String vstr(TemplateModel o)
{
    try
    {
        if (o == null) {
            return "";
        }
        if (o instanceof TemplateBooleanModel) {
            return Boolean.toString(((TemplateBooleanModel) o).getAsBoolean());
        } else if (o instanceof TemplateNumberModel) {
            return ((TemplateNumberModel) o).getAsNumber().toString();
        } else if (o instanceof TemplateScalarModel) {
            return SQLUtils.toSql(((TemplateScalarModel) o).getAsString());
        } else {
            throw new TemplateModelException("Object type error");
        }
    }
    catch (TemplateModelException e)
    {
        throw new ServerException(BaseErrors.PLATFORM_SQL_TEMPLATE_ERROR, e);
    }
}
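// Behavior sketch (an assumption: FreeMarker's SimpleNumber/SimpleScalar wrappers are
// used for illustration, and SQLUtils.toSql is assumed to escape the string for SQL):
//
//   vstr(null)                        -> ""
//   vstr(TemplateBooleanModel.TRUE)   -> "true"
//   vstr(new SimpleNumber(42))        -> "42"
//   vstr(new SimpleScalar("O'Brien")) -> SQLUtils.toSql("O'Brien")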
/**
 * Reads records from a set of DBF files and stores them in a single SQL table.
 *
 * @param dbfFiles a list of DBF files to merge
 * @param conn a database connection
 * @param sqlSchema schema to store the table in
 * @param sqlTable table name to store the data in
 * @param overwriteTables whether to drop and re-create an existing table
 * @param nullValues strings that should be treated as NULL
 * @throws IOException
 * @throws SQLException
 */
public static void storeAttributes(File[] dbfFiles, Connection conn, String sqlSchema, String sqlTable, boolean overwriteTables, String[] nullValues)
    throws IOException, SQLException
{
    if (!overwriteTables && SQLUtils.tableExists(conn, sqlSchema, sqlTable))
        throw new SQLException("SQL Tables already exist and overwriteTables is false.");

    // read records from each file
    List<String> fieldNames = new Vector<String>(); // order corresponds to fieldTypes order
    List<String> fieldTypes = new Vector<String>(); // order corresponds to fieldNames order

    FileInputStream[] inputStreams = new FileInputStream[dbfFiles.length];
    DbaseFileHeader[] headers = new DbaseFileHeader[dbfFiles.length];
    DbaseFileReader[] readers = new DbaseFileReader[dbfFiles.length];

    // open each file, read each header, get the complete list of field names and types
    for (int i = 0; i < dbfFiles.length; i++)
    {
        inputStreams[i] = new FileInputStream(dbfFiles[i]);
        readers[i] = new DbaseFileReader(inputStreams[i].getChannel(), false, Charset.forName("ISO-8859-1"));
        headers[i] = readers[i].getHeader();
        int numFields = headers[i].getNumFields();
        // keep track of the full set of field names
        for (int col = 0; col < numFields; col++)
        {
            String newFieldName = headers[i].getFieldName(col);
            if (ListUtils.findString(newFieldName, fieldNames) < 0)
            {
                fieldNames.add(newFieldName);
                fieldTypes.add(getSQLDataType(conn, headers[i], col));
            }
        }
    }

    // begin SQL code
    try
    {
        conn.setAutoCommit(false);

        // create the table
        if (overwriteTables)
            SQLUtils.dropTableIfExists(conn, sqlSchema, sqlTable);
        fieldNames.add(0, "the_geom_id");
        fieldTypes.add(0, SQLUtils.getSerialPrimaryKeyTypeString(conn));
        SQLUtils.createTable(conn, sqlSchema, sqlTable, fieldNames, fieldTypes);

        // import data from each file
        for (int f = 0; f < dbfFiles.length; f++)
        {
            int numFields = headers[f].getNumFields();
            int numRecords = headers[f].getNumRecords();
            // insert records from this file
            for (int r = 0; r < numRecords; r++)
            {
                Map<String, Object> record = new HashMap<String, Object>();
                Object[] entry = readers[f].readEntry();
                for (int c = 0; c < numFields; c++)
                {
                    // skip values that should be stored as NULL
                    if (ListUtils.findIgnoreCase(entry[c].toString(), nullValues) < 0)
                        record.put(headers[f].getFieldName(c), entry[c]);
                }
                // insert the record in the table
                try
                {
                    SQLUtils.insertRow(conn, sqlSchema, sqlTable, record);
                }
                catch (SQLException e)
                {
                    System.out.println(String.format("Insert failed on row %s of %s: %s", r, dbfFiles[f].getName(), record));
                    throw e;
                }
            }
            // close the file
            readers[f].close();
            inputStreams[f].close();
            // clean up pointers
            readers[f] = null;
            inputStreams[f] = null;
            headers[f] = null;
        }
    }
    finally
    {
        conn.setAutoCommit(true);
    }
}
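// Usage sketch (an assumption; the file names and table are placeholders):
//
//   File[] dbfFiles = { new File("counties.dbf"), new File("cities.dbf") };
//   storeAttributes(dbfFiles, conn, "public", "shape_attributes", true, new String[]{"", "NULL"});
//
// Columns are unioned across all input files; a row simply omits any field its
// file does not define, and a serial "the_geom_id" primary key is prepended.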