Reputation: 21
I am querying a postgres db utilizing the jdbc driver and this is a going to be a fairly basic standalone java app querying a local installation/hosted server.
I wanted to log the sql that I pass with format strings to check whether I am simply passing question marks or actual column names, but turns out jdbc doesn't support that kind of logging for thread safety. I constructed my prepared statement to be different on different conditions.
So I am using a statement to select everything I need and then retrieving using a resultset and meanwhile creating a prepared object depending on those resultsets. My prepared statement can vary between two sql updates when before I used two different prepared statements and I received a deadlock. Therefore do I need connection pooling? I would rather not as this standalone app shouldn't take up too much more time as I've already invested too much time trying to debug and troubleshoot server issues. My code is below:
public static void handleEntries(Long level,
PreparedStatement w_ustmt, String base_url, String name5,
String name4, String name3, String name2, String name1, String name0) throws SQLException {
String wiki_url_entry = "";
try {
if (level.equals((long) 5)) {
// wiki_url = wiki_url + name5;
w_ustmt.setString(1, base_url + name5);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
} else if (level.equals((long) 4)) {
// wiki_url = wiki_url + name4;
w_ustmt.setString(1, base_url + name4);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
} else if (level.equals((long) 3)) {
//for some reason all the results are coming from this loop
// wiki_url = wiki_url + name3;
w_ustmt.setString(1, base_url + name3);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
} else if (level.equals((long) 2)) {
// wiki_url = wiki_url + name2;
w_ustmt.setString(1, base_url + name2);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
} else if (level.equals((long) 1)) {
// wiki_url = wiki_url + name1;
w_ustmt.setString(1, base_url + name1);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
} else {
// wiki_url = wiki_url + name0;
w_ustmt.setString(1, base_url + name0);
w_ustmt.addBatch();
System.out.println(w_ustmt.toString());
}
} catch (SQLException ex) {
Logger lgr = Logger.getLogger(Shapefile_Repair.class.getName());
lgr.log(Level.SEVERE, ex.getMessage());
while (ex != null){
System.out.println(ex.getNextException());
}
//System.out.println("SQLState: A " + ex.getSQLState());
//System.out.println("VendorError A: " + ex.getErrorCode());
}
}
/* this function will handle all the urls which have same entries */
public static void handleDups(String wiki_url,
String base_url, String name5, String name4,
String name3, String name2, String name1, String name0,
PreparedStatement w_ustmt) {
String wiki_url_entry = "";
try {
if (wiki_url.toString().equals(base_url + name5)) {
wiki_url_entry = wiki_url + "(" + name0 + "." + name1 + "." + name2 + "." + name3 + "." +
name4 + ")";
// rewriting here, yet it is only printing ?, ?
w_ustmt.setString(1, wiki_url_entry);
} else if (wiki_url.toString().equals(base_url + name4)) {
wiki_url_entry = wiki_url + "(" + name0 + "." + name1 + "." + name2 + "." + name3 +
")";
w_ustmt.setString(1, wiki_url_entry);
} else if (wiki_url.toString().equals(base_url + name3)) {
wiki_url_entry = wiki_url + "(" + name0 + "." + name1 + "." + name2
+ ")";
w_ustmt.setString(1, wiki_url_entry);
} else if (wiki_url.toString().equals(base_url + name2)) {
wiki_url_entry = wiki_url + "(" + name0 + "." + name1 + ")";
w_ustmt.setString(1, wiki_url_entry);
} else if (wiki_url.toString().equals(base_url + name1)) {
wiki_url_entry = wiki_url + "(" + name0 + ")";
w_ustmt.setString(1, wiki_url_entry);
}
w_ustmt.addBatch();
System.out.println("B");
} catch (SQLException ex) {
Logger lgr = Logger.getLogger(Shapefile_Repair.class.getName());
lgr.log(Level.SEVERE, ex.getMessage());
System.out.println("SQLState: B " + ex.getSQLState());
System.out.println("VendorError B :" + ex.getErrorCode());
}
}
/* main method */
@SuppressWarnings("resource")
public static void main(String[] args) throws InstantiationException,
IllegalAccessException, SQLException {
// TODO Auto-generated method stub
Connection conn = null;
// use the log4jdbc4 wrapper for the connection object
// conn = new net.sf.log4jdbc.ConnectionSpy(conn);
Statement stmt = null;
ResultSet rs = null;
try {
// conn = makeConnection(args[0], args[1], args[2], args[3], "");
String host = args[0];
String port = args[1];
String database = args[2];
String user = args[3];
String password = args[4];
String sql_query = "SELECT \"NAME_0\", \"NAME_1\", \"NAME_2\", \"NAME_3\", \"NAME_4\", \"NAME_5\", \"WIKI_URL\", \"LEVEL_DEPT\" FROM"
+ " AdminBoundaries WHERE \"WIKI_URL\" IN(SELECT \"WIKI_URL\" FROM AdminBoundaries"
+ " GROUP By \"WIKI_URL\" HAVING (count (\"WIKI_URL\") > 1)) ORDER BY \"WIKI_URL\";";
Class.forName("org.postgresql.Driver").newInstance();
String url = "jdbc:postgresql://" + host + ":" + port + "/"
+ database;
conn = DriverManager.getConnection(url, user, password);
// if(conn.equals(null)){
// System.err.println("Connection complete");
// }
DatabaseMetaData meta;
try {
if (conn.isClosed()) {
System.out.println("closed");
}
System.out.println(conn.getWarnings());
meta = conn.getMetaData();
boolean updateable = meta.supportsResultSetConcurrency(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
/*if(updateable) System.out.println("yes");
System.out.println("Does database support batch processes?:"
+ meta.supportsBatchUpdates());
ResultSet columns = meta.getColumns(null, null,
"adminboundaries", "%");
while (columns.next()) {
System.out.println(columns.getString(4));
}*/
// after checking the table, adminboundaries was in fact
// evident, so the issue now is changing the sql queries to
// lower case
/*
* ResultSet tables = meta.getTables(null, null, "%", null);
* while (tables.next()){
* System.out.println(tables.getString(3)); }
*/
} finally {
System.out.println(conn.getTransactionIsolation());
System.out.println(sql_query);
stmt = conn.createStatement(ResultSet.CLOSE_CURSORS_AT_COMMIT,
ResultSet.CONCUR_UPDATABLE,
ResultSet.TYPE_SCROLL_INSENSITIVE);
System.out.println(stmt.getFetchSize());
stmt.setMaxRows(900);
System.out.println(conn.getTypeMap());
// stmt.addBatch(sql_query);
rs = stmt.executeQuery(sql_query);
// batch_counter is for the row count of the SQL statements
// executed
// int batch_counter = pstmt.executeUpdate();
// update dup_sql's arguments being set as it will vary
// String wiki_sql =
// "UPDATE AdminBoundaries SET \"WIKI_URL\" = ?";
PreparedStatement w_ustmt = conn
.prepareStatement("UPDATE AdminBoundaries SET WIKI_URL = ?");
// int counter = 0;
// for (counter = 0; counter < 1000; counter++) {
conn.setAutoCommit(false);
// stmt.addBatch(sql_query);
// stmt.setQueryTimeout(30);
System.out.println(rs.getFetchSize());
SQLWarning resultsetWarning = rs.getWarnings();
System.out.println(resultsetWarning);
// consider changing tables/columns to lower case as sources say
// to do
while (rs.next()) {
String base_url = "http://127.0.0.1/mediawiki/index.php/";
String name0 = rs.getString("NAME_0");
String name1 = rs.getString("NAME_1");
String name2 = rs.getString("NAME_2");
String name3 = rs.getString("NAME_3");
String name4 = rs.getString("NAME_4");
String name5 = rs.getString("NAME_5");
String wiki_url = rs.getString("WIKI_URL");
Long level = rs.getLong("LEVEL_DEPT");
/*for (int i = 1; i < rs.getMetaData().getColumnCount() + 1; i++) {
while (rs.next()) {
System.out.println(i + " " + rs.getMetaData().getColumnName(i));
}
}*/
// use 127.0.0.1, not ncsirad-pc b/c wiki_urls are coming
// back 127.0.0.1
// TO DO: rethink this if statement to include in function
// and execute
// both functions without conditionals above. So it will run
// through first
// then run through second.
if (wiki_url.toString().equals(base_url)) {
// Savepoint savepoint1 =
// conn.setSavepoint("wiki_entry");
// w_ustmt.setQueryTimeout(30);
// System.out.println("Getting into duplicates loop");
handleEntries(level, w_ustmt, base_url, name5, name4,
name3, name2, name1, name0);
// add to batch
// w_ustmt.addBatch(wiki_sql);
// int counts2[] = w_ustmt.executeBatch();
// make sure to try the below
// w_ustmt.executeUpdate();
// conn.rollback(savepoint1);
} else {
handleDups(wiki_url, base_url, name5, name4, name3,
name2, name1, name0, w_ustmt);
w_ustmt.setQueryTimeout(30);
// dup_pstmt.addBatch(dup_sql);
// int count3[] = dup_pstmt.executeBatch();
}
}
// may not need executeUpdate
int counts[] = w_ustmt.executeBatch();
System.out.println("here is " + counts);
conn.commit();
conn.setAutoCommit(true);
// counter = 0;
// }
}
System.out.println("finished queries");
rs.close();
} catch (ClassNotFoundException e) {
e.printStackTrace();
// System.exit(1);
} catch (SQLException ex) {
Logger lgr = Logger.getLogger(Shapefile_Repair.class.getName());
lgr.log(Level.SEVERE, ex.getMessage());
System.out.println("SQLState C: " + ex.getSQLState() + " " + lgr);
System.out.println("VendorError C: " + ex.getErrorCode());
if (ex != null) {
System.out.println(ex.getNextException());
}
} finally {
try {
if (rs != null) {
rs.close();
}
if (stmt != null) {
stmt.close();
}
if (conn != null) {
conn.close();
}
} catch (SQLException e) {
}
}
conn.close();
}
}
The errors/SQL returned from printing was long with the first couple print statements indicating the # of allowable rows so I shortened it:
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/null'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/null'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/null'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/null'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/null'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/?uilly (France.Picardie.Aisne.Laon.Craonne)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/?uilly (France.Champagne-Ardenne.Marne.Épernay.Dormans)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/01 (Vietnam.Ðông Nam B?.H? Chí Minh city.Qu?n 3)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/01 (Vietnam.Ðông Nam B?.H? Chí Minh city.Qu?n 10)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/01 (Vietnam.Ðông Nam B?.H? Chí Minh city.Qu?n 6)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/01 (Vietnam.Ðông Nam B?.H? Chí Minh city.Phú Nhu?n)'
UPDATE adminboundaries SET WIKI_URL = 'http://127.0.0.1/mediawiki/index.php/01 (Vietnam.Ðông Nam B?.H? Chí Minh city.Qu?n 4)'
Dec 18, 2012 4:31:05 PM Shapefile_Repair main
SEVERE: Batch entry 0 UPDATE adminboundaries SET
WIKI_URL='http://127.0.0.1/mediawiki/index.php/null' was aborted.
Call getNextException to see the cause.
SQLState C: 42703 java.util.logging.Logger@37504d
VendorError C: 0
org.postgresql.util.PSQLException: ERROR: column "wiki_url" of relation
"adminboundaries" does not exist
Position: 28
Upvotes: 1
Views: 315
Reputation: 11487
The exception cleary says wiki_url
does not exist in table adminboundaries
.
org.postgresql.util.PSQLException: ERROR: column "wiki_url" of relation "adminboundaries" does not exist
I think it comes from the update
query are executing
conn.prepareStatement("UPDATE adminboundaries SET WIKI_URL = ?");
Since postgresql is case sensitive Change your query to
conn.prepareStatement("UPDATE AdminBoundaries SET WIKI_URL = ?");
assuming your table name is AdminBoundaries
.
Upvotes: 1