/**
 * Test batched prepared statement concurrency. Batch prepares must not disappear between the
 * moment when they were created and when they are executed.
 */
public void testConcurrentBatching() throws Exception {
    // Create a connection with a batch size of 1. This should cause prepares and actual batch
    // execution to become interspersed (if correct synchronization is not in place) and greatly
    // increase the chance of prepares being rolled back before getting executed.
    Properties props = new Properties();
    props.setProperty(Messages.get(net.sourceforge.jtds.jdbc.Driver.BATCHSIZE), "1");
    props.setProperty(
            Messages.get(net.sourceforge.jtds.jdbc.Driver.PREPARESQL),
            String.valueOf(TdsCore.TEMPORARY_STORED_PROCEDURES));

    Connection con = getConnection(props);
    try {
        Statement stmt = con.createStatement();
        stmt.execute(
                "create table #testConcurrentBatch (v1 int, v2 int, v3 int, v4 int, v5 int, v6 int)");
        stmt.close();

        Vector exceptions = new Vector();
        con.setAutoCommit(false);

        Thread t1 = new ConcurrentBatchingHelper(con, exceptions);
        Thread t2 = new ConcurrentBatchingHelper(con, exceptions);
        t1.start();
        t2.start();
        t1.join();
        t2.join();

        assertEquals(0, exceptions.size());
    } finally {
        con.close();
    }
}
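// ConcurrentBatchingHelper is referenced above but not defined in this section. The following
// is only a minimal sketch of what such a helper could look like, assuming it extends Thread,
// shares the connection, and collects any failure into the Vector for the main thread to
// assert on; the batch count and column values are illustrative, not taken from the original
// test. Assumes java.sql.PreparedStatement and java.sql.SQLException are imported.
private static class ConcurrentBatchingHelper extends Thread {
    private final Connection con;
    private final Vector exceptions;

    ConcurrentBatchingHelper(Connection con, Vector exceptions) {
        this.con = con;
        this.exceptions = exceptions;
    }

    public void run() {
        try {
            PreparedStatement ps = con.prepareStatement(
                    "insert into #testConcurrentBatch (v1, v2, v3, v4, v5, v6)"
                            + " values (?, ?, ?, ?, ?, ?)");
            // Issue many small batches on the shared connection so that prepares from one
            // thread interleave with batch execution from the other.
            for (int i = 0; i < 64; i++) {
                for (int j = 1; j <= 6; j++) {
                    ps.setInt(j, i);
                }
                ps.addBatch();
                ps.executeBatch();
            }
            ps.close();
        } catch (SQLException e) {
            exceptions.add(e); // let the main thread see and report the failure
        }
    }
}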
// ---- This function gets the raw json from the raw_data collection ----
// It separates the individual tweets, inserts them into the all_tweets collection and
// calls all the other data processing methods.
public void update_all_tweets(final double num_of_slots, final double max_time_frame_hours)
        throws MongoException, ParseException {
    log4j.info("starting update_all_tweets function");
    String res = "";
    int countelements = 0;
    while (true) {
        while (this.collrd.count() < 1) {
            // no documents to process, poll again after a pause
            try {
                log4j.info("there is no raw data at the moment, going to sleep for 10 seconds");
                Thread.sleep(1000 * 10);
                log4j.info("woke up, continues");
            } catch (InterruptedException e) {
                log4j.error("InterruptedException caught, at update_all_tweets");
                log4j.error(e);
            }
        }
        DBCursor cursor = this.collrd.find(); // get all documents from the raw_data collection
        try {
            while (cursor.hasNext()) {
                DBObject currdoc = cursor.next();
                log4j.info("getting a document from the raw data db");
                Object results = currdoc.get("results"); // results - json array of tweets
                res = (results == null) ? "" : results.toString();
                Object obj = JSONValue.parse(res);
                log4j.info("making an array from the jsons tweets");
                JSONArray array = (JSONArray) obj; // make an array of tweets
                try {
                    if (!res.isEmpty()) { // if there are tweets
                        @SuppressWarnings("rawtypes")
                        Iterator iterArray = array.iterator();
                        log4j.info("iterating over array tweets");
                        try {
                            while (iterArray.hasNext()) {
                                Object current = iterArray.next();
                                // parse all tweet data to json
                                final DBObject dbObject = (DBObject) JSON.parse(current.toString());
                                countelements++;
                                dbObject.put("max_id", currdoc.get("max_id")); // add max_id to tweet data
                                dbObject.put("query", currdoc.get("query")); // add query word to tweet data
                                dbObject.put("query_time", currdoc.get("query_time")); // add query time to tweet data
                                dbObject.put("query_time_string", currdoc.get("query_time_string"));
                                // add user_name to the beginning of the text
                                dbObject.put("text", "@" + dbObject.get("from_user").toString()
                                        + ": " + dbObject.get("text").toString());
                                dbObject.put("count", 1L); // add appearance counter to tweet data
                                log4j.info("inserting tweet id: " + dbObject.get("id").toString());
                                boolean exists = false;
                                try {
                                    DBObject object = new BasicDBObject();
                                    object.put("id", Long.parseLong(dbObject.get("id").toString())); // object to search by
                                    DBObject newobject = collat.findOne(object);
                                    if (newobject != null) {
                                        // id already exists: update its appearance counter
                                        exists = true;
                                        newobject.put("count",
                                                Long.parseLong(newobject.get("count").toString()) + 1);
                                        collat.update(object, newobject);
                                    }
                                } catch (NullPointerException e) {
                                    log4j.debug(e);
                                }
                                if (!exists) {
                                    // first occurrence of this tweet id, insert it
                                    collat.insert(dbObject);
                                }
                                Thread t11 = new Thread(new Runnable() {
                                    public void run() {
                                        update_search_terms(dbObject.get("text").toString(),
                                                num_of_slots, max_time_frame_hours,
                                                dbObject.get("query").toString());
                                    }
                                });
                                Thread t12 = new Thread(new Runnable() {
                                    public void run() {
                                        rate_user(Long.parseLong(dbObject.get("from_user_id").toString()),
                                                dbObject.get("from_user").toString(),
                                                max_time_frame_hours);
                                    }
                                });
                                Thread t13 = new Thread(new Runnable() {
                                    public void run() {
                                        // decode the query word and build a "<tweet id>,<query time millis>" key
                                        String quer = dbObject.get("query").toString();
                                        quer = quer.replaceAll("%40", "@");
                                        quer = quer.replaceAll("%23", "#");
                                        long id = (long) (Double.parseDouble(
                                                dbObject.get("query_time").toString()) * 1000);
                                        String idplus = dbObject.get("id").toString() + "," + id;
                                        SearchResultId(quer, idplus);
                                    }
                                });
                                t11.start();
                                t12.start();
                                t13.start();
                                try {
                                    log4j.info("Waiting for threads to finish.");
                                    t11.join();
                                    t12.join();
                                    t13.join();
                                } catch (InterruptedException e) {
                                    log4j.error("Main thread (update_all_tweets) Interrupted");
                                }
                            }
                        } catch (Exception e) {
                            log4j.error(e);
                            e.printStackTrace();
                        }
                    }
                } catch (NullPointerException e) {
                    log4j.error(e);
                    log4j.info("NullPointerException caught, at update_all_tweets");
                }
                log4j.info("removing processed document from raw_data collection");
                try {
                    this.collrd.remove(currdoc);
                } catch (Exception e) {
                    log4j.debug(e);
                }
            }
        } catch (MongoException e) {
            log4j.error(e);
        }
    }
}
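// A minimal, hypothetical usage sketch (not part of the original class). It assumes the
// enclosing class is called something like TweetProcessor, that its constructor wires collrd
// and collat to the "raw_data" and "all_tweets" collections, and that the mongo-java-driver
// 2.x API used above (MongoClient, DB, DBCollection) is on the classpath. The database name,
// collection names, constructor and slot/window values are illustrative assumptions.
public static void main(String[] args) throws Exception {
    MongoClient client = new MongoClient("localhost", 27017);
    DB db = client.getDB("twitter"); // hypothetical database name
    TweetProcessor processor = new TweetProcessor(
            db.getCollection("raw_data"), // raw search results to consume
            db.getCollection("all_tweets")); // destination for individual tweets
    // e.g. 24 aggregation slots over a 24-hour time frame; loops until interrupted.
    processor.update_all_tweets(24, 24);
}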