public void testDuplicatesWithDeletion() throws Throwable {
    createEnvAndDbs(1 << 20, true, NUM_DBS);
    int numRecs = 10;
    int nDups = N_DUPLICATES_PER_KEY;
    try {
        /* Set up a repository of expected data. */
        Map<TestData, Set<TestData>> expectedData =
            new HashMap<TestData, Set<TestData>>();

        /* Insert all the data. */
        Transaction txn = env.beginTransaction(null, null);
        insertData(txn, 0, numRecs - 1, expectedData, nDups, true, NUM_DBS);

        /* Delete all the even records. */
        deleteData(txn, expectedData, false, true, NUM_DBS);
        txn.commit();

        /* Modify all the records. */
        // modifyData(expectedData);
        closeEnv();
        recoverAndVerify(expectedData, NUM_DBS);
    } catch (Throwable t) {
        t.printStackTrace();
        throw t;
    }
}
/** Write data into the database. */ private void generateData( Environment master, int numTxns, Durability durability, boolean doCommit) { /* Write some data. */ DatabaseEntry key = new DatabaseEntry(); byte[] dataPadding = new byte[1000]; DatabaseEntry data = new DatabaseEntry(dataPadding); TransactionConfig txnConfig = new TransactionConfig(); txnConfig.setDurability(durability); for (int i = 0; i < numTxns; i++) { final Transaction txn = master.beginTransaction(null, txnConfig); // long keyPrefix = i << 10; // LongBinding.longToEntry(keyPrefix + i, key); LongBinding.longToEntry(i, key); db.put(txn, key, data); if (doCommit) { txn.commit(); } else { txn.abort(); } } }
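/*
 * Illustrative sketch only: one way the generateData() helper above might be
 * driven. The ReplicatedEnvironment field "master" and the chosen durability
 * settings are assumptions for this sketch, not values from the original code.
 */
private void writeSampleLoad() {
    Durability durability = new Durability(Durability.SyncPolicy.SYNC,
                                           Durability.SyncPolicy.NO_SYNC,
                                           Durability.ReplicaAckPolicy.SIMPLE_MAJORITY);
    // 10 single-put transactions, each committed with the durability above
    generateData(master, 10, durability, true);
}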
public void put(WebURL url) throws DatabaseException {
    /*
     * The key used for storing URLs determines the order in which they are
     * crawled: lower key values result in earlier crawling. Here the key is
     * 6 bytes. The first byte is the URL priority and the second byte is the
     * depth of crawl at which the URL was first found; the remaining 4 bytes
     * hold the docid of the URL. As a result, URLs with lower priority numbers
     * are crawled earlier. If priorities are equal, URLs found at lower depths
     * are crawled earlier, and if depths are also equal, URLs found earlier
     * (and therefore with smaller docids) are crawled earlier.
     */
    byte[] keyData = new byte[6];
    keyData[0] = url.getPriority();
    keyData[1] = (url.getDepth() > Byte.MAX_VALUE ? Byte.MAX_VALUE : (byte) url.getDepth());
    Util.putIntInByteArray(url.getDocid(), keyData, 2);

    DatabaseEntry value = new DatabaseEntry();
    webURLBinding.objectToEntry(url, value);
    Transaction txn;
    if (resumable) {
        txn = env.beginTransaction(null, null);
    } else {
        txn = null;
    }
    urlsDB.put(txn, new DatabaseEntry(keyData), value);
    if (txn != null) {
        txn.commit();
    }
}
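/*
 * Hypothetical illustration (not part of the crawler code): why the 6-byte key
 * layout above yields priority-then-depth-then-docid crawl order. BDB JE compares
 * keys as unsigned byte strings by default; the big-endian docid layout mirrors
 * what Util.putIntInByteArray is assumed to do.
 */
import java.util.Arrays;

public class KeyOrderSketch {

    static byte[] key(byte priority, byte depth, int docid) {
        byte[] k = new byte[6];
        k[0] = priority;
        k[1] = depth;
        k[2] = (byte) (docid >>> 24);  // big-endian docid in bytes 2..5
        k[3] = (byte) (docid >>> 16);
        k[4] = (byte) (docid >>> 8);
        k[5] = (byte) docid;
        return k;
    }

    public static void main(String[] args) {
        byte[] higherPriority = key((byte) 0, (byte) 2, 7);
        byte[] lowerPriority = key((byte) 1, (byte) 1, 3);
        // unsigned lexicographic comparison: the lower priority byte wins,
        // so higherPriority sorts first and would be crawled earlier
        System.out.println(Arrays.compareUnsigned(higherPriority, lowerPriority) < 0); // true
    }
}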
@Override public String saveDocument( DsSessionDto aSessionDto, DsDocumentDto aDocumentDto, String aKey, boolean aKeyIsNumeric, String aValue, String aSecurityPolicyName, String aContentFileKey) { if ("bug".equals(aSessionDto.getUsername())) { throw new IllegalStateException("this is a bug"); } Transaction tx = entityStore.getEnvironment().beginTransaction(null, null); Document doc = new Document(); doc.setFileKey(aContentFileKey); doc.setFileName(getFileName(aDocumentDto)); documentPrimaryIndex.put(doc); tx.commit(); return String.valueOf(doc.getId()); }
public V remove(K key, V value) throws Exception {
    DatabaseEntry keyEntry = new DatabaseEntry();
    DatabaseEntry foundEntry = new DatabaseEntry();
    keyBinding.objectToEntry(key, keyEntry);
    V ret = null;
    Transaction txn = env.beginTransaction(null, null);
    Cursor cursor = db.openCursor(txn, null);
    /*
     * Cannot use Cursor#getSearchBoth() here because it compares values
     * byte-wise rather than with Object#equals(), so walk the duplicates
     * and compare each one.
     */
    if (cursor.getSearchKey(keyEntry, foundEntry, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
        do {
            V v = (V) dataBinding.entryToObject(foundEntry);
            if (value.equals(v)) {
                ret = v;
                cursor.delete();
                break;
            }
        } while (cursor.getNextDup(keyEntry, foundEntry, LockMode.DEFAULT) == OperationStatus.SUCCESS);
    }
    cursor.close();
    txn.commit();
    return ret;
}
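/*
 * Hedged alternative sketch (assumes the same env/db/binding fields as remove()
 * above): when the binding's byte representation is canonical, so byte-wise
 * equality implies Object equality, Cursor.getSearchBoth() can position directly
 * on the exact key/value pair instead of scanning duplicates with getNextDup().
 */
public boolean removeExact(K key, V value) throws Exception {
    DatabaseEntry keyEntry = new DatabaseEntry();
    DatabaseEntry dataEntry = new DatabaseEntry();
    keyBinding.objectToEntry(key, keyEntry);
    dataBinding.objectToEntry(value, dataEntry);
    Transaction txn = env.beginTransaction(null, null);
    Cursor cursor = db.openCursor(txn, null);
    boolean removed = false;
    try {
        if (cursor.getSearchBoth(keyEntry, dataEntry, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
            removed = cursor.delete() == OperationStatus.SUCCESS;
        }
    } finally {
        cursor.close();
        txn.commit();
    }
    return removed;
}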
@Override public void deleteDocument(DsSessionDto aSessionDto, String aDocumentId) { if ("bug".equals(aSessionDto.getUsername())) { throw new IllegalStateException("this is a bug"); } Transaction tx = entityStore.getEnvironment().beginTransaction(null, null); documentPrimaryIndex.delete(Long.valueOf(aDocumentId)); tx.commit(); }
public void run() { for (int i = 0; i < numOperations; i++) { Transaction txn = master.beginTransaction(null, null); int keyVal = random.nextInt(); AppData data = new AppData(keyVal); primaryIndex.put(txn, data); txn.commitSync(); txn = master.beginTransaction(null, null); primaryIndex.delete(txn, keyVal); txn.commitSync(); } }
private void attemptAbort(Transaction transaction) { try { if (transaction != null) transaction.abort(); } catch (Exception e) { logger.error("Abort failed!", e); } }
/**
 * Deletes a record from the database.
 *
 * @param key the key of the record to delete
 * @throws Exception if the delete does not succeed
 */
public void delete(Object key) throws Exception {
    Serializer serializer = new Serializer();
    byte[] keyBytes = serializer.serialize(key);
    final DatabaseEntry keyEntry = new DatabaseEntry(keyBytes);
    final Transaction transaction = myEnv.beginTransaction(null, null);
    final OperationStatus res = myDatabase.delete(transaction, keyEntry);
    if (res != OperationStatus.SUCCESS) {
        // abort so the transaction is not left open
        transaction.abort();
        throw new Exception("Error deleting from database");
    }
    transaction.commit();
}
private void attemptCommit(Transaction transaction) { try { transaction.commit(); } catch (DatabaseException e) { logger.error("Transaction commit failed!", e); attemptAbort(transaction); throw new PersistenceFailureException(e); } }
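/*
 * Illustrative sketch of how attemptAbort()/attemptCommit() are typically
 * combined around a write. The "environment" and "database" fields and the
 * putValue() method name are assumptions for this sketch, not part of the
 * snippets above.
 */
public void putValue(DatabaseEntry key, DatabaseEntry value) {
    Transaction transaction = null;
    try {
        transaction = environment.beginTransaction(null, null);
        database.put(transaction, key, value);
        attemptCommit(transaction);
    } catch (DatabaseException e) {
        logger.error("Put failed!", e);
        attemptAbort(transaction);
        throw new PersistenceFailureException(e);
    }
}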
public V put(K key, V value) throws Exception { DatabaseEntry keyEntry = new DatabaseEntry(); DatabaseEntry dataEntry = new DatabaseEntry(); keyBinding.objectToEntry(key, keyEntry); dataBinding.objectToEntry(value, dataEntry); V ret = null; Transaction txn = env.beginTransaction(null, null); Cursor cursor = db.openCursor(txn, null); if ((cursor.getSearchKey(keyEntry, dataEntry, LockMode.DEFAULT)) == OperationStatus.SUCCESS) { V v = (V) dataBinding.entryToObject(dataEntry); if (value.equals(v)) { // found ret = v; cursor.delete(); // remove here. and put later. } else { while ((cursor.getNextDup(keyEntry, dataEntry, LockMode.DEFAULT)) == OperationStatus.SUCCESS) { v = (V) dataBinding.entryToObject(dataEntry); if (value.equals(v)) { // found ret = v; cursor.delete(); // remove here. and put later. break; } } } } // put keyBinding.objectToEntry(key, keyEntry); dataBinding.objectToEntry(value, dataEntry); if ((cursor.put(keyEntry, dataEntry)) != OperationStatus.SUCCESS) { String msg = "Could not put: " + key + ", " + value; logger.log(Level.WARNING, msg); throw new DatabaseException(msg); } cursor.close(); txn.commit(); return ret; }
public List<WebURL> get(int max) throws DatabaseException { synchronized (mutex) { int matches = 0; List<WebURL> results = new ArrayList<WebURL>(max); Cursor cursor = null; OperationStatus result; DatabaseEntry key = new DatabaseEntry(); DatabaseEntry value = new DatabaseEntry(); Transaction txn; if (resumable) { txn = env.beginTransaction(null, null); } else { txn = null; } try { cursor = urlsDB.openCursor(txn, null); result = cursor.getFirst(key, value, null); while (matches < max && result == OperationStatus.SUCCESS) { if (value.getData().length > 0) { results.add(webURLBinding.entryToObject(value)); matches++; } result = cursor.getNext(key, value, null); } } catch (DatabaseException e) { if (txn != null) { txn.abort(); txn = null; } throw e; } finally { if (cursor != null) { cursor.close(); } if (txn != null) { txn.commit(); } } return results; } }
private void testMemberRemoveAckInteraction(final boolean delete) { createGroup(groupSize); Transaction txn; Database db; try { MasterTxn.setFactory(new TxnFactory(delete)); ReplicatedEnvironment master = repEnvInfo[0].getEnv(); txn = master.beginTransaction(null, null); /* Write to the environment. */ db = master.openDatabase(txn, "random", dbconfig); db.close(); txn.commit(); } catch (InsufficientAcksException e) { fail("No exception expected."); } finally { MasterTxn.setFactory(null); } }
@Override public synchronized void abort() { try { closeOpenedCursors(); dbTransaction.abort(); getPersistenceManager().unregisterTransaction(this); super.abort(); } catch (DatabaseException e) { throw getPersistenceManager().convertDatabaseException(e); } }
/**
 * Gets an object from the database.
 *
 * @param key the key to look up
 * @return the stored object, or null if the key is not present
 * @throws Exception if serialization or deserialization fails
 */
public Object get(Object key) throws Exception {
    Serializer serializer = new Serializer();
    byte[] keyBytes = serializer.serialize(key);
    final DatabaseEntry keyEntry = new DatabaseEntry(keyBytes);
    final DatabaseEntry dataEntry = new DatabaseEntry();
    Object result;
    final Transaction transaction = myEnv.beginTransaction(null, null);
    final OperationStatus res = myDatabase.get(transaction, keyEntry, dataEntry, null);
    if (res != OperationStatus.SUCCESS) {
        // throw new Exception("Error retrieving from database");
        result = null;
    } else {
        result = serializer.deserialize(dataEntry.getData());
    }
    /* Commit in both cases so the transaction is not left open. */
    transaction.commit();
    return result;
}
/**
 * Inserts a record into the database.
 *
 * @param key the record key
 * @param data the record data
 * @throws Exception if serialization fails
 */
public void insert(Object key, Object data) throws Exception {
    Serializer serializer = new Serializer();
    byte[] keyBytes = serializer.serialize(key);
    byte[] dataBytes = serializer.serialize(data);
    final DatabaseEntry keyEntry = new DatabaseEntry(keyBytes);
    final DatabaseEntry dataEntry = new DatabaseEntry(dataBytes);
    try {
        final Transaction transaction = myEnv.beginTransaction(null, null);
        final OperationStatus result = myDatabase.put(transaction, keyEntry, dataEntry);
        if (result != OperationStatus.SUCCESS) {
            System.out.println("operation status-failure");
            throw new Exception("Error inserting into database");
        }
        transaction.commit();
    } catch (Exception e) {
        System.out.println(e);
    }
}
public void delete(int count) throws DatabaseException { synchronized (mutex) { int matches = 0; Cursor cursor = null; OperationStatus result; DatabaseEntry key = new DatabaseEntry(); DatabaseEntry value = new DatabaseEntry(); Transaction txn; if (resumable) { txn = env.beginTransaction(null, null); } else { txn = null; } try { cursor = urlsDB.openCursor(txn, null); result = cursor.getFirst(key, value, null); while (matches < count && result == OperationStatus.SUCCESS) { cursor.delete(); matches++; result = cursor.getNext(key, value, null); } } catch (DatabaseException e) { if (txn != null) { txn.abort(); txn = null; } throw e; } finally { if (cursor != null) { cursor.close(); } if (txn != null) { txn.commit(); } } } }
/** {@inheritDoc} */ public void abort() { try { if (xid != null) { env.rollback(xid); } else { txn.abort(); } } catch (DatabaseException e) { throw JeEnvironment.convertException(e, false); } catch (XAException e) { throw JeEnvironment.convertException(e, false); } }
/** {@inheritDoc} */ public void commit() { try { if (xid != null) { env.commit(xid, true /* ignored */); } else { txn.commit(); } } catch (DatabaseException e) { throw JeEnvironment.convertException(e, false); } catch (XAException e) { throw JeEnvironment.convertException(e, false); } }
/*
 * See SR11455 for details.
 *
 * This test is checking that the maxTxnId gets recovered properly during
 * recovery. The SR has to do with the INFileReader not including
 * DupCountLN_TX and DelDupLN_TX's in its txnIdTrackingMap. When these
 * were not included, it was possible for a transaction to consist solely
 * of DupCountLN_TX/DelDupLN_TX pairs. The "deleteData" transaction below
 * does just this. If no checkpoint occurred following such a transaction,
 * then the correct current txnid would not be written to the log and
 * determining this value during recovery would be left up to the
 * INFileReader. However, without reading the DupCountLN_TX/DelDupLN_TX
 * records, it would not recover the correct value.
 *
 * We take the poor man's way out of creating this situation by just
 * manually asserting the txn id is correct post-recovery. The txnid of 12
 * was determined by looking through logs before and after the fix.
 */
public void testSR11455() throws Throwable {
    createEnvAndDbs(1 << 20, true, 1);
    int numRecs = 1;
    int nDups = 3;
    try {
        /* Set up a repository of expected data. */
        Map<TestData, Set<TestData>> expectedData =
            new HashMap<TestData, Set<TestData>>();

        /* Insert all the data. */
        Transaction txn = env.beginTransaction(null, null);
        insertData(txn, 0, numRecs - 1, expectedData, nDups, true, 1);
        txn.commit();

        txn = env.beginTransaction(null, null);
        /* Delete all the even records. */
        deleteData(txn, expectedData, false, false, 1);
        txn.abort();
        closeEnv();

        /* Open it again, which will run recovery. */
        EnvironmentConfig recoveryConfig = TestUtils.initEnvConfig();
        recoveryConfig.setTransactional(true);
        recoveryConfig.setConfigParam(EnvironmentParams.ENV_RUN_CLEANER.getName(), "false");
        recoveryConfig.setConfigParam(EnvironmentParams.ENV_RUN_EVICTOR.getName(), "false");
        env = new Environment(envHome, recoveryConfig);

        txn = env.beginTransaction(null, null);
        assertEquals(6, txn.getId());
        txn.commit();
        env.close();
    } catch (Throwable t) {
        t.printStackTrace();
        throw t;
    }
}
/** * Creates an instance of this class. * * @param env the Berkeley DB environment * @param timeout the number of milliseconds the transaction should be allowed to run * @throws IllegalArgumentException if timeout is less than {@code 1} * @throws DbDatabaseException if an unexpected database problem occurs */ JeTransaction(XAEnvironment env, long timeout) { this.env = env; if (timeout <= 0) { throw new IllegalArgumentException("Timeout must be greater than 0"); } try { txn = env.beginTransaction(null, null); /* Avoid overflow -- BDB treats 0 as unlimited */ long timeoutMicros = (timeout < (Long.MAX_VALUE / 1000)) ? timeout * 1000 : 0; txn.setTxnTimeout(timeoutMicros); } catch (DatabaseException e) { throw JeEnvironment.convertException(e, false); } }
@Override public synchronized void commit() { try { if (!openedCursors.isEmpty()) { closeOpenedCursors(); throw new BerkeleyDBPersistenceOpenedCursorsException(); } dbTransaction.commit(); getPersistenceManager().unregisterTransaction(this); super.commit(); } catch (DatabaseException e) { throw getPersistenceManager().convertDatabaseException(e); } }
private Set<V> getAndRemove(K key, boolean remove) throws DatabaseException { DatabaseEntry searchKey = new DatabaseEntry(); keyBinding.objectToEntry(key, searchKey); DatabaseEntry foundKey = new DatabaseEntry(); DatabaseEntry foundData = new DatabaseEntry(); Transaction txn = env.beginTransaction(null, null); Cursor cursor = db.openCursor(txn, null); Set<V> s = null; if ((cursor.getSearchKey(searchKey, foundData, LockMode.DEFAULT)) == OperationStatus.SUCCESS) { s = new HashSet<V>(); s.add((V) dataBinding.entryToObject(foundData)); while ((cursor.getNextDup(foundKey, foundData, LockMode.DEFAULT)) == OperationStatus.SUCCESS) { // K k = (K)keyBinding.entryToObject(foundKey); // if (!key.equals(k)) { // break; // } s.add((V) dataBinding.entryToObject(foundData)); } } // remove all entries associated with the given key if (remove) { db.delete(txn, searchKey); // does not check the result of this operation. } cursor.close(); txn.commit(); return s; }
@Override
public synchronized boolean trim(Long instance) {
    if (instance == 0) {
        return true; // fast track
    }
    Transaction t = null;
    if (db.getConfig().getTransactional()) {
        t = env.beginTransaction(null, null);
    }
    Cursor cursor = db.openCursor(t, null);
    boolean dirty = false;
    try {
        while (cursor.getNext(key, data, LockMode.READ_UNCOMMITTED) == OperationStatus.SUCCESS) {
            Long i = keyBinding.entryToObject(key);
            if (i < instance && cursor.delete() != OperationStatus.SUCCESS) {
                logger.error("Error deleting instance " + i + " from DB!");
                dirty = true;
            }
        }
    } finally {
        cursor.close();
        if (!dirty) {
            if (t != null) {
                t.commit();
            }
        } else {
            if (t != null) {
                t.abort();
            }
            return false;
        }
    }
    putDecision(-1L, new Decision(0, instance, 0, null));
    logger.debug("DB deleted up to instance " + instance);
    return true;
}
/** Insert or retrieve data */
public void run() throws DatabaseException {
    /* Create a new, transactional database environment */
    EnvironmentConfig envConfig = new EnvironmentConfig();
    envConfig.setTransactional(true);
    envConfig.setAllowCreate(true);
    Environment exampleEnv = new Environment(envDir, envConfig);

    /* Make a database within that environment */
    Transaction txn = exampleEnv.beginTransaction(null, null);
    DatabaseConfig dbConfig = new DatabaseConfig();
    dbConfig.setTransactional(true);
    dbConfig.setAllowCreate(true);
    dbConfig.setSortedDuplicates(true);
    Database exampleDb = exampleEnv.openDatabase(txn, "bindingsDb", dbConfig);

    /*
     * In our example, the database record is composed of an integer key
     * and an instance of the MyData class as data.
     *
     * A class catalog database is needed for storing class descriptions
     * for the serial binding used below. This avoids storing class
     * descriptions redundantly in each record.
     */
    DatabaseConfig catalogConfig = new DatabaseConfig();
    catalogConfig.setTransactional(true);
    catalogConfig.setAllowCreate(true);
    Database catalogDb = exampleEnv.openDatabase(txn, "catalogDb", catalogConfig);
    StoredClassCatalog catalog = new StoredClassCatalog(catalogDb);

    /*
     * Create a serial binding for MyData data objects. Serial bindings
     * can be used to store any Serializable object.
     */
    EntryBinding<MyData> dataBinding = new SerialBinding<MyData>(catalog, MyData.class);
    txn.commit();

    /*
     * Further below we'll use a tuple binding (IntegerBinding
     * specifically) for integer keys. Tuples, unlike serialized Java
     * objects, have a well defined sort order.
     */

    /* DatabaseEntry represents the key and data of each record */
    DatabaseEntry keyEntry = new DatabaseEntry();
    DatabaseEntry dataEntry = new DatabaseEntry();

    if (doInsert) {
        /* put some data in */
        for (int i = offset; i < numRecords + offset; i++) {
            StringBuilder stars = new StringBuilder();
            for (int j = 0; j < i; j++) {
                stars.append('*');
            }
            MyData data = new MyData(i, stars.toString());
            IntegerBinding.intToEntry(i, keyEntry);
            dataBinding.objectToEntry(data, dataEntry);
            txn = exampleEnv.beginTransaction(null, null);
            OperationStatus status = exampleDb.put(txn, keyEntry, dataEntry);

            /*
             * Note that put will throw a DatabaseException when
             * error conditions are found such as deadlock. However,
             * the status return conveys a variety of information.
             * For example, the put might succeed, or it might not
             * succeed if the record exists and duplicates are not
             * allowed.
             */
            if (status != OperationStatus.SUCCESS) {
                throw new RuntimeException("Data insertion got status " + status);
            }
            txn.commit();
        }
    } else {
        /* retrieve the data */
        Cursor cursor = exampleDb.openCursor(null, null);
        while (cursor.getNext(keyEntry, dataEntry, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
            int key = IntegerBinding.entryToInt(keyEntry);
            MyData data = dataBinding.entryToObject(dataEntry);
            System.out.println("key=" + key + " data=" + data);
        }
        cursor.close();
    }
    catalogDb.close();
    exampleDb.close();
    exampleEnv.close();
}
/** Insert or retrieve data. */
public void run() throws DatabaseException {
    /* Create a new, transactional database environment. */
    EnvironmentConfig envConfig = new EnvironmentConfig();
    envConfig.setTransactional(true);
    envConfig.setAllowCreate(true);
    Environment exampleEnv = new Environment(envDir, envConfig);

    /*
     * Make a database within that environment. Because this will be used
     * as a primary database, it must not allow duplicates. The primary key
     * of a primary database must be unique.
     */
    Transaction txn = exampleEnv.beginTransaction(null, null);
    DatabaseConfig dbConfig = new DatabaseConfig();
    dbConfig.setTransactional(true);
    dbConfig.setAllowCreate(true);
    Database exampleDb = exampleEnv.openDatabase(txn, "bindingsDb", dbConfig);

    /*
     * In our example, the database record is composed of an integer key
     * and an instance of the MyData class as data.
     *
     * A class catalog database is needed for storing class descriptions
     * for the serial binding used below. This avoids storing class
     * descriptions redundantly in each record.
     */
    DatabaseConfig catalogConfig = new DatabaseConfig();
    catalogConfig.setTransactional(true);
    catalogConfig.setAllowCreate(true);
    Database catalogDb = exampleEnv.openDatabase(txn, "catalogDb", catalogConfig);
    StoredClassCatalog catalog = new StoredClassCatalog(catalogDb);

    /*
     * Create a serial binding for MyData data objects. Serial
     * bindings can be used to store any Serializable object.
     */
    EntryBinding<MyData> dataBinding = new SerialBinding<MyData>(catalog, MyData.class);

    /*
     * Further below we'll use a tuple binding (IntegerBinding
     * specifically) for integer keys. Tuples, unlike serialized
     * Java objects, have a well defined sort order.
     */

    /*
     * Define a String tuple binding for a secondary key. The
     * secondary key is the msg field of the MyData object.
     */
    EntryBinding<String> secKeyBinding = TupleBinding.getPrimitiveBinding(String.class);

    /*
     * Open a secondary database to allow accessing the primary
     * database by the secondary key value.
     */
    SecondaryConfig secConfig = new SecondaryConfig();
    secConfig.setTransactional(true);
    secConfig.setAllowCreate(true);
    secConfig.setSortedDuplicates(true);
    secConfig.setKeyCreator(new MyKeyCreator(secKeyBinding, dataBinding));
    SecondaryDatabase exampleSecDb =
        exampleEnv.openSecondaryDatabase(txn, "bindingsSecDb", exampleDb, secConfig);
    txn.commit();

    /* DatabaseEntry represents the key and data of each record. */
    DatabaseEntry keyEntry = new DatabaseEntry();
    DatabaseEntry dataEntry = new DatabaseEntry();

    if (doInsert) {
        /*
         * Put some data in. Note that the primary database is always used
         * to add data. Adding or changing data in the secondary database
         * is not allowed; however, deleting through the secondary database
         * is allowed.
         */
        for (int i = offset; i < numRecords + offset; i++) {
            txn = exampleEnv.beginTransaction(null, null);
            StringBuffer stars = new StringBuffer();
            for (int j = 0; j < i; j++) {
                stars.append('*');
            }
            MyData data = new MyData(i, stars.toString());
            IntegerBinding.intToEntry(i, keyEntry);
            dataBinding.objectToEntry(data, dataEntry);
            OperationStatus status = exampleDb.put(txn, keyEntry, dataEntry);

            /*
             * Note that put will throw a DatabaseException when error
             * conditions are found such as deadlock. However, the status
             * return conveys a variety of information. For example, the
             * put might succeed, or it might not succeed if the record
             * exists and duplicates are not allowed.
             */
            if (status != OperationStatus.SUCCESS) {
                throw new RuntimeException("Data insertion got status " + status);
            }
            txn.commit();
        }
    } else {
        /*
         * Retrieve the data by secondary key by opening a cursor on the
         * secondary database. The key parameter for a secondary cursor is
         * always the secondary key, but the data parameter is always the
         * data of the primary database. You can cast the cursor to a
         * SecondaryCursor and use additional method signatures for
         * retrieving the primary key also. Or you can call
         * openSecondaryCursor() to avoid casting.
         */
        txn = exampleEnv.beginTransaction(null, null);
        Cursor cursor = exampleSecDb.openCursor(txn, null);
        while (cursor.getNext(keyEntry, dataEntry, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
            String key = secKeyBinding.entryToObject(keyEntry);
            MyData data = dataBinding.entryToObject(dataEntry);
            System.out.println("key=" + key + " data=" + data);
        }
        cursor.close();
        txn.commit();
    }

    /*
     * Always close secondary databases before closing their associated
     * primary database.
     */
    catalogDb.close();
    exampleSecDb.close();
    exampleDb.close();
    exampleEnv.close();
}
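/*
 * MyKeyCreator is referenced above but not shown in this snippet. A minimal
 * sketch of what it might look like (an assumption, including the getMsg()
 * accessor on MyData): it extracts the msg field as the secondary key.
 */
private static class MyKeyCreator implements SecondaryKeyCreator {

    private final EntryBinding<String> secKeyBinding;
    private final EntryBinding<MyData> dataBinding;

    MyKeyCreator(EntryBinding<String> secKeyBinding, EntryBinding<MyData> dataBinding) {
        this.secKeyBinding = secKeyBinding;
        this.dataBinding = dataBinding;
    }

    public boolean createSecondaryKey(SecondaryDatabase secondary,
                                      DatabaseEntry keyEntry,
                                      DatabaseEntry dataEntry,
                                      DatabaseEntry resultEntry) {
        MyData data = dataBinding.entryToObject(dataEntry);
        secKeyBinding.objectToEntry(data.getMsg(), resultEntry);
        return true;
    }
}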
public long getDbTransactionId() { return dbTransaction.getId(); }
@Override public String toString() { return dbTransaction.toString(); }
/** removes database */ public void removeDatabase() { final Transaction txn = myEnv.beginTransaction(null, null); // close(); myEnv.removeDatabase(txn, dbName); txn.commit(); }
/**
 * Synchronize publications with pubmed using pmid
 *
 * @throws Exception if an error occurs
 */
public void execute() throws Exception {
    // Needed so that STAX can find its implementation classes
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

    Database db = null;
    Transaction txn = null;
    try {
        if (osAlias == null) {
            throw new BuildException("osAlias attribute is not set");
        }
        if (outputFile == null) {
            throw new BuildException("outputFile attribute is not set");
        }

        // environment is transactional
        EnvironmentConfig envConfig = new EnvironmentConfig();
        envConfig.setTransactional(true);
        envConfig.setAllowCreate(true);
        Environment env = new Environment(new File(cacheDirName), envConfig);

        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setTransactional(true);
        dbConfig.setAllowCreate(true);
        dbConfig.setSortedDuplicates(true);
        db = env.openDatabase(null, "publications_db", dbConfig);
        txn = env.beginTransaction(null, null);

        LOG.info("Starting EntrezPublicationsRetriever");

        Writer writer = new FileWriter(outputFile);
        ObjectStore os = ObjectStoreFactory.getObjectStore(osAlias);
        Set<Integer> idsToFetch = new HashSet<Integer>();
        itemFactory = new ItemFactory(os.getModel(), "-1_");
        writer.write(FullRenderer.getHeader() + ENDL);

        for (Iterator<Publication> iter = getPublications(os).iterator(); iter.hasNext();) {
            String pubMedId = iter.next().getPubMedId();
            Integer pubMedIdInteger;
            try {
                pubMedIdInteger = Integer.valueOf(pubMedId);
            } catch (NumberFormatException e) {
                // not a pubmed id
                continue;
            }
            if (seenPubMeds.contains(pubMedIdInteger)) {
                continue;
            }
            DatabaseEntry key = new DatabaseEntry(pubMedId.getBytes());
            DatabaseEntry data = new DatabaseEntry();
            if (db.get(txn, key, data, null).equals(OperationStatus.SUCCESS)) {
                try {
                    ByteArrayInputStream mapInputStream = new ByteArrayInputStream(data.getData());
                    ObjectInputStream deserializer = new ObjectInputStream(mapInputStream);
                    Map<String, Object> pubMap = (Map) deserializer.readObject();
                    writeItems(writer, mapToItems(itemFactory, pubMap));
                    seenPubMeds.add(pubMedIdInteger);
                } catch (EOFException e) {
                    // ignore and fetch it again
                    System.err.println("found in cache, but ignored due to cache problem: "
                            + pubMedIdInteger);
                }
            } else {
                idsToFetch.add(pubMedIdInteger);
            }
        }

        Iterator<Integer> idIter = idsToFetch.iterator();
        Set<Integer> thisBatch = new HashSet<Integer>();
        while (idIter.hasNext()) {
            Integer pubMedIdInteger = idIter.next();
            thisBatch.add(pubMedIdInteger);
            if (thisBatch.size() == BATCH_SIZE
                    || (!idIter.hasNext() && thisBatch.size() > 0)) {
                try {
                    // the server may return fewer publications than we ask for, so keep a Map
                    Map<String, Map<String, Object>> fromServerMap = null;
                    for (int i = 0; i < MAX_TRIES; i++) {
                        BufferedReader br = new BufferedReader(getReader(thisBatch));
                        StringBuffer buf = new StringBuffer();
                        String line;
                        while ((line = br.readLine()) != null) {
                            buf.append(line + "\n");
                        }
                        fromServerMap = new HashMap<String, Map<String, Object>>();
                        Throwable throwable = null;
                        try {
                            if (loadFullRecord) {
                                SAXParser.parse(new InputSource(new StringReader(buf.toString())),
                                        new FullRecordHandler(fromServerMap), false);
                            } else {
                                SAXParser.parse(new InputSource(new StringReader(buf.toString())),
                                        new SummaryRecordHandler(fromServerMap), false);
                            }
                        } catch (Throwable e) {
                            LOG.error("Couldn't parse PubMed XML", e);
                            // try again or re-throw the Throwable
                            throwable = e;
                        }
                        if (throwable != null) {
                            if (i == MAX_TRIES - 1) {
                                // give up only after the last attempt (the original
                                // "i == MAX_TRIES" check could never be reached)
                                throw new RuntimeException("failed to parse: " + buf.toString()
                                        + " - tried " + MAX_TRIES + " times", throwable);
                            }
                            // try again
                            continue;
                        }
                        for (String id : fromServerMap.keySet()) {
                            writeItems(writer, mapToItems(itemFactory, fromServerMap.get(id)));
                        }
                        addToDb(txn, db, fromServerMap);
                        break;
                    }
                    thisBatch.clear();
                } finally {
                    txn.commit();
                    // start a new transaction in case there is an exception while parsing
                    txn = env.beginTransaction(null, null);
                }
            }
        }
        writeItems(writer, authorMap.values());
        writeItems(writer, meshTerms.values());
        writer.write(FullRenderer.getFooter() + ENDL);
        writer.flush();
        writer.close();
    } catch (Throwable e) {
        throw new RuntimeException("failed to get all publications", e);
    } finally {
        // guard against NullPointerExceptions if setup failed before these were created
        if (txn != null) {
            txn.commit();
        }
        if (db != null) {
            db.close();
        }
        Thread.currentThread().setContextClassLoader(cl);
    }
}