@Override
protected DBObject retrieveEntry(
    final PersistentEntity persistentEntity, String family, final Serializable key) {
  // Fetch a single document by its identifier from the entity's collection.
  return mongoTemplate.execute(
      new DbCallback<DBObject>() {
        public DBObject doInDB(DB connection) throws MongoException, DataAccessException {
          final DBCollection collection =
              connection.getCollection(getCollectionName(persistentEntity));
          return collection.findOne(createDBObjectWithKey(key));
        }
      });
}
/**
 * Updates an existing document for the given entity. When the entity is versioned, the update is
 * issued as a conditional write on the previous version value so a concurrent modification makes
 * the update match zero documents, which is then surfaced as an {@link OptimisticLockingException}.
 */
@Override
public void updateEntry(
    final PersistentEntity persistentEntity,
    final EntityAccess ea,
    final Object key,
    final DBObject entry) {
  mongoTemplate.execute(
      new DbCallback<Object>() {
        public Object doInDB(DB con) throws MongoException, DataAccessException {
          String collectionName = getCollectionName(persistentEntity, entry);
          DBCollection dbCollection = con.getCollection(collectionName);
          DBObject dbo = createDBObjectWithKey(key);
          boolean versioned = isVersioned(ea);
          if (versioned) {
            // Bump the in-memory version first; the query below still targets the OLD version.
            Object currentVersion = getCurrentVersion(ea);
            incrementVersion(ea);
            // query for old version to ensure atomicity
            if (currentVersion != null) {
              dbo.put("version", currentVersion);
            }
          }
          // Null-valued attributes are translated into $unset operations rather than stored nulls.
          DBObject newEntry = modifyNullsToUnsets(entry);
          MongoSession mongoSession = (MongoSession) session;
          // A write concern declared on the entity mapping (if any) overrides the driver default.
          WriteConcern writeConcern = mongoSession.getDeclaredWriteConcern(getPersistentEntity());
          WriteResult result;
          if (writeConcern != null) {
            result = dbCollection.update(dbo, newEntry, false, false, writeConcern);
          } else {
            result = dbCollection.update(dbo, newEntry, false, false);
          }
          if (versioned) {
            // ok, we need to check whether the write worked:
            // note that this will use the standard write concern unless it wasn't at least
            // ACKNOWLEDGE:
            CommandResult error = result.getLastError(WriteConcern.ACKNOWLEDGED);
            // may as well handle any networking errors:
            error.throwOnError();
            // if the document count "n" of the update was 0, the versioning check must have
            // failed:
            if (error.getInt("n") == 0) {
              throw new OptimisticLockingException(persistentEntity, key);
            }
          }
          return null;
        }
      });
}
@Override
protected Object storeEntry(
    final PersistentEntity persistentEntity,
    final EntityAccess entityAccess,
    final Object storeId,
    final DBObject nativeEntry) {
  // No insert happens here: the entry is cleaned of null attributes and stamped with its
  // identifier, which is returned for use by the caller.
  return mongoTemplate.execute(
      new DbCallback<Object>() {
        public Object doInDB(DB connection) throws MongoException, DataAccessException {
          removeNullEntries(nativeEntry);
          nativeEntry.put(MONGO_ID_FIELD, storeId);
          return nativeEntry.get(MONGO_ID_FIELD);
        }
      });
}
@Override
protected void deleteEntries(String family, final List<Object> keys) {
  // Bulk delete: issue a single remove with an "identifier in (keys)" query.
  mongoTemplate.execute(
      new DbCallback<Object>() {
        public Object doInDB(DB connection) throws MongoException, DataAccessException {
          final PersistentEntity entity = getPersistentEntity();
          final DBCollection collection = connection.getCollection(getCollectionName(entity));
          final MongoSession mongoSession = (MongoSession) getSession();
          final MongoQuery inQuery = mongoSession.createQuery(entity.getJavaClass());
          inQuery.in(entity.getIdentity().getName(), keys);
          collection.remove(inQuery.getMongoQuery());
          return null;
        }
      });
}
/**
 * Imports the sample dataset (zips.json) if necessary (e.g. if it doesn't exist yet). The dataset
 * can originally be found on the mongodb aggregation framework example website:
 *
 * @see http://docs.mongodb.org/manual/tutorial/aggregation-examples/.
 */
private void initSampleDataIfNecessary() {
  if (!initialized) {
    CommandResult result = mongoTemplate.executeCommand("{ buildInfo: 1 }");
    Object version = result.get("version");
    LOGGER.debug("Server uses MongoDB Version: {}", version);

    // Start from a clean collection, then stream the JSON records in line by line.
    mongoTemplate.dropCollection(ZipInfo.class);
    mongoTemplate.execute(
        ZipInfo.class,
        new CollectionCallback<Void>() {
          @Override
          public Void doInCollection(DBCollection collection)
              throws MongoException, DataAccessException {
            Scanner scanner = null;
            try {
              scanner =
                  new Scanner(
                      new BufferedInputStream(new ClassPathResource("zips.json").getInputStream()));
              while (scanner.hasNextLine()) {
                String zipInfoRecord = scanner.nextLine();
                collection.save((DBObject) JSON.parse(zipInfoRecord));
              }
            } catch (Exception e) {
              throw new RuntimeException("Could not load mongodb sample dataset!", e);
            } finally {
              // Fix: the original closed the Scanner only on failure, leaking the underlying
              // stream on the (common) success path. Closing the Scanner closes the stream too.
              if (scanner != null) {
                scanner.close();
              }
            }
            return null;
          }
        });

    // Sanity-check that the full fixture was imported before marking initialization done.
    long count = mongoTemplate.count(new Query(), ZipInfo.class);
    assertThat(count, is(29467L));
    initialized = true;
  }
}
@Override
protected void deleteEntry(String family, final Object key, final Object entry) {
  // Remove the single document matching the given identifier, honoring any write concern
  // declared on the entity mapping.
  mongoTemplate.execute(
      new DbCallback<Object>() {
        public Object doInDB(DB con) throws MongoException, DataAccessException {
          final DBCollection collection = getCollection(con);
          final DBObject idQuery = createDBObjectWithKey(key);
          final MongoSession mongoSession = (MongoSession) session;
          final WriteConcern declaredConcern =
              mongoSession.getDeclaredWriteConcern(getPersistentEntity());
          if (declaredConcern == null) {
            collection.remove(idQuery);
          } else {
            collection.remove(idQuery, declaredConcern);
          }
          return null;
        }

        protected DBCollection getCollection(DB con) {
          return con.getCollection(getCollectionName(getPersistentEntity()));
        }
      });
}
@Test
public void testGeoLocation() {
  GeoLocation geo = new GeoLocation(new double[] {40.714346, -74.005966});
  template.insert(geo);

  // After the insert, the "geolocation" collection should carry an index named "location".
  boolean hasIndex =
      template.execute(
          "geolocation",
          new CollectionCallback<Boolean>() {
            public Boolean doInCollection(DBCollection collection)
                throws MongoException, DataAccessException {
              for (DBObject indexInfo : collection.getIndexInfo()) {
                if ("location".equals(indexInfo.get("name"))) {
                  return Boolean.TRUE;
                }
              }
              return Boolean.FALSE;
            }
          });
  assertTrue(hasIndex);
}
/**
 * Generates an identifier for the given native entry. Numeric identifiers are produced from a
 * per-collection counter document (collection name + NEXT_ID_SUFFIX) incremented atomically via
 * findAndModify; non-numeric identifiers fall back to BSON ObjectIds (stored as ObjectId or as
 * its String form, depending on the declared identity type).
 */
@Override
protected Object generateIdentifier(
    final PersistentEntity persistentEntity, final DBObject nativeEntry) {
  return mongoTemplate.execute(
      new DbCallback<Object>() {
        public Object doInDB(DB con) throws MongoException, DataAccessException {
          String collectionName = getCollectionName(persistentEntity, nativeEntry);
          // Counter documents live in a sibling collection: "<collection>" + NEXT_ID_SUFFIX.
          DBCollection dbCollection = con.getCollection(collectionName + NEXT_ID_SUFFIX);

          // If there is a numeric identifier then we need to rely on optimistic concurrency
          // controls to obtain a unique identifer
          // sequence. If the identifier is not numeric then we assume BSON ObjectIds.
          if (hasNumericalIdentifier) {
            int attempts = 0;
            while (true) {
              // upsert=true creates the counter on first use; returnNew semantics: the
              // pre-increment document is returned (returnNew flag is the second boolean).
              DBObject result =
                  dbCollection.findAndModify(
                      new BasicDBObject(MONGO_ID_FIELD, collectionName),
                      null,
                      null,
                      false,
                      new BasicDBObject("$inc", new BasicDBObject("next_id", 1)),
                      true,
                      true);

              // result should never be null and we shouldn't come back with an error ,but you
              // never know. We should just retry if this happens...
              if (result != null && con.getLastError().ok()) {
                long nextId =
                    getMappingContext()
                        .getConversionService()
                        .convert(result.get("next_id"), Long.class);
                nativeEntry.put(MONGO_ID_FIELD, nextId);
                break;
              } else {
                attempts++;
                // Give up after a bounded number of retries rather than looping forever.
                if (attempts > 3) {
                  throw new IdentityGenerationException(
                      "Unable to generate identity using findAndModify after 3 attempts: "
                          + con.getLastError().getErrorMessage());
                }
              }
            }

            return nativeEntry.get(MONGO_ID_FIELD);
          }

          // Non-numeric identity: generate an ObjectId client-side.
          ObjectId objectId = ObjectId.get();
          if (ObjectId.class.isAssignableFrom(persistentEntity.getIdentity().getType())) {
            nativeEntry.put(MONGO_ID_FIELD, objectId);
            return objectId;
          }

          // Identity declared as String (or other non-ObjectId type): store the hex form.
          String stringId = objectId.toString();
          nativeEntry.put(MONGO_ID_FIELD, stringId);
          return stringId;
        }
      });
}