public void ensureCaps() {
    for (MappedClass mc : mapr.getMappedClasses())
        if (mc.getEntityAnnotation() != null && mc.getEntityAnnotation().cap().value() > 0) {
            CappedAt cap = mc.getEntityAnnotation().cap();
            String collName = mapr.getCollectionName(mc.getClazz());
            BasicDBObjectBuilder dbCapOpts = BasicDBObjectBuilder.start("capped", true);
            if (cap.value() > 0)
                dbCapOpts.add("size", cap.value()); // cap by size in bytes
            if (cap.count() > 0)
                dbCapOpts.add("max", cap.count()); // cap by document count

            DB db = getDB();
            if (db.getCollectionNames().contains(collName)) {
                DBObject dbResult = db.command(BasicDBObjectBuilder.start("collstats", collName).get());
                if (dbResult.containsField("capped")) {
                    // TODO: check the cap options.
                    log.warning("DBCollection already exists and is cap'd already; doing nothing. " + dbResult);
                } else {
                    log.warning("DBCollection already exists with same name (" + collName
                            + ") and is not cap'd; not creating cap'd version!");
                }
            } else {
                db.createCollection(collName, dbCapOpts.get());
                log.debug("Created cap'd DBCollection (" + collName + ") with opts " + dbCapOpts);
            }
        }
}
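// Usage sketch (illustrative, not part of the original source): declaring a capped entity and letting
// ensureCaps() create the collection at startup. The CappedLogEntry class, database name, and wiring
// are assumptions; package roots (com.google.code.morphia vs org.mongodb.morphia) depend on the
// Morphia version, and imports are omitted to match the surrounding excerpt.
@Entity(value = "logs", cap = @CappedAt(value = 1048576, count = 1000)) // ~1 MB or 1000 docs, whichever is hit first
class CappedLogEntry {
    @Id ObjectId id;
    String message;
}

static Datastore bootstrap() throws Exception {
    Morphia morphia = new Morphia();
    morphia.map(CappedLogEntry.class);
    Datastore ds = morphia.createDatastore(new Mongo("localhost"), "testdb");
    ds.ensureCaps(); // creates the capped "logs" collection if it does not already exist
    return ds;
}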
public <T> T findAndDelete(Query<T> query) {
    QueryImpl<T> qi = (QueryImpl<T>) query;
    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null)
        dbColl = getCollection(qi.getEntityClass());

    EntityCache cache = createCache();
    if (log.isTraceEnabled())
        log.trace("Executing findAndModify(" + dbColl.getName() + ") with delete ...");

    DBObject result = dbColl.findAndModify(
            qi.getQueryObject(),
            qi.getFieldsObject(),
            qi.getSortObject(),
            true,   // remove the matching document
            null,   // no update
            false,  // return the old (pre-delete) version
            false); // no upsert

    if (result != null) {
        T entity = (T) mapr.fromDBObject(qi.getEntityClass(), result, cache);
        return entity;
    }
    return null;
}
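// Usage sketch (illustrative, not part of the original source): atomically fetching and removing the
// first document matching a query. The Hotel entity, its fields, and the Datastore ds are hypothetical.
static Hotel removeByName(Datastore ds, String name) {
    Query<Hotel> q = ds.createQuery(Hotel.class).field("name").equal(name);
    return ds.findAndDelete(q); // returns null if nothing matched
}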
protected <T> void ensureIndex(Class<T> clazz, String name, BasicDBObject fields, boolean unique,
        boolean dropDupsOnCreate, boolean background, boolean sparse) {
    // validate field names and translate them to the stored values
    BasicDBObject keys = new BasicDBObject();
    for (Entry<String, Object> entry : fields.entrySet()) {
        StringBuffer sb = new StringBuffer(entry.getKey());
        Mapper.validate(clazz, mapr, sb, FilterOperator.IN, "", true, false);
        keys.put(sb.toString(), entry.getValue());
    }

    BasicDBObjectBuilder keyOpts = new BasicDBObjectBuilder();
    if (name != null && name.length() > 0)
        keyOpts.add("name", name);
    if (unique) {
        keyOpts.add("unique", true);
        if (dropDupsOnCreate)
            keyOpts.add("dropDups", true);
    }
    if (background)
        keyOpts.add("background", true);
    if (sparse)
        keyOpts.add("sparse", true);

    DBCollection dbColl = getCollection(clazz);
    BasicDBObject opts = (BasicDBObject) keyOpts.get();
    if (opts.isEmpty()) {
        log.debug("Ensuring index for " + dbColl.getName() + " with keys:" + keys);
        dbColl.ensureIndex(keys);
    } else {
        log.debug("Ensuring index for " + dbColl.getName() + " with keys:" + keys + " and opts:" + opts);
        dbColl.ensureIndex(keys, opts);
    }

    // TODO: remove this once using 2.4 driver does this in ensureIndex
    CommandResult cr = dbColl.getDB().getLastError();
    cr.throwOnError();
}
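// Usage sketch (illustrative, not part of the original source): indexes are usually declared with
// @Indexed and built at startup, while the Datastore-level ensureIndex(Class, String) call creates an
// ad-hoc index programmatically. The Hotel entity is hypothetical and the calls assume the classic
// Morphia Datastore API.
@Entity
class Hotel {
    @Id ObjectId id;
    @Indexed(unique = true) String name;
    int stars;
    String city;
}

static void createIndexes(Datastore ds) {
    ds.ensureIndexes();                   // processes @Indexed annotations on all mapped classes
    ds.ensureIndex(Hotel.class, "stars"); // ad-hoc single-field ascending index
}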
private void addTypedConverter(Class type, TypeConverter tc) {
    if (tcMap.containsKey(type)) {
        // Insert at the head so the newest converter takes precedence during lookup.
        tcMap.get(type).add(0, tc);
        log.warning("Added duplicate converter for " + type + " ; " + tcMap.get(type));
    } else {
        ArrayList<TypeConverter> vals = new ArrayList<TypeConverter>();
        vals.add(tc);
        tcMap.put(type, vals);
    }
}
private TypeConverter getEncoder(final Class c) {
    List<TypeConverter> tcs = tcMap.get(c);
    if (tcs != null) {
        if (tcs.size() > 1)
            log.warning("Duplicate converter for " + c + ", returning first one from " + tcs);
        return tcs.get(0);
    }

    for (TypeConverter tc : untypedTypeEncoders)
        if (tc.canHandle(c))
            return tc;

    throw new ConverterNotFoundException("Cannot find encoder for " + c.getName());
}
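// Usage sketch (illustrative, not part of the original source): a custom TypeConverter whose
// registration ends up in addTypedConverter above and is later resolved by getEncoder. This assumes
// the classic TypeConverter API (a varargs constructor of supported classes plus decode/encode
// overrides) and a converters registry exposing addConverter; those names are assumptions.
public class ColorConverter extends TypeConverter {
    public ColorConverter() {
        super(java.awt.Color.class); // types this converter handles
    }

    @Override
    public Object decode(Class targetClass, Object fromDBObject, MappedField optionalExtraInfo) {
        return fromDBObject == null ? null : java.awt.Color.decode((String) fromDBObject);
    }

    @Override
    public Object encode(Object value, MappedField optionalExtraInfo) {
        if (value == null)
            return null;
        java.awt.Color c = (java.awt.Color) value;
        return String.format("#%02x%02x%02x", c.getRed(), c.getGreen(), c.getBlue());
    }
}

// Registration (assumed API):
// morphia.getMapper().getConverters().addConverter(new ColorConverter());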
private <T> UpdateResults<T> update(Query<T> query, DBObject u, boolean createIfMissing, boolean multi,
        WriteConcern wc) {
    QueryImpl<T> qi = (QueryImpl<T>) query;

    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null)
        dbColl = getCollection(qi.getEntityClass());

    if (qi.getSortObject() != null && qi.getSortObject().keySet() != null
            && !qi.getSortObject().keySet().isEmpty())
        throw new QueryException("sorting is not allowed for updates.");
    if (qi.getOffset() > 0)
        throw new QueryException("a query offset is not allowed for updates.");
    if (qi.getLimit() > 0)
        throw new QueryException("a query limit is not allowed for updates.");

    DBObject q = qi.getQueryObject();
    if (q == null)
        q = new BasicDBObject();

    if (log.isTraceEnabled())
        log.trace("Executing update(" + dbColl.getName() + ") for query: " + q + ", ops: " + u
                + ", multi: " + multi + ", upsert: " + createIfMissing);

    WriteResult wr;
    if (wc == null)
        wr = dbColl.update(q, u, createIfMissing, multi);
    else
        wr = dbColl.update(q, u, createIfMissing, multi, wc);

    throwOnError(wc, wr);

    return new UpdateResults<T>(wr);
}
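// Usage sketch (illustrative, not part of the original source): the public Datastore.update(...) and
// updateFirst(...) entry points build the update document from UpdateOperations and delegate to the
// private update(...) above. Entity and field names are hypothetical.
static UpdateResults<Hotel> promoteHighlyRated(Datastore ds) {
    Query<Hotel> q = ds.createQuery(Hotel.class).field("stars").greaterThanOrEq(4);
    UpdateOperations<Hotel> ops = ds.createUpdateOperations(Hotel.class).set("featured", true);
    return ds.update(q, ops); // multi-document update of every match
}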
public <T> T findAndModify(Query<T> query, UpdateOperations<T> ops, boolean oldVersion,
        boolean createIfMissing) {
    QueryImpl<T> qi = (QueryImpl<T>) query;

    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null)
        dbColl = getCollection(qi.getEntityClass());

    if (log.isTraceEnabled())
        log.trace("Executing findAndModify(" + dbColl.getName() + ") with update ");

    DBObject res = dbColl.findAndModify(
            qi.getQueryObject(),
            qi.getFieldsObject(),
            qi.getSortObject(),
            false,                              // do not remove
            ((UpdateOpsImpl<T>) ops).getOps(),  // update document
            !oldVersion,                        // returnNew
            createIfMissing);                   // upsert

    if (res == null)
        return null;
    else
        return (T) mapr.fromDBObject(qi.getEntityClass(), res, createCache());
}
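// Usage sketch (illustrative, not part of the original source): findAndModify applies the update
// atomically and returns the mapped entity; oldVersion=false asks for the post-update state and
// createIfMissing=false disables the upsert. Entity and field names are hypothetical.
static Hotel addStar(Datastore ds, String name) {
    Query<Hotel> q = ds.createQuery(Hotel.class).field("name").equal(name);
    UpdateOperations<Hotel> ops = ds.createUpdateOperations(Hotel.class).inc("stars");
    return ds.findAndModify(q, ops, false, false); // null if nothing matched
}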
private TypeConverter getEncoder(Object val, MappedField mf) {
    List<TypeConverter> tcs = null;
    if (val != null)
        tcs = tcMap.get(val.getClass());

    // Fall back to the declared field type when the value's own type only has a passthrough converter.
    if (tcs == null || (tcs.size() > 0 && tcs.get(0) instanceof PassthroughConverter))
        tcs = tcMap.get(mf.getType());

    if (tcs != null) {
        if (tcs.size() > 1)
            log.warning("Duplicate converter for " + mf.getType() + ", returning first one from " + tcs);
        return tcs.get(0);
    }

    for (TypeConverter tc : untypedTypeEncoders)
        if (tc.canHandle(mf) || (val != null && tc.isSupported(val.getClass(), mf)))
            return tc;

    throw new ConverterNotFoundException(
            "Cannot find encoder for " + mf.getType() + " as needed for " + mf.getFullName());
}
@SuppressWarnings("rawtypes")
public <T> MapreduceResults<T> mapReduce(MapreduceType type, Query query, String map, String reduce,
        String finalize, Map<String, Object> scopeFields, Class<T> outputType) {
    Assert.parametersNotNull("map", map);
    Assert.parameterNotEmpty(map, "map");
    Assert.parametersNotNull("reduce", reduce);
    Assert.parameterNotEmpty(reduce, "reduce");

    QueryImpl<T> qi = (QueryImpl<T>) query;
    DBCollection dbColl = qi.getCollection();
    // TODO remove this after testing.
    if (dbColl == null)
        dbColl = getCollection(qi.getEntityClass());

    if (log.isTraceEnabled())
        log.trace("Executing mapReduce(" + dbColl.getName() + ") with query(" + qi.toString()
                + ") map(" + map + ") reduce(" + reduce + ") finalize(" + finalize
                + ") scope(" + scopeFields + ")");

    // TODO replace this with the 2.4 driver impl.
    String outColl = mapr.getCollectionName(outputType);
    BasicDBObjectBuilder bldr =
            BasicDBObjectBuilder.start("mapreduce", mapr.getCollectionName(qi.getEntityClass()));
    switch (type) {
        case REDUCE:
            bldr.push("out").add("reduce", outColl).pop();
            break;
        case MERGE:
            bldr.push("out").add("merge", outColl).pop();
            break;
        case INLINE:
            bldr.push("out").add("inline", 1).pop();
            break;
        default:
            bldr.add("out", outColl);
            break;
    }

    if (qi.getOffset() != 0 || qi.getFieldsObject() != null)
        throw new QueryException("mapReduce does not allow the offset/retrievedFields query options.");

    if (qi.getQueryObject() != null)
        bldr.add("query", qi.getQueryObject());
    if (qi.getLimit() > 0)
        bldr.add("limit", qi.getLimit());
    if (qi.getSortObject() != null)
        bldr.add("sort", qi.getSortObject());

    bldr.add("map", map);
    bldr.add("reduce", reduce);

    if (finalize != null && finalize.length() > 0)
        bldr.add("finalize", finalize);

    if (scopeFields != null && scopeFields.size() > 0)
        bldr.add("scope", mapr.toMongoObject(null, null, scopeFields));

    DBObject dbObj = bldr.get();
    CommandResult cr = dbColl.getDB().command(dbObj);
    cr.throwOnError();

    MapreduceResults mrRes =
            (MapreduceResults) mapr.fromDBObject(MapreduceResults.class, cr, createCache());

    QueryImpl baseQ = null;
    if (!MapreduceType.INLINE.equals(type))
        baseQ = new QueryImpl(outputType, db.getCollection(mrRes.getOutputCollectionName()), this);
    // TODO Handle inline case and create an iterator/able.

    mrRes.setBits(type, baseQ);
    return mrRes;
}
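// Usage sketch (illustrative, not part of the original source): counts hotels per city, reducing into
// the collection mapped for a hypothetical CityCount result class. The JavaScript functions and entity
// names are assumptions; the call itself matches the mapReduce signature above.
static MapreduceResults<CityCount> countHotelsByCity(Datastore ds) {
    String map = "function() { emit(this.city, 1); }";
    String reduce = "function(key, values) { return Array.sum(values); }";
    return ds.mapReduce(
            MapreduceType.REDUCE,        // reduce into the output collection mapped for CityCount
            ds.createQuery(Hotel.class), // no offset/retrievedFields, as required above
            map,
            reduce,
            null,                        // no finalize function
            null,                        // no scope variables
            CityCount.class);
}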