// FIXME convert to hbird archiver interface and move this to the archiver.
private static DBObject buildMongoQuery(Map<String, String> aoData) {
  // Get datatables values
  long startTime = Long.parseLong(aoData.get("startTime"));
  long endTime = Long.parseLong(aoData.get("endTime"));
  String search = aoData.get("sSearch");

  // Build mongo query
  // @formatter:off
  DBObject mongoQuery = new BasicDBObject();
  mongoQuery.put(
      "receivedTime",
      BasicDBObjectBuilder.start("$gte", startTime).add("$lte", endTime).get());
  if (search != null && !search.isEmpty()) {
    LOG.trace("Adding search query " + search);
    Pattern match = Pattern.compile(search, Pattern.CASE_INSENSITIVE | Pattern.MULTILINE);
    // Note: normally you would pass the Pattern object to the Java Mongo driver, but when
    // routing over distributed JMS you can only send objectified primitives, so we have to
    // build the search string ourselves.
    DBObject matchString = new BasicDBObject("$regex", match.toString()).append("$options", "im");
    mongoQuery.put("name", matchString);
  }
  // @formatter:on
  return mongoQuery;
}
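As the comment above explains, the Java driver can encode a compiled java.util.regex.Pattern directly as a BSON regex; the $regex/$options string form is only needed here because the query has to survive JMS serialization. A minimal sketch of the two equivalent query shapes (field name and pattern are illustrative):

import java.util.regex.Pattern;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class RegexQueryShapes {
  public static void main(String[] args) {
    // Shape 1: hand the driver a compiled Pattern (works in-process only).
    DBObject viaPattern =
        new BasicDBObject(
            "name", Pattern.compile("iss", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE));
    // Shape 2: spell out $regex/$options as plain strings (survives JMS transport).
    DBObject viaStrings =
        new BasicDBObject("name", new BasicDBObject("$regex", "iss").append("$options", "im"));
    System.out.println(viaPattern + " matches the same documents as " + viaStrings);
  }
}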
@Test
public void testAnotherUpsert() {
  DBCollection collection = newCollection();
  BasicDBObjectBuilder queryBuilder =
      BasicDBObjectBuilder.start()
          .push("_id")
          .append("f", "ca")
          .push("1")
          .append("l", 2)
          .pop()
          .push("t")
          .append("t", 11)
          .pop()
          .pop();
  DBObject query = queryBuilder.get();

  DBObject update =
      BasicDBObjectBuilder.start()
          .push("$inc")
          .append("n.!", 1)
          .append("n.a.b:false", 1)
          .pop()
          .get();

  collection.update(query, update, true, false);

  DBObject expected =
      queryBuilder.push("n").append("!", 1).push("a").append("b:false", 1).pop().pop().get();
  assertEquals(expected, collection.findOne());
}
/**
 * Creates a GridFS instance for the specified bucket in the given database. Set the preferred
 * WriteConcern on the given DB with DB.setWriteConcern.
 *
 * @see com.mongodb.WriteConcern
 * @param db database to work with
 * @param bucket bucket to use in the given database
 * @throws MongoException
 */
public GridFS(DB db, String bucket) {
  _db = db;
  _bucketName = bucket;
  _filesCollection = _db.getCollection(_bucketName + ".files");
  _chunkCollection = _db.getCollection(_bucketName + ".chunks");

  // ensure standard indexes as long as collections are small
  try {
    if (_filesCollection.count() < 1000) {
      _filesCollection.ensureIndex(
          BasicDBObjectBuilder.start().add("filename", 1).add("uploadDate", 1).get());
    }
    if (_chunkCollection.count() < 1000) {
      _chunkCollection.ensureIndex(
          BasicDBObjectBuilder.start().add("files_id", 1).add("n", 1).get(),
          BasicDBObjectBuilder.start().add("unique", true).get());
    }
  } catch (MongoException e) {
    LOGGER.info(
        String.format(
            "Unable to ensure indices on GridFS collections in database %s", db.getName()));
  }
  _filesCollection.setObjectClass(GridFSDBFile.class);
}
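For context, here is a minimal usage sketch for this constructor, based on the legacy com.mongodb.gridfs API; the bucket name and payload are made up for illustration:

import com.mongodb.DB;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSInputFile;

public class GridFSUsageSketch {
  static void roundTrip(DB db) {
    GridFS gridFs = new GridFS(db, "attachments"); // runs the constructor above
    GridFSInputFile file = gridFs.createFile(new byte[] {1, 2, 3});
    file.setFilename("example.bin");
    file.save(); // writes to attachments.files and attachments.chunks
    GridFSDBFile stored = gridFs.findOne("example.bin"); // reads back by filename
    System.out.println(stored.getLength());
  }
}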
public void ensureCaps() {
  for (MappedClass mc : mapr.getMappedClasses()) {
    if (mc.getEntityAnnotation() != null && mc.getEntityAnnotation().cap().value() > 0) {
      CappedAt cap = mc.getEntityAnnotation().cap();
      String collName = mapr.getCollectionName(mc.getClazz());
      BasicDBObjectBuilder dbCapOpts = BasicDBObjectBuilder.start("capped", true);
      if (cap.value() > 0) {
        dbCapOpts.add("size", cap.value());
      }
      if (cap.count() > 0) {
        dbCapOpts.add("max", cap.count());
      }
      DB db = getDB();
      if (db.getCollectionNames().contains(collName)) {
        DBObject dbResult = db.command(BasicDBObjectBuilder.start("collstats", collName).get());
        if (dbResult.containsField("capped")) {
          // TODO: check the cap options.
          log.warning("DBCollection already exists and is capped; doing nothing. " + dbResult);
        } else {
          log.warning(
              "DBCollection already exists with the same name ("
                  + collName
                  + ") and is not capped; not creating a capped version!");
        }
      } else {
        getDB().createCollection(collName, dbCapOpts.get());
        log.debug("Created capped DBCollection (" + collName + ") with opts " + dbCapOpts);
      }
    }
  }
}
protected <T, V> WriteResult delete(DBCollection dbColl, V id, WriteConcern wc) {
  WriteResult wr;
  if (wc == null) {
    wr = dbColl.remove(BasicDBObjectBuilder.start().add(Mapper.ID_KEY, id).get());
  } else {
    wr = dbColl.remove(BasicDBObjectBuilder.start().add(Mapper.ID_KEY, id).get(), wc);
  }
  throwOnError(wc, wr);
  return wr;
}
/** * {"pid":Xxx},{"$set":{"ack":false}} * * @param context * @param protocol * @return */ private boolean writeAndReturn(JIDContext context, Protocol protocol) { this.persistent.peek( MongoUtils.asMap( BasicDBObjectBuilder.start( Dictionary.FIELD_PID, protocol.cast(Message.class).getReceived().id()) .get()), MongoUtils.asMap( BasicDBObjectBuilder.start( "$set", BasicDBObjectBuilder.start(Dictionary.FIELD_ACK, false).get()) .get())); return false; }
/** @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  String createTableName = request.getParameter("createTableName");
  String createTableX = request.getParameter("createTableX");
  try {
    boolean createTable = !Util.getMongoDb().getCollectionNames().contains(createTableName);
    if (createTable) {
      System.out.println("creating table");
      DBObject options = BasicDBObjectBuilder.start().get();
      DBCollection table = Util.getMongoDb().createCollection(createTableName, options);
      BasicDBObject document = new BasicDBObject();
      if (createTableX.isEmpty()) {
        createTableX = "Date";
      }
      document.put("_id", createTableX);
      document.put("value", "Value");
      table.insert(document);

      // Record the new table in the TableCollections registry, creating it on first use.
      DBCollection tableResourceCollection;
      if (!Util.getMongoDb().getCollectionNames().contains("TableCollections")) {
        tableResourceCollection =
            Util.getMongoDb()
                .createCollection("TableCollections", BasicDBObjectBuilder.start().get());
      } else {
        tableResourceCollection = Util.getMongoDb().getCollection("TableCollections");
      }
      BasicDBObject documentResourceCollection = new BasicDBObject();
      documentResourceCollection.put("_id", createTableName);
      documentResourceCollection.put("value", createTableX);
      tableResourceCollection.insert(documentResourceCollection);
    } else {
      request.setAttribute("message", "Table Already Exists");
    }
  } catch (Exception e) {
    request.setAttribute("message", e.getMessage());
  }
  request.setAttribute("tableNames", Util.getCollections());
  request.getRequestDispatcher("createTable.jsp").forward(request, response);
}
public void introduceType(SpaceTypeDescriptor typeDescriptor) {
  DBCollection m = getConnection().getCollection(METADATA_COLLECTION_NAME);
  BasicDBObjectBuilder builder =
      BasicDBObjectBuilder.start().add(Constants.ID_PROPERTY, typeDescriptor.getTypeName());
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    ObjectOutputStream out = new ObjectOutputStream(bos);
    IOUtils.writeObject(
        out, SpaceTypeDescriptorVersionedSerializationUtils.toSerializableForm(typeDescriptor));
    out.flush(); // ensure buffered object data reaches the byte array before reading it
    builder.add(TYPE_DESCRIPTOR_FIELD_NAME, bos.toByteArray());
    WriteResult wr = m.save(builder.get());
    if (logger.isTraceEnabled()) {
      logger.trace(wr);
    }
    indexBuilder.ensureIndexes(typeDescriptor);
  } catch (IOException e) {
    logger.error(e);
    throw new SpaceMongoException(
        "error occurred while serializing and saving type descriptor: " + typeDescriptor, e);
  }
}
@Override
public DBObject toJSON() {
  BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();

  BasicDBList jsonList = new BasicDBList();
  for (VoronoiSite site : this.sites) {
    jsonList.add(site.toJSON());
  }
  builder.add("sites", jsonList);

  jsonList = new BasicDBList();
  for (VoronoiCorner corner : this.corners) {
    jsonList.add(corner.toJSON());
  }
  builder.add("corners", jsonList);

  jsonList = new BasicDBList();
  for (VoronoiEdge edge : this.edges) {
    jsonList.add(edge.toJSON());
  }
  builder.add("edges", jsonList);

  return builder.get();
}
/**
 * Copies the top-level fields of a DBObject into a new object so it can be modified without
 * touching the original. Note that nested values are not cloned; the copy shares them with the
 * source.
 */
public static DBObject copyDBObject(DBObject dbObject) {
  BasicDBObjectBuilder dbObjectBuilder = BasicDBObjectBuilder.start();
  for (String field : dbObject.keySet()) {
    dbObjectBuilder.add(field, dbObject.get(field));
  }
  return dbObjectBuilder.get();
}
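One caveat worth spelling out: because only top-level keys are copied, nested DBObjects stay shared between source and copy. A short demonstration with hypothetical values, assuming copyDBObject above is statically imported:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class CopyCaveat {
  public static void main(String[] args) {
    DBObject original = new BasicDBObject("meta", new BasicDBObject("n", 1));
    DBObject copy = copyDBObject(original);
    copy.put("extra", true); // top-level change: original is unaffected
    ((DBObject) copy.get("meta")).put("n", 2); // nested change: visible through original too
    System.out.println(original); // { "meta" : { "n" : 2 } }
  }
}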
@Override
public DBCursor findEvents(DBCollection collection, MongoCriteria criteria) {
  DBObject filter = criteria == null ? null : criteria.asMongoObject();
  DBObject sort =
      BasicDBObjectBuilder.start()
          .add(CommitEntry.TIME_STAMP_PROPERTY, ORDER_ASC)
          .add(CommitEntry.SEQUENCE_NUMBER_PROPERTY, ORDER_ASC)
          .get();
  return collection.find(filter).sort(sort);
}
public static DBObject toDBObject(Person p) {
  BasicDBObjectBuilder builder =
      BasicDBObjectBuilder.start()
          .append("name", p.getName())
          .append("surname", p.getSurname())
          .append("phone", p.getPhone());
  if (p.getId() != null) {
    builder.append("_id", new ObjectId(p.getId()));
  }
  return builder.get();
}
@Test
public void testUpsertWithEmbeddedQuery() {
  DBCollection collection = newCollection();
  DBObject update = BasicDBObjectBuilder.start().push("$set").append("a", 1).pop().get();

  collection.update(new BasicDBObject("_id", 1).append("e.i", 1), update, true, false);

  DBObject expected =
      BasicDBObjectBuilder.start()
          .append("_id", 1)
          .push("e")
          .append("i", 1)
          .pop()
          .append("a", 1)
          .get();
  assertEquals(expected, collection.findOne(new BasicDBObject("_id", 1)));
}
@Override
public DBCursor findLastSnapshot(DBCollection collection, String aggregateIdentifier) {
  DBObject mongoEntry =
      BasicDBObjectBuilder.start()
          .add(CommitEntry.AGGREGATE_IDENTIFIER_PROPERTY, aggregateIdentifier)
          .get();
  return collection
      .find(mongoEntry)
      .sort(new BasicDBObject(CommitEntry.SEQUENCE_NUMBER_PROPERTY, ORDER_DESC))
      .limit(1);
}
public int delete(final String username) {
  LOG.debug("Deleting user(s) with username \"{}\"", username);
  final DBObject query = BasicDBObjectBuilder.start(UserImpl.USERNAME, username).get();
  final int result = destroy(query, UserImpl.COLLECTION_NAME);
  if (result > 1) {
    LOG.warn("Removed {} users matching username \"{}\".", result, username);
  }
  return result;
}
/**
 * Updates the enabled status of a feature.
 *
 * @param uid feature id
 * @param enable target enabled status
 */
private void updateStatus(String uid, boolean enable) {
  if (uid == null || uid.isEmpty()) {
    throw new IllegalArgumentException("Feature identifier cannot be null nor empty");
  }
  if (!exist(uid)) {
    throw new FeatureNotFoundException(uid);
  }
  DBObject target = BUILDER.getFeatUid(uid);
  Object enabled = BUILDER.getEnable(enable);
  collection.update(target, BasicDBObjectBuilder.start(MONGO_SET, enabled).get());
}
/** @author kim May 8, 2014 */
public class MongoTracerContext implements TracerContext {

  private final DBObject failed =
      BasicDBObjectBuilder.start(
              "$set", BasicDBObjectBuilder.start(Dictionary.FIELD_ACTIVATE, false).get())
          .get();

  private final MongoConfig config;

  public MongoTracerContext(MongoConfig config) {
    super();
    this.config = config;
  }

  @Override
  public boolean trace(Tracer tracer) {
    return MongoUtils.effect(
        this.config
            .collection()
            .save(
                BasicDBObjectBuilder.start(tracer.plus())
                    .add(Dictionary.FIELD_PID, tracer.id())
                    .add(Dictionary.FIELD_FROM, tracer.initiator())
                    .add(Dictionary.FIELD_ACTIVATE, true)
                    .add(Dictionary.FIELD_TO, tracer.target())
                    .add(Dictionary.FIELD_TIMESTAMP, System.currentTimeMillis())
                    .get(),
                WriteConcern.SAFE));
  }

  public boolean trace(String id) {
    return MongoUtils.asBoolean(
        this.config
            .collection()
            .findAndModify(
                BasicDBObjectBuilder.start().add(Dictionary.FIELD_PID, id).get(), this.failed),
        Dictionary.FIELD_ACTIVATE,
        false);
  }
}
public Person_login getPerson(String email) {
  DBObject query = BasicDBObjectBuilder.start().append("email", email).get();
  DBObject data = this.col.findOne(query);
  if (data == null) {
    return null;
  }
  return PersonConverter.toPerson(data);
}
public DBObject asDBObject() {
  final BasicDBObjectBuilder entryBuilder = BasicDBObjectBuilder.start();
  return entryBuilder
      .add(SERIALIZED_PAYLOAD_PROPERTY, serializedPayload)
      .add(PAYLOAD_TYPE_PROPERTY, payloadType)
      .add(PAYLOAD_REVISION_PROPERTY, payloadRevision)
      .add(EVENT_TIMESTAMP_PROPERTY, timestamp)
      .add(EVENT_SEQUENCE_NUMBER_PROPERTY, sequenceNumber)
      .add(META_DATA_PROPERTY, serializedMetaData)
      .add(EVENT_IDENTIFIER_PROPERTY, eventIdentifier)
      .get();
}
/** {@inheritDoc} */
@Override
public void disableGroup(String groupName) {
  if (groupName == null || groupName.isEmpty()) {
    throw new IllegalArgumentException("Groupname cannot be null nor empty");
  }
  if (!existGroup(groupName)) {
    throw new GroupNotFoundException(groupName);
  }
  for (DBObject dbObject : collection.find(BUILDER.getGroupName(groupName))) {
    Object enabled = BUILDER.getEnable(false);
    collection.update(dbObject, BasicDBObjectBuilder.start(MONGO_SET, enabled).get());
  }
}
/**
 * This test is green when {@link MyEntity#a} is annotated with {@code @Property}, as in this case
 * the field is not serialized at all. However, the bson encoder would fail to encode the object
 * of type A (as shown by {@link #testFullBSONSerialization()}).
 */
@Test
@Ignore
public void testDBObjectSerialization() {
  final MyEntity entity = new MyEntity(1L, new A(2));
  final DBObject dbObject = morphia.toDBObject(entity);

  assertEquals(BasicDBObjectBuilder.start("_id", 1L).add("a", 2L).get(), dbObject);

  // fails with an org.mongodb.morphia.mapping.MappingException:
  // No usable constructor for InheritanceTest$A
  final MyEntity actual = morphia.fromDBObject(MyEntity.class, dbObject);
  assertEquals(entity, actual);
}
@Test
public void testUpsertOnIdWithPush() {
  DBCollection collection = newCollection();
  DBObject update1 =
      BasicDBObjectBuilder.start()
          .push("$push")
          .push("c")
          .append("a", 1)
          .append("b", 2)
          .pop()
          .pop()
          .get();
  DBObject update2 =
      BasicDBObjectBuilder.start()
          .push("$push")
          .push("c")
          .append("a", 3)
          .append("b", 4)
          .pop()
          .pop()
          .get();

  collection.update(new BasicDBObject("_id", 1), update1, true, false);
  collection.update(new BasicDBObject("_id", 1), update2, true, false);

  DBObject expected =
      new BasicDBObject("_id", 1)
          .append(
              "c",
              Util.list(
                  new BasicDBObject("a", 1).append("b", 2),
                  new BasicDBObject("a", 3).append("b", 4)));
  assertEquals(expected, collection.findOne(new BasicDBObject("c.a", 3).append("c.b", 4)));
}
/** {@inheritDoc} */
@Override
public void addToGroup(String uid, String groupName) {
  if (uid == null || uid.isEmpty()) {
    throw new IllegalArgumentException("Feature identifier cannot be null nor empty");
  }
  if (groupName == null || groupName.isEmpty()) {
    throw new IllegalArgumentException("Groupname cannot be null nor empty");
  }
  if (!exist(uid)) {
    throw new FeatureNotFoundException(uid);
  }
  DBObject target = BUILDER.getFeatUid(uid);
  DBObject nGroupName = BUILDER.getGroupName(groupName);
  collection.update(target, BasicDBObjectBuilder.start(MONGO_SET, nGroupName).get());
}
@Override
public Collection<User> loadAllForRole(Role role) {
  final String roleId = role.getId();
  final DBObject query = BasicDBObjectBuilder.start(UserImpl.ROLES, new ObjectId(roleId)).get();
  final List<DBObject> result = query(UserImpl.class, query);
  if (result == null || result.isEmpty()) {
    return Collections.emptySet();
  }
  final Set<User> users = Sets.newHashSetWithExpectedSize(result.size());
  for (DBObject dbObject : result) {
    //noinspection unchecked
    users.add(new UserImpl((ObjectId) dbObject.get("_id"), dbObject.toMap()));
  }
  return users;
}
public void performBatch(List<BatchUnit> rows) {
  if (logger.isTraceEnabled()) {
    logger.trace("MongoClientWrapper.performBatch(" + rows + ")");
    logger.trace("Batch size to be performed is " + rows.size());
  }

  // List<Future<? extends Number>> pending = new ArrayList<Future<? extends Number>>();

  for (BatchUnit row : rows) {
    SpaceDocument spaceDoc = row.getSpaceDocument();
    SpaceTypeDescriptor typeDescriptor = types.get(row.getTypeName()).getTypeDescriptor();
    SpaceDocumentMapper<DBObject> mapper = getMapper(typeDescriptor);
    DBObject obj = mapper.toDBObject(spaceDoc);
    DBCollection col = getCollection(row.getTypeName());
    switch (row.getDataSyncOperationType()) {
      case WRITE:
      case UPDATE:
        col.save(obj);
        break;
      case PARTIAL_UPDATE:
        DBObject query =
            BasicDBObjectBuilder.start()
                .add(Constants.ID_PROPERTY, obj.get(Constants.ID_PROPERTY))
                .get();
        DBObject update = normalize(obj);
        col.update(query, update);
        break;
      // case REMOVE_BY_UID: // Not supported by this implementation
      case REMOVE:
        col.remove(obj);
        break;
      default:
        throw new IllegalStateException(
            "Unsupported data sync operation type: " + row.getDataSyncOperationType());
    }
  }

  /*long totalCount = waitFor(pending);
  if (logger.isTraceEnabled()) {
    logger.trace("total accepted replies is: " + totalCount);
  }*/
}
/**
 * Returns the current CommitEntry as a Mongo DBObject.
 *
 * @return DBObject representing the CommitEntry
 */
public DBObject asDBObject() {
  final BasicDBList events = new BasicDBList();
  // Note: SEQUENCE_NUMBER and TIME_STAMP carry the same values as their FIRST_* counterparts.
  BasicDBObjectBuilder commitBuilder =
      BasicDBObjectBuilder.start()
          .add(AGGREGATE_IDENTIFIER_PROPERTY, aggregateIdentifier)
          .add(SEQUENCE_NUMBER_PROPERTY, firstSequenceNumber)
          .add(LAST_SEQUENCE_NUMBER_PROPERTY, lastSequenceNumber)
          .add(FIRST_SEQUENCE_NUMBER_PROPERTY, firstSequenceNumber)
          .add(TIME_STAMP_PROPERTY, firstTimestamp)
          .add(FIRST_TIME_STAMP_PROPERTY, firstTimestamp)
          .add(LAST_TIME_STAMP_PROPERTY, lastTimestamp)
          .add(EVENTS_PROPERTY, events);
  for (EventEntry eventEntry : eventEntries) {
    events.add(eventEntry.asDBObject());
  }
  return commitBuilder.get();
}
public Usage getMdnUsageDetail(String mdn) {
  Usage usage = null;
  MongoClient mongo = null;
  try {
    Map<String, Object> dbMap = DataUtils.getConnection();
    DB db = (DB) dbMap.get("db");
    mongo = (MongoClient) dbMap.get("mongo");

    /*MongoClientURI uri =
        new MongoClientURI(
            "mongodb://*****:*****@ds051863.mongolab.com:51863/CloudFoundry_omfu0lp3_t4cigvf3");
    mongo = new MongoClient(uri);
    DB db = mongo.getDB(uri.getDatabase());*/

    /*if (!DataUtils.auth) {
      DataUtils.auth = db.authenticate("yoga", "test123".toCharArray());
      System.out.println("db authenticated " + DataUtils.auth);
    }*/

    DBCollection col = db.getCollection("usage");
    DBObject query = BasicDBObjectBuilder.start().add("mdn", mdn).get();
    DBCursor cursor = col.find(query);
    ObjectMapper objectMapper = new ObjectMapper();
    if (cursor.hasNext()) {
      // System.out.println(cursor.next());
      DBObject obj = cursor.next();
      usage = objectMapper.readValue(obj.toString(), Usage.class);
    }
  } catch (Exception e) {
    // UnknownHostException, JsonParseException, JsonMappingException and IOException
    // all end up here; none of them needs special handling beyond logging.
    e.printStackTrace();
  } finally {
    if (mongo != null) { // guard against getConnection() failing before mongo is assigned
      mongo.close();
    }
  }
  return usage;
}
@SuppressWarnings({"rawtypes"}) public void ensureIndex( Class clazz, String name, IndexFieldDef[] defs, boolean unique, boolean dropDupsOnCreate, boolean background) { BasicDBObjectBuilder keys = BasicDBObjectBuilder.start(); for (IndexFieldDef def : defs) { String fieldName = def.getField(); IndexDirection dir = def.getDirection(); keys.add(fieldName, dir.toIndexValue()); } ensureIndex( clazz, name, (BasicDBObject) keys.get(), unique, dropDupsOnCreate, background, false); }