protected void createDatabase() {
    this.connect();
    log.write("MongoDBConnector - enablesharding for database..");
    log.write(
        this.mongoClient.getDB("admin").command(new BasicDBObject("enablesharding", "twitter")));
    log.write("MongoDBConnector - Creating hashed _id index..");
    dbCollection.createIndex(new BasicDBObject("_id", "hashed"));
    log.write("shardCollection twitter.tweets..");
    log.write(
        this.mongoClient
            .getDB("admin")
            .command(
                new BasicDBObject("shardCollection", "twitter.tweets")
                    .append("key", new BasicDBObject("_id", "hashed"))));
    log.write("MongoDBConnector - Creating indexes");
    // dbCollection.createIndex(new BasicDBObject("id", 1), new BasicDBObject("unique", true));
    dbCollection.createIndex(new BasicDBObject("text", "text"));
    log.write("MongoDBConnector - database created");
}
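// The createDatabase() method above builds a text index on the "text" field. Below is a
// minimal sketch of how such an index might be queried with the same legacy driver
// (com.mongodb.*). The host, database, collection, and search term are illustrative
// assumptions, and $text queries require a MongoDB 2.6+ server.
public static void searchTweets() throws UnknownHostException {
    MongoClient client = new MongoClient("localhost", 27017);
    DBCollection tweets = client.getDB("twitter").getCollection("tweets");

    // Match documents whose indexed "text" field contains the search terms.
    DBObject textQuery = new BasicDBObject("$text", new BasicDBObject("$search", "sharding"));
    DBCursor cursor = tweets.find(textQuery);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }
    client.close();
}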
@Override
public void report(
        SortedMap<String, Gauge> gauges,
        SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms,
        SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    final Date timestamp = new Date(clock.getTime());

    List<DBObject> docs =
        Lists.newArrayListWithExpectedSize(
            gauges.size() + counters.size() + histograms.size() + meters.size() + timers.size());

    collectGaugeReports(docs, gauges, timestamp);
    collectCounterReports(docs, counters, timestamp);
    collectHistogramReports(docs, histograms, timestamp);
    collectMeterReports(docs, meters, timestamp);
    collectTimerReports(docs, timers, timestamp);

    try {
        final DBCollection collection =
            mongoConnection.getDatabase().getCollection("graylog2_metrics");
        // don't hang on to the data for too long.
        final BasicDBObject indexField = new BasicDBObject("timestamp", 1);
        final BasicDBObject indexOptions = new BasicDBObject("expireAfterSeconds", 5 * 60);
        collection.createIndex(indexField, indexOptions);

        collection.insert(docs, WriteConcern.UNACKNOWLEDGED);
    } catch (Exception e) {
        LOG.warn("Unable to write graylog2 metrics to mongodb. Ignoring this error.", e);
    }
}
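// In the reporter above, the "expireAfterSeconds" option turns the timestamp index into a
// TTL index: MongoDB's background TTL monitor deletes each metrics document roughly five
// minutes after the BSON date stored in its "timestamp" field (TTL only applies to date
// fields, which new Date(clock.getTime()) provides). A standalone sketch of the same
// pattern, with the collection name taken from the code above and the variable names
// being illustrative:
DBCollection metrics = mongoConnection.getDatabase().getCollection("graylog2_metrics");
metrics.createIndex(
    new BasicDBObject("timestamp", 1),             // index key: the date field TTL is based on
    new BasicDBObject("expireAfterSeconds", 300)); // documents expire ~300s after "timestamp"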
public void run() throws UnknownHostException {
    final List<Integer> models = new ArrayList<Integer>();
    final List<String> owners = new ArrayList<String>();
    final MongoClient client = new MongoClient();
    DB db = client.getDB("mongo_hadoop");
    DBCollection devices = db.getCollection("devices");
    DBCollection logs = db.getCollection("logs");

    if ("true".equals(System.getenv("SENSOR_DROP"))) {
        LOG.info("Dropping sensor data");
        devices.drop();
        logs.drop();
        devices.createIndex(new BasicDBObject("devices", 1));
    }
    db.getCollection("logs_aggregate").createIndex(new BasicDBObject("devices", 1));

    if (logs.count() == 0) {
        for (int i = 0; i < 10; i++) {
            owners.add(getRandomString(10));
        }
        for (int i = 0; i < 10; i++) {
            models.add(getRandomInt(10, 20));
        }

        List<ObjectId> deviceIds = new ArrayList<ObjectId>();
        for (int i = 0; i < NUM_DEVICES; i++) {
            DBObject device =
                new BasicDBObject("_id", new ObjectId())
                    .append("name", getRandomString(5) + getRandomInt(3, 5))
                    .append("type", choose(TYPES))
                    .append("owner", choose(owners))
                    .append("model", choose(models))
                    .append("created_at", randomDate(new Date(2000, 1, 1, 16, 49, 29), new Date()));
            deviceIds.add((ObjectId) device.get("_id"));
            devices.insert(device);
        }

        for (int i = 0; i < NUM_LOGS; i++) {
            if (i % 50000 == 0) {
                LOG.info(format("Creating %d sensor log data entries: %d%n", NUM_LOGS, i));
            }
            BasicDBList location = new BasicDBList();
            location.add(getRandomInRange(-180, 180, 3));
            location.add(getRandomInRange(-90, 90, 3));
            DBObject log =
                new BasicDBObject("_id", new ObjectId())
                    .append("d_id", choose(deviceIds))
                    .append("v", getRandomInt(0, 10000))
                    .append("timestamp", randomDate(new Date(2013, 1, 1, 16, 49, 29), new Date()))
                    .append("loc", location);
            logs.insert(log);
        }
    }
}
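// A hedged sketch of how the generated log entries might be rolled up per device with the
// legacy driver's aggregation API. Only the "logs" collection and its "d_id"/"v" fields
// come from the generator above; the pipeline and output handling are illustrative.
public static void aggregateLogsByDevice(DB db) {
    DBCollection logs = db.getCollection("logs");

    // $group stage: count log entries and average the sensor value per device id.
    DBObject groupFields =
        new BasicDBObject("_id", "$d_id")
            .append("count", new BasicDBObject("$sum", 1))
            .append("avgValue", new BasicDBObject("$avg", "$v"));
    DBObject group = new BasicDBObject("$group", groupFields);

    AggregationOutput output = logs.aggregate(group);
    for (DBObject result : output.results()) {
        System.out.println(result);
    }
}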
/**
 * Creates an index on the desired field in the target collection.
 *
 * @param field the field to index
 * @param direction the sort direction of the index (ascending or descending)
 * @param isUnique whether the index should enforce uniqueness
 * @param isSparse whether the index should be sparse (skip documents missing the field)
 */
public void createIndex(
        String field, Sort.Direction direction, boolean isUnique, boolean isSparse) {
    Integer dir = direction.equals(Sort.Direction.ASC) ? 1 : -1;
    DBObject index = new BasicDBObject(field, dir);
    DBObject options = new BasicDBObject();
    if (isSparse) options.put("sparse", true);
    if (isUnique) options.put("unique", true);
    DBCollection collection =
        mongoOperations.getCollection(mongoOperations.getCollectionName(model));
    collection.createIndex(index, options);
}
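// A possible call site for the helper above would be createIndex("email", Sort.Direction.ASC,
// true, true); the field name and flag values are illustrative. The sketch below shows what
// that call roughly translates to in raw driver terms: an ascending index on "email" that is
// both unique and sparse.
static void createUniqueSparseEmailIndex(DBCollection collection) {
    DBObject index = new BasicDBObject("email", 1);                         // {email: 1}
    DBObject options = new BasicDBObject("unique", true).append("sparse", true);
    collection.createIndex(index, options);
}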
/** Creates an index. Date: 2014-3-6 16:57:59 */
@Test
public void createIndex() {
    DBObject index = new BasicDBObject();
    // combine several fields as a compound index
    // index.put("secretStatus", 1);
    // index.put("secretLevel", 1);
    // index.put("timelimitType", 1);
    // index.put("clientId", 1);
    index.put("fileId", 1);
    bakFiles.createIndex(index);
}
@Override
public void connectDb(String keyword) {
    try {
        initMongoDB();
        items = db.getCollection(keyword);
        // Index tweet_ID and enforce uniqueness via the options document; appending
        // "unique" to the key document would create a compound index instead.
        BasicDBObject index = new BasicDBObject("tweet_ID", 1);
        // items.ensureIndex(index, new BasicDBObject("unique", true));
        items.createIndex(index, new BasicDBObject("unique", true));
    } catch (MongoException ex) {
        System.out.println("MongoException :" + ex.getMessage());
    }
}
/**
 * Get the message_counts collection. Lazily checks if correct indices are set.
 *
 * @return The messages collection
 */
public synchronized DBCollection getMessageCountsColl() {
    if (this.messageCountsCollection != null) {
        return this.messageCountsCollection;
    }

    // Collection has not been cached yet. Do it now.
    DBCollection coll = getDatabase().getCollection("message_counts");
    coll.createIndex(new BasicDBObject("timestamp", 1));

    this.messageCountsCollection = coll;
    return coll;
}
public Conection(String host, int port, String dbName) throws UnknownHostException {
    MongoClient mongo = new MongoClient(host, port);
    db = mongo.getDB(dbName);
    // db.command("{enableSharding:\"DA1\"}");
    article = db.getCollection("Article");
    book = db.getCollection("Book");
    incollection = db.getCollection("Incollection");
    inproceeding = db.getCollection("Inproceeding");
    mastersthesis = db.getCollection("Mastersthesis");
    phdthesis = db.getCollection("Phdthesis");
    proceedings = db.getCollection("Proceedings");
    www = db.getCollection("Www");

    article.createIndex(new BasicDBObject("key", 1));
    book.createIndex(new BasicDBObject("key", 1));
    incollection.createIndex(new BasicDBObject("key", 1));
    inproceeding.createIndex(new BasicDBObject("key", 1));
    mastersthesis.createIndex(new BasicDBObject("key", 1));
    phdthesis.createIndex(new BasicDBObject("key", 1));
    proceedings.createIndex(new BasicDBObject("key", 1));
    www.createIndex(new BasicDBObject("key", 1));
}
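// The eight createIndex calls in the constructor above could be collapsed into a loop over
// the collection names. A sketch under that assumption: the names mirror the constructor,
// and "db" is the field it assigns; the constant and method names are illustrative.
private static final String[] COLLECTION_NAMES = {
    "Article", "Book", "Incollection", "Inproceeding",
    "Mastersthesis", "Phdthesis", "Proceedings", "Www"
};

private void createKeyIndexes() {
    for (String name : COLLECTION_NAMES) {
        db.getCollection(name).createIndex(new BasicDBObject("key", 1));
    }
}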
@Test
public void createIndex2() {
    DBObject index = new BasicDBObject();
    // index.put("secretLevel", 1);
    // bakFiles.createIndex(index);

    // index.put("secretLevel", 1);
    // bakFiles.createIndex(index);

    index.put("timelimitType", 1);
    bakFiles.createIndex(index);

    // index.put("secretStatus", 1);
    // index.put("secretLevel", 1);
    // index.put("timelimitType", 1);
    // index.put("fileId", 1);
    // bakFiles.createIndex(index);

    // index.put("clientId", 1);
    // index.put("fileId", 1);
    // bakFiles.createIndex(index);
}
public static void main(String[] args) throws UnknownHostException {
    MongoClient mongo = new MongoClient("10.66.218.46", 27017);
    DB db = mongo.getDB("mydb");
    Set<String> collectionNames = db.getCollectionNames();
    for (String s : collectionNames) {
        System.out.println(s);
    }

    DBCollection conn = db.getCollection("testCollection");
    conn.drop();

    BasicDBObject doc =
        new BasicDBObject()
            .append("name", "MongoDB")
            .append("type", "database")
            .append("count", 1)
            .append("info", new BasicDBObject("x", 203).append("y", 102));
    conn.insert(doc);

    DBObject myDoc = conn.findOne();
    System.out.println(myDoc);

    for (int i = 0; i < 100; i++) {
        conn.insert(new BasicDBObject().append("i", i));
    }
    System.out.println(
        "total # of documents after inserting 100 small ones (should be 101) " + conn.getCount());

    // iterate over all documents with a cursor
    DBCursor cursor = conn.find();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // query for documents where i == 71
    BasicDBObject query = new BasicDBObject("i", 71);
    cursor = conn.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    query = new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // 20 < i <= 30
    cursor = conn.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a range query to get a larger subset
    query = new BasicDBObject("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50
    cursor = conn.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // create an index on the "i" field
    conn.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending

    // list the indexes on the collection
    List<DBObject> list = conn.getIndexInfo();
    for (DBObject o : list) {
        System.out.println(o);
    }

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    // see if any previous operation had an error
    System.out.println("Previous error : " + db.getPreviousError());

    // force an error
    db.forceError();

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());
    db.resetError();

    mongo.close();
}