/**
 * Opens the database connection and maps the entity classes.
 *
 * @throws UnknownHostException if the MongoDB host cannot be resolved
 */
public void openConnection() throws UnknownHostException {
  mongoClient = new MongoClient("localhost", 27017);
  morphia = new Morphia();
  ds = morphia.createDatastore((Mongo) mongoClient, "userData");
  morphia.map(DbEntry.class);
  ds.ensureIndexes(); // creates indexes from @Index annotations in your entities
  ds.ensureCaps(); // creates capped collections from @Entity annotations
}
@BeforeClass
public static void initMongo() throws Exception {
  log.info("Init test server");
  final int port = Math.max(1025, RandomUtils.JVM_RANDOM.nextInt() % 15000);
  server = new JMongoServer(port);
  server.listen();
  mongo = new Mongo("127.0.0.1:" + port);
  morphia = new Morphia();
  morphia.map(Person.class);
  morphia.map(BlogEntry.class);
}
@Provides
Morphia createMorphia(final Application application) {
  Morphia morphia = new Morphia();
  morphia.getMapper().getOptions().objectFactory =
      new DefaultCreator() {
        @Override
        protected ClassLoader getClassLoaderForClass(String clazz, DBObject object) {
          return application.classloader();
        }
      };
  morphia.mapPackage("models");
  return morphia;
}
<T> List<T> toList(Class<T> resultClass, DBCursor cursor) {
  List<T> results = new LinkedList<>();
  while (cursor.hasNext()) {
    DBObject result = cursor.next();
    results.add(morphia.fromDBObject(resultClass, result));
  }
  return results;
}
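A minimal usage sketch for the helper above, assuming a hypothetical mapped Person entity and an already-open DB handle named db:

// Hypothetical usage: run a raw driver query, then map each DBObject
// back to an entity through the Morphia mapper.
DBCursor cursor = db.getCollection("people").find(new BasicDBObject("city", "Paris"));
List<Person> people = toList(Person.class, cursor);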
@Test
public void testScanning() throws Exception {
  final Morphia m = new Morphia();
  assertFalse(m.isMapped(E.class));
  new EntityScanner(m, Predicates.equalTo(E.class.getName() + ".class"));
  assertTrue(m.isMapped(E.class));
  assertFalse(m.isMapped(F.class));
  new EntityScanner(
      m,
      new Predicate<String>() {
        public boolean apply(final String input) {
          return input.startsWith(EntityScannerTest.class.getPackage().getName());
        }
      });
  assertTrue(m.isMapped(F.class));
}
public static synchronized Morphia getMorphia() throws Exception {
  if (morphia == null) {
    mongo = getMongo();
    morphia = new Morphia();
    morphia.mapPackage("org.zkoss.mongodb.model");
  }
  return morphia;
}
@Before
public void setUp() throws UnknownHostException, MongoException {
  // set up mongo + morphia
  mongo = new Mongo();
  morphia = new Morphia();
  morphia.map(Customer.class).map(Order.class);
  // set up neo
  neo = new EmbeddedGraphDatabase(NEO_DB_DIR);
  polymate = new Polymate(mongo, MONGO_DB_NAME, neo);
}
private void configureDs_() {
  List<Class<?>> pending = new ArrayList<Class<?>>();
  Map<Class<?>, Integer> retries = new HashMap<Class<?>, Integer>();
  List<ApplicationClass> cs = Play.classes.all();
  for (ApplicationClass c : cs) {
    Class<?> clz = c.javaClass;
    if (clz.isAnnotationPresent(Entity.class)) {
      try {
        debug("mapping class: %1$s", clz.getName());
        morphia_.map(clz);
      } catch (ConstraintViolationException e) {
        error(e, "error mapping class [%1$s]", clz);
        pending.add(clz);
        retries.put(clz, 1);
      }
    }
  }
  while (!pending.isEmpty()) {
    // iterate via an explicit Iterator so classes can be removed while looping
    for (Iterator<Class<?>> it = pending.iterator(); it.hasNext(); ) {
      Class<?> clz = it.next();
      try {
        debug("mapping class: %1$s", clz.getName());
        morphia_.map(clz);
        it.remove();
      } catch (ConstraintViolationException e) {
        error(e, "error mapping class [%1$s]", clz);
        int retry = retries.get(clz);
        if (retry > 2) {
          throw new RuntimeException("too many errors mapping Morphia Entity classes");
        }
        retries.put(clz, retry + 1);
      }
    }
  }
  ds().ensureIndexes();
  String writeConcern = Play.configuration.getProperty("morphia.defaultWriteConcern", "safe");
  if (null != writeConcern) {
    ds().setDefaultWriteConcern(WriteConcern.valueOf(writeConcern.toUpperCase()));
  }
}
public static Datastore ds(String dbName) {
  if (StringUtil.isEmpty(dbName)) return ds();
  Datastore ds = dataStores_.get(dbName);
  if (null == ds) {
    Datastore ds0 = morphia_.createDatastore(mongo_, dbName);
    ds = dataStores_.putIfAbsent(dbName, ds0);
    if (null == ds) {
      // no concurrent writer beat us to it, so our new datastore is the cached one
      ds = ds0;
    }
  }
  return ds;
}
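A short usage sketch of the per-database lookup above; the database name is made up for illustration:

// Hypothetical usage: the datastore is created on first access and cached afterwards.
Datastore analyticsDs = ds("analytics"); // created and registered on first call
Datastore sameDs = ds("analytics"); // subsequent calls return the cached instance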
public MongoRolePersonRepository2(final String mongoUri) {
  super();
  // WARNING: mongoUri may contain password!
  final MongoClientURI realMongoUri = new MongoClientURI(mongoUri);
  log.info(
      "Connecting to MongoDB role-person repository {}/{} as {}, rolePerson collection={}",
      realMongoUri.getHosts(),
      realMongoUri.getDatabase(),
      realMongoUri.getUsername(),
      ROLE_PERSON_COLL_NAME);
  try {
    final DB db = MongoUtils.getDb(realMongoUri, ReadPreference.primary());
    rolePersonColl = db.getCollection(ROLE_PERSON_COLL_NAME);
    morphia = new Morphia();
    morphia.map(Role2.class);
    morphia.getMapper().getOptions().objectFactory =
        new DefaultCreator() {
          @Override
          public Object createInstance(Class clazz, DBObject dbObj) {
            // TODO: Do not hardcode
            if (clazz == Email.class) {
              return new EmailImpl();
            }
            return super.createInstance(clazz, dbObj);
          }
        };
  } catch (Exception e) {
    throw new MongoRepositoryException(
        e,
        "Cannot connect to MongoDB role-person repository {}/{} as {} for collection '{}'",
        realMongoUri.getHosts(),
        realMongoUri.getDatabase(),
        realMongoUri.getUsername(),
        ROLE_PERSON_COLL_NAME);
  }
}
@Provides
Datastore createDatastore(Mongo mongo, Morphia morphia, final Application application) {
  Datastore datastore =
      morphia.createDatastore(
          mongo,
          application.configuration().getString("mongodb.db"),
          application.configuration().getString("mongodb.username"),
          application.configuration().getString("mongodb.password").toCharArray());
  datastore.ensureIndexes();
  Logger.info(
      "Connected to MongoDB ["
          + mongo.debugString()
          + "] database ["
          + datastore.getDB().getName()
          + "]");
  return datastore;
}
public static <T> void map(Class<T>[] c) {
  for (Class<T> cl : c) {
    morphia.map(cl);
  }
}
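A tiny usage sketch for the bulk-mapping helper above; the entity classes are placeholders:

// Hypothetical usage: register several entity classes in one call
// (a raw Class[] is accepted via an unchecked conversion).
map(new Class[] {Person.class, BlogEntry.class, Comment.class});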
@SuppressWarnings("unchecked") private void initMorphia_() { Properties c = Play.configuration; String dbName = c.getProperty(PREFIX + "name"); if (null == dbName) { warn("mongodb name not configured! using [test] db"); dbName = "test"; } DB db = mongo_.getDB(dbName); if (c.containsKey(PREFIX + "username") && c.containsKey(PREFIX + "password")) { String username = c.getProperty(PREFIX + "username"); String password = c.getProperty(PREFIX + "password"); if (!db.isAuthenticated() && !db.authenticate(username, password.toCharArray())) { throw new RuntimeException("MongoDB authentication failed: " + dbName); } } String loggerClass = c.getProperty("morphia.logger"); Class<? extends LogrFactory> loggerClazz = SilentLogrFactory.class; if (null != loggerClass) { final Pattern P_PLAY = Pattern.compile("(play|enable|true|yes|on)", Pattern.CASE_INSENSITIVE); final Pattern P_SILENT = Pattern.compile("(silent|disable|false|no|off)", Pattern.CASE_INSENSITIVE); if (P_PLAY.matcher(loggerClass).matches()) { loggerClazz = PlayLogrFactory.class; } else if (!P_SILENT.matcher(loggerClass).matches()) { try { loggerClazz = (Class<? extends LogrFactory>) Class.forName(loggerClass); } catch (Exception e) { warn( "Cannot init morphia logger factory using %s. Use PlayLogrFactory instead", loggerClass); } } } loggerRegistered_ = false; MorphiaLoggerFactory.reset(); MorphiaLoggerFactory.registerLogger(loggerClazz); morphia_ = new Morphia(); loggerRegistered_ = true; ds_ = morphia_.createDatastore(mongo_, dbName); dataStores_.put(dbName, ds_); String uploadCollection = c.getProperty("morphia.collection.upload", "uploads"); if (getBooleanProperty("gridfs.enabled")) { gridfs = new GridFS(MorphiaPlugin.ds().getDB(), uploadCollection); } morphia_ .getMapper() .addInterceptor( new AbstractEntityInterceptor() { @Override public void preLoad(Object ent, DBObject dbObj, Mapper mapr) { if (ent instanceof Model) { PlayPlugin.postEvent(MorphiaEvent.ON_LOAD.getId(), ent); ((Model) ent)._h_OnLoad(); } } @Override public void postLoad(Object ent, DBObject dbObj, Mapper mapr) { if (ent instanceof Model) { Model m = (Model) ent; PlayPlugin.postEvent(MorphiaEvent.LOADED.getId(), ent); m._h_Loaded(); } } }); }
private static Datastore getDatastore() throws Exception {
  Morphia morphia = new Morphia();
  PureMorphiaUser.ensureMapped(morphia);
  Mongo mongo = new Mongo();
  return morphia.createDatastore(mongo, "mydatabase");
}
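A brief, hypothetical usage of the factory above, assuming PureMorphiaUser is a mapped @Entity with a no-arg constructor:

// Hypothetical usage: persist one user, then read every user back.
Datastore ds = getDatastore();
ds.save(new PureMorphiaUser());
List<PureMorphiaUser> users = ds.find(PureMorphiaUser.class).asList();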
@PostConstruct
public void init() {
  try {
    System.out.println("UMIGON - semantic analyzer for large twitter accounts");
    Mongo m;
    Morphia morphia;
    Mongo mLocal;
    Morphia morphiaLocal;
    mLocal = new Mongo();
    morphiaLocal = new Morphia();
    setTweets = new ArrayList();
    if (dev) {
      ExcelToCsv.load();
    }
    if (!dev) {
      saveOnDisk = false;
      analyzeNewlyArrivedTweets = false;
      analyzeAllFromDisk = false;
      loadTweetsFromLocal = false;
      loadFromTrainingFile = false;
      bigTrainingFile = false;
      clementTests = false;
    }
    // loads the heuristics from the csv files just created
    Hloader = new HeuristicsLoader();
    Hloader.load();
    // loads Categories
    Categories.populate();
    if (saveOnDisk || analyzeNewlyArrivedTweets) {
      m = new Mongo("alex.mongohq.com", 10056);
      morphia = new Morphia();
      ds =
          morphia.createDatastore(
              m, APIkeys.getMongoHQAPIkey(), "seinecle", APIkeys.getMongoHQPass().toCharArray());
      if (ds != null) {
        System.out.println("Morphia datastore on CloudBees / MongoHQ created!");
      }
      morphia.map(Tweet.class);
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
    }
    if (saveOnDisk || analyzeAllFromDisk || loadTweetsFromLocal) {
      dsLocal = morphiaLocal.createDatastore(mLocal, "hp");
      morphiaLocal.map(Tweet.class);
    }
    if (saveOnDisk) {
      Iterator<Tweet> setTweetsIterator = setTweets.iterator();
      while (setTweetsIterator.hasNext()) {
        Tweet tweet = setTweetsIterator.next();
        dsLocal.save(tweet);
      }
      ds.delete(ds.createQuery(Tweet.class));
      System.out.println("------------------------------------------------");
      System.out.println(
          "saved " + setTweets.size() + " on disk and deleted them from the cloud");
    }
    if (analyzeAllFromDisk) {
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieving all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      setTweets = hl1.applyLevel1(loadFromTrainingFile);
      for (Tweet tweet : setTweets) {
        updateQuery = dsLocal.createQuery(Tweet.class).field("text").equal(tweet.getText());
        ops =
            dsLocal
                .createUpdateOperations(Tweet.class)
                .set("setCategories", tweet.getSetCategories());
        dsLocal.update(updateQuery, ops, true);
      }
    }
    if (loadTweetsFromLocal) {
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieved all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
    }
    if (analyzeNewlyArrivedTweets) {
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println("retrieving newly arrived tweets from the cloud: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
    if (loadFromTrainingFile) {
      ExternalSourceTweetLoader comp = new ExternalSourceTweetLoader();
      if (bigTrainingFile) {
        setTweets = comp.sentimentBigSetLoader(maxTweets, termFilter);
      } else if (clementTests) {
        setTweets = comp.clementTestTweetsLoader(maxTweets);
      } else {
        setTweets = comp.sentiment140Loader();
      }
      System.out.println("------------------------------------------------");
      System.out.println("tweets from training file: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
  } catch (LangDetectException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (FileNotFoundException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (IOException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (InvalidFormatException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (MongoException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  }
}
@Before
public void createDs() {
  ds = morphia.createDatastore(mongo, "test");
}
private static Morphia getMorphia(Class entityClass) {
  Morphia morphia = new Morphia();
  morphia.map(entityClass);
  return morphia;
}
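A small usage sketch of the single-entity factory above; Widget and the database name are placeholders:

// Hypothetical usage: build a Morphia instance for one entity class,
// then open a datastore against an existing Mongo connection.
Morphia morphia = getMorphia(Widget.class);
Datastore ds = morphia.createDatastore(mongo, "widgetdb");
ds.ensureIndexes();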
public static <T> void map(Class<T> c) {
  morphia.map(c);
}
private MM(String dbName) {
  morphia = new Morphia();
  ds = morphia.createDatastore(DBManager.getInstance().getMongo(), dbName);
}