/**
 * Starts an in-process JMongoServer on a pseudo-random unprivileged port and
 * maps the test entities, so every test run gets an isolated Mongo instance.
 *
 * @throws Exception if the embedded server fails to start listening
 */
@BeforeClass
public static void initMongo() throws Exception {
    log.info("Init test server");
    // BUG FIX: nextInt() is negative half the time, so the original
    // Math.max(1025, nextInt() % 15000) collapsed to port 1025 on ~50% of
    // runs (port collisions between parallel builds). Take the absolute
    // remainder so the port is uniformly spread over [1025, 15000).
    final int port = Math.max(1025, Math.abs(RandomUtils.JVM_RANDOM.nextInt() % 15000));
    server = new JMongoServer(port);
    server.listen();
    mongo = new Mongo("127.0.0.1:" + port);
    morphia = new Morphia();
    morphia.map(Person.class);
    morphia.map(BlogEntry.class);
}
/** @throws UnknownHostException Opens database connection */ public void openConnection() throws UnknownHostException { mongoClient = new MongoClient("localhost", 27017); morphia = new Morphia(); ds = morphia.createDatastore((Mongo) mongoClient, "userData"); morphia.map(DbEntry.class); ds.ensureIndexes(); // creates indexes from @Index annotations in your entities ds.ensureCaps(); // creates capped collections from @Entity }
@Before public void setUp() throws UnknownHostException, MongoException { // set up mongo + morphia mongo = new Mongo(); morphia = new Morphia(); morphia.map(Customer.class).map(Order.class); // set up neo neo = new EmbeddedGraphDatabase(NEO_DB_DIR); polymate = new Polymate(mongo, MONGO_DB_NAME, neo); }
/**
 * Maps every Play {@code @Entity}-annotated application class into Morphia.
 * Classes that fail with a {@link ConstraintViolationException} (typically
 * because a dependency has not been mapped yet) are queued and retried; each
 * class gets at most three attempts before configuration aborts. Finally,
 * indexes are ensured and the default write concern is applied from
 * configuration.
 */
private void configureDs_() {
    List<Class<?>> pending = new ArrayList<Class<?>>();
    Map<Class<?>, Integer> retries = new HashMap<Class<?>, Integer>();
    List<ApplicationClass> cs = Play.classes.all();
    for (ApplicationClass c : cs) {
        Class<?> clz = c.javaClass;
        if (clz.isAnnotationPresent(Entity.class)) {
            try {
                debug("mapping class: %1$s", clz.getName());
                morphia_.map(clz);
            } catch (ConstraintViolationException e) {
                error(e, "error mapping class [%1$s]", clz);
                pending.add(clz);
                retries.put(clz, 1);
            }
        }
    }
    while (!pending.isEmpty()) {
        // BUG FIX: the original removed from `pending` while for-each
        // iterating it, which throws ConcurrentModificationException on the
        // first successful retry. Iterate over a snapshot instead and mutate
        // the live list safely.
        for (Class<?> clz : new ArrayList<Class<?>>(pending)) {
            try {
                // BUG FIX: the original logged debug("mapping class: ", name),
                // dropping the class name from the message.
                debug("mapping class: %1$s", clz.getName());
                morphia_.map(clz);
                pending.remove(clz);
            } catch (ConstraintViolationException e) {
                error(e, "error mapping class [%1$s]", clz);
                int retry = retries.get(clz);
                if (retry > 2) {
                    // Fail fast rather than loop forever on an unmappable class.
                    throw new RuntimeException("too many errors mapping Morphia Entity classes");
                }
                retries.put(clz, retry + 1);
            }
        }
    }
    ds().ensureIndexes();
    // getProperty never returns null here because a default ("safe") is
    // supplied, so the original null-guard was dead code.
    String writeConcern = Play.configuration.getProperty("morphia.defaultWriteConcern", "safe");
    ds().setDefaultWriteConcern(WriteConcern.valueOf(writeConcern.toUpperCase()));
}
/**
 * Connects to the MongoDB role-person repository described by the given URI
 * and prepares a Morphia mapper for {@code Role2}, with a custom object
 * factory that substitutes {@code EmailImpl} whenever an {@code Email} is
 * instantiated.
 *
 * @param mongoUri full MongoDB connection URI; may embed credentials, so it
 *     is never logged verbatim (only host/db/user are logged)
 * @throws MongoRepositoryException if the connection or setup fails
 */
public MongoRolePersonRepository2(final String mongoUri) { super(); // WARNING: mongoUri may contain password! final MongoClientURI realMongoUri = new MongoClientURI(mongoUri); log.info( "Connecting to MongoDB role-person repository {}/{} as {}, rolePerson collection={}", realMongoUri.getHosts(), realMongoUri.getDatabase(), realMongoUri.getUsername(), ROLE_PERSON_COLL_NAME); try { // Primary read preference: writes and reads must see a consistent view.
final DB db = MongoUtils.getDb(realMongoUri, ReadPreference.primary()); rolePersonColl = db.getCollection(ROLE_PERSON_COLL_NAME); morphia = new Morphia(); morphia.map(Role2.class); // Swap in a creator so the Email interface materialises as EmailImpl.
morphia.getMapper().getOptions().objectFactory = new DefaultCreator() { @Override public Object createInstance(Class clazz, DBObject dbObj) { // TODO: Do not hardcode if (clazz == Email.class) { return new EmailImpl(); } return super.createInstance(clazz, dbObj); } }; } catch (Exception e) { // Wrap with connection context (no raw URI — it may contain the password).
throw new MongoRepositoryException( e, "Cannot connect to MongoDB role-person repository {}/{} as {} for collection '{}'", realMongoUri.getHosts(), realMongoUri.getDatabase(), realMongoUri.getUsername(), ROLE_PERSON_COLL_NAME); } }
/**
 * Registers each of the given entity classes with the shared Morphia mapper.
 *
 * @param c entity classes to map, in order
 */
public static <T> void map(Class<T>[] c) {
    for (int i = 0; i < c.length; i++) {
        morphia.map(c[i]);
    }
}
/**
 * Registers a single entity class with the shared Morphia mapper.
 *
 * @param c the entity class to map
 */
public static <T> void map(Class<T> c) {
    morphia.map(c);
}
/**
 * Builds a fresh Morphia instance with the given entity class mapped.
 *
 * @param entityClass the entity type to register with the new mapper
 * @return a new Morphia mapper aware of {@code entityClass}
 */
private static Morphia getMorphia(Class<?> entityClass) {
    // Wildcard instead of the raw Class type: same callers compile, but we
    // no longer suppress generic type checking.
    Morphia morphia = new Morphia();
    morphia.map(entityClass);
    return morphia;
}
/**
 * Post-construct bootstrap for the UMIGON analyzer bean.
 *
 * Flow, driven by boolean flags (all forced off when {@code dev} is false
 * except the CSV reload): load heuristics and categories, then optionally
 * (1) pull tweets from the MongoHQ cloud datastore, (2) open a local
 * datastore, (3) archive cloud tweets to disk and wipe the cloud copy,
 * (4) analyze or reload tweets from disk, (5) analyze newly arrived cloud
 * tweets, or (6) run against an external training file.
 */
@PostConstruct
public void init() {
    try {
        System.out.println("UMIGON - semantic analyzer for large twitter accounts");
        // NOTE(review): m/morphia are only assigned inside the
        // (saveOnDisk || analyzeNewlyArrivedTweets) branch below; the same
        // holds for the `ds` field they populate — later uses of `ds` are
        // guarded by the same flags.
        Mongo m;
        Morphia morphia;
        Mongo mLocal;
        Morphia morphiaLocal;
        mLocal = new Mongo();
        morphiaLocal = new Morphia();
        setTweets = new ArrayList();
        if (dev) {
            // Regenerates the heuristics CSV files consumed just below.
            ExcelToCsv.load();
        }
        if (!dev) {
            // Production mode: disable every batch/analysis path.
            saveOnDisk = false;
            analyzeNewlyArrivedTweets = false;
            analyzeAllFromDisk = false;
            loadTweetsFromLocal = false;
            loadFromTrainingFile = false;
            bigTrainingFile = false;
            clementTests = false;
        }
        // loads the heuristics from the csv files just created
        Hloader = new HeuristicsLoader();
        Hloader.load();
        // loads Categories
        Categories.populate();
        if (saveOnDisk || analyzeNewlyArrivedTweets) {
            // Cloud datastore (MongoHQ); credentials come from APIkeys.
            m = new Mongo("alex.mongohq.com", 10056);
            morphia = new Morphia();
            ds = morphia.createDatastore(m, APIkeys.getMongoHQAPIkey(), "seinecle", APIkeys.getMongoHQPass().toCharArray());
            if (ds != null) {
                System.out.println("Morphia datastore on CloudBees / MongoHQ created!!!!!!!");
            }
            morphia.map(Tweet.class);
            // Pull the full cloud tweet set into memory.
            listTweets = ds.find(Tweet.class).asList();
            setTweets.addAll(listTweets);
        }
        if (saveOnDisk || analyzeAllFromDisk || loadTweetsFromLocal) {
            // Local datastore ("hp" database) for on-disk tweet storage.
            dsLocal = morphiaLocal.createDatastore(mLocal, "hp");
            morphiaLocal.map(Tweet.class);
        }
        if (saveOnDisk) {
            // Archive every in-memory tweet locally, then purge the cloud copy.
            Iterator<Tweet> setTweetsIterator = setTweets.iterator();
            while (setTweetsIterator.hasNext()) {
                Tweet tweet = setTweetsIterator.next();
                dsLocal.save(tweet);
            }
            ds.delete(ds.createQuery(Tweet.class));
            System.out.println("------------------------------------------------");
            System.out.println("saved " + setTweets.size() + " on disk and deleted them fromm the cloud");
        }
        if (analyzeAllFromDisk) {
            listTweets = dsLocal.find(Tweet.class).asList();
            setTweets.addAll(listTweets);
            System.out.println("------------------------------------------------");
            System.out.println("retrieving all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
            // Run the level-1 heuristics, then persist each tweet's categories
            // back to the local store (keyed by tweet text; upsert=true).
            hl1 = new TweetLooper(setTweets);
            setTweets = hl1.applyLevel1(loadFromTrainingFile);
            for (Tweet tweet : setTweets) {
                updateQuery = dsLocal.createQuery(Tweet.class).field("text").equal(tweet.getText());
                ops = dsLocal.createUpdateOperations(Tweet.class).set("setCategories", tweet.getSetCategories());
                dsLocal.update(updateQuery, ops, true);
            }
        }
        if (loadTweetsFromLocal) {
            // Load-only path: no analysis, just populate setTweets from disk.
            listTweets = dsLocal.find(Tweet.class).asList();
            setTweets.addAll(listTweets);
            System.out.println("------------------------------------------------");
            System.out.println("retrieved all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
        }
        if (analyzeNewlyArrivedTweets) {
            // Analyze fresh cloud tweets in memory (results are not persisted here).
            listTweets = ds.find(Tweet.class).asList();
            setTweets.addAll(listTweets);
            System.out.println("------------------------------------------------");
            System.out.println("retrieving newly arrived tweets from the cloud: " + setTweets.size());
            hl1 = new TweetLooper(setTweets);
            hl1.applyLevel1(loadFromTrainingFile);
        }
        if (loadFromTrainingFile) {
            // Training-file modes are mutually exclusive:
            // big set > clement tests > sentiment140 default.
            ExternalSourceTweetLoader comp = new ExternalSourceTweetLoader();
            if (bigTrainingFile) {
                setTweets = comp.sentimentBigSetLoader(maxTweets, termFilter);
            } else if (clementTests) {
                setTweets = comp.clementTestTweetsLoader(maxTweets);
            } else {
                setTweets = comp.sentiment140Loader();
            }
            System.out.println("------------------------------------------------");
            System.out.println("tweets from training file: " + setTweets.size());
            hl1 = new TweetLooper(setTweets);
            hl1.applyLevel1(loadFromTrainingFile);
        }
    // NOTE(review): all failures are logged and swallowed so bean creation
    // never aborts; callers see a partially-initialised bean on error.
    } catch (LangDetectException ex) {
        Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
    } catch (FileNotFoundException ex) {
        Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
    } catch (InvalidFormatException ex) {
        Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
    } catch (MongoException ex) {
        Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
    }
}