public static void crud() throws Exception { Datastore ds = getDatastore(); // create PureMorphiaUser user = new PureMorphiaUser("John", "Smith"); ds.save(user); // read PureMorphiaUser user2 = ds.get(PureMorphiaUser.class, user.id); // update user2.fName = "Tom"; ds.save(user2); // delete ds.delete(user2); }
/**
 * Returns the unread messages for {@code userId}, optionally restricted to one tag subtree,
 * sorted by message date and limited to the user's configured range. Also records
 * {@code tagId} as the user's currently selected tag and persists {@code tags}.
 *
 * @param userId id of the user whose unread messages are wanted
 * @param tagId selected tag id, or {@link UserTags#ALL_TAGS} for no tag filter
 * @param tags the user's tag configuration (supplies sort order and page size)
 * @return the unread messages, never {@code null}
 */
public List<GeneralMessage> getUnreadMessages(String userId, String tagId, UserTags tags) {
  // userId is already a String — the original's userId.toString() was redundant.
  Query<UnreaddenMessage> q =
      ds.createQuery(UnreaddenMessage.class).field("userId").equal(userId);
  String order = tags.getOrder();
  if (!tagId.equals(UserTags.ALL_TAGS)) {
    SocioTag tag = tags.getTag(tagId);
    // The selected tag overrides the user's default sort order.
    order = tag.getOrder();
    // Match messages tagged with any leaf under the selected tag.
    List<String> tagsIds = new ArrayList<String>();
    for (SocioTag leaf : tag.getLeaves()) {
      tagsIds.add(leaf.getUniqueId());
    }
    q.field("tagId").hasAnyOf(tagsIds);
  }
  // NOTE(review): ASCENDING_ORDER maps to "-messageDate", which is a DESCENDING
  // Morphia sort — this looks inverted; confirm intent before changing behavior.
  if (order.equals(SocioTag.ASCENDING_ORDER)) {
    q.order("-messageDate");
  } else {
    q.order("messageDate");
  }
  q.limit(tags.getRange());
  List<GeneralMessage> messagesList = new ArrayList<GeneralMessage>();
  for (UnreaddenMessage unreaddenMessage : q.fetch()) {
    messagesList.add(unreaddenMessage.getMessage());
  }
  // Remember which tag the user is viewing.
  tags.setSelectedTag(tagId);
  ds.save(tags);
  return messagesList;
}
/**
 * Marks as read every unread message of {@code userId}, optionally restricted to the
 * leaves of one tag. For each unread marker it records a {@code ReaddenMessage} receipt,
 * deletes the markers, and finally garbage-collects any message no longer referenced by
 * ANY user's unread markers.
 */
@Override
public void setMessagesReadden(String userId, String tagId, UserTags tags) {
  Query<UnreaddenMessage> q = ds.createQuery(UnreaddenMessage.class).field("userId").equal(userId);
  if (!tagId.equals(UserTags.ALL_TAGS)) {
    // Restrict to messages tagged with any leaf under the selected tag.
    SocioTag tag = tags.getTag(tagId);
    List<String> tagsIds = new ArrayList<String>();
    List<SocioTag> leaves = tag.getLeaves();
    for (SocioTag leaf : leaves) {
      tagsIds.add(leaf.getUniqueId());
    }
    q.field("tagId").hasAnyOf(tagsIds);
  }
  // Materialize the matches BEFORE deleting the query below.
  List<UnreaddenMessage> unreadMessages = q.asList();
  List<GeneralMessage> messages = new ArrayList<GeneralMessage>();
  List<ReaddenMessage> readdenMessages = new ArrayList<ReaddenMessage>();
  for (UnreaddenMessage unreaddenMessage : unreadMessages) {
    GeneralMessage message = unreaddenMessage.getMessage();
    messages.add(message);
    // Build a read receipt keyed by the message's unique field value.
    ReaddenMessage readdenMessage = new ReaddenMessage();
    readdenMessage.setUserId(userId);
    readdenMessage.setMessageUniqueId(message.getUniqueFieldValue().toString());
    readdenMessages.add(readdenMessage);
  }
  // Persist the receipts first, then drop the unread markers in one query delete.
  ds.save(readdenMessages);
  ds.delete(q);
  for (GeneralMessage message : messages) {
    // TODO Next lines are just to save space in unpaid mongoDB on CloudBees
    // Delete the message itself only when no other user still has it unread.
    Query<UnreaddenMessage> isMore = ds.createQuery(UnreaddenMessage.class).field("message").equal(message);
    if (isMore.countAll() <= 0) {
      ds.delete(message);
    }
  }
}
/**
 * Persists {@code object}, de-duplicating {@link IUniqueObject}s by their unique field.
 * On a duplicate found via query, the existing id is copied onto {@code object}, the id is
 * cached, and a {@link DuplicateMySocioObjectException} is thrown. Non-unique objects are
 * saved unconditionally.
 *
 * @param object the object to persist
 * @throws DuplicateMySocioObjectException when a stored object with the same unique
 *     field value already exists (query path)
 */
public <T extends ISocioObject> void saveObject(T object) throws DuplicateMySocioObjectException {
  if (object instanceof IUniqueObject) {
    IUniqueObject uniqueObject = (IUniqueObject) object;
    Cache cache = cm.getCache("Objects");
    String key = uniqueObject.getUniqueFieldName() + uniqueObject.getUniqueFieldValue();
    // Fast path: cache hit — adopt the known id and skip the save.
    // NOTE(review): this path returns silently while the query path below throws;
    // confirm callers rely on that asymmetry.
    Element element = cache.get(key);
    if (element != null) {
      ((SocioObject) object).setId((ObjectId) element.getValue());
      return;
    }
    @SuppressWarnings("unchecked")
    Query<T> q = (Query<T>) ds.createQuery(object.getClass())
        .field(uniqueObject.getUniqueFieldName())
        .equal(uniqueObject.getUniqueFieldValue());
    T objectT = (T) q.get();
    if (objectT != null) {
      ((SocioObject) object).setId(objectT.getId());
      cache.put(new Element(key, objectT.getId()));
      // Build the message once — the original duplicated this concatenation
      // for the log line and the exception.
      String message = "Duplicate object of type: " + object.getClass() + " for query: " + q.toString();
      logger.info(message);
      throw new DuplicateMySocioObjectException(message);
    }
  }
  ds.save(object);
}
/**
 * Ensures a sequence document named {@code name} exists, creating it when absent.
 *
 * @param name the "_id" of the sequence document
 */
public static void init(String name) {
  Datastore ds = MorphiaPlugin.ds();
  Query<Seq> q = ds.find(Seq.class, "_id", name);
  // Only insert when no sequence with this name exists yet.
  if (0 == q.countAll()) {
    ds.save(new Seq(name));
  }
  // (redundant trailing `return;` removed)
}
/**
 * Increments the essay counter for {@code user}, creating the counter at 1 on first use,
 * and stamps the time of this rating.
 *
 * @param user the user whose counter to bump
 * @throws Exception kept for interface compatibility (datastore errors propagate)
 */
public void plusOne(String user) throws Exception {
  // Typed query replaces the raw Query + cast of the original.
  Query<EssayCount> q = ds.createQuery(EssayCount.class).field("user").equal(user);
  EssayCount ec = q.get();
  if (ec == null) {
    // First rating for this user.
    ec = new EssayCount();
    ec.setUser(user);
    ec.setCount(1);
  } else {
    ec.setCount(ec.getCount() + 1);
  }
  // Common tail hoisted out of both branches (was duplicated); the original's
  // catch-and-rethrow added nothing and is removed.
  ec.setLastRate(new Date());
  ds.save(ec);
}
/**
 * Increments and returns the sequence document named {@code name}, inserting a fresh
 * sequence when none exists yet.
 */
public static Seq next(String name) {
  Datastore ds = MorphiaPlugin.ds();
  Query<Seq> q = ds.find(Seq.class, "_id", name);
  UpdateOperations<Seq> o = ds.createUpdateOperations(Seq.class).inc("value");
  // findAndModify returns null when no document matched the query.
  Seq newId = ds.findAndModify(q, o);
  if (null == newId) {
    // NOTE(review): this fallback insert is not atomic — two concurrent callers can
    // both observe null and save competing documents; presumably findAndModify with
    // an upsert/createIfMissing flag would close the race. TODO confirm.
    newId = new Seq(name);
    ds.save(newId);
  }
  return newId;
}
/**
 * Saves {@code count} only when a document with its id already exists.
 *
 * @param count the counter to update; matched by its "_id"
 * @return {@code true} when an existing document was found and saved, {@code false} otherwise
 * @throws Exception kept for interface compatibility (datastore errors propagate)
 */
public Boolean update(EssayCount count) throws Exception {
  // Typed query replaces the raw Query + cast; the original's catch-and-rethrow
  // added nothing and is removed.
  Query<EssayCount> q = ds.createQuery(EssayCount.class).field("_id").equal(count.getId());
  if (q.get() == null) {
    return false;
  }
  ds.save(count);
  return true;
}
public void importData() throws IOException { // D:\_download\GeoLiteCity_20101101\GeoLiteCity-Blocks.csv // D:\_download\GeoLiteCity_20101101\GeoLiteCity-Location.csv Map<Integer, Geolite> locationMap = new HashMap<>(); try (CSVReader reader = new CSVReader( new FileReader("D:\\_download\\GeoLiteCity_20101101\\GeoLiteCity-Location.csv"))) { reader.readNext(); reader.readNext(); String[] nextLine; while ((nextLine = reader.readNext()) != null) { // locId,country,region,city,postalCode,latitude,longitude,metroCode,areaCode int locId = Integer.valueOf(nextLine[0]); Geolite geolite = new Geolite(); geolite.setCountry(nextLine[1]); geolite.setRegion(nextLine[2]); geolite.setCity(nextLine[3]); geolite.setPostalCode(nextLine[4]); geolite.setLatitude(Double.valueOf(nextLine[5])); geolite.setLongitude(Double.valueOf(nextLine[6])); geolite.setMetroCode(nextLine[7]); geolite.setAreaCode(nextLine[8]); locationMap.put(locId, geolite); } } try (CSVReader reader = new CSVReader( new FileReader("D:\\_download\\GeoLiteCity_20101101\\GeoLiteCity-Blocks.csv"))) { reader.readNext(); reader.readNext(); String[] nextLine; while ((nextLine = reader.readNext()) != null) { long startIp = Long.valueOf(nextLine[0]); long endIp = Long.valueOf(nextLine[1]); int locId = Integer.valueOf(nextLine[2]); Geolite geolite = locationMap.get(locId); geolite.setStartIpNum(startIp); geolite.setEndIpNum(endIp); geolite.setId(null); datastore.save(geolite); } } }
/** Round-trips a DBObject through the datastore and verifies the stored properties. */
@Test
public void save() {
  Map<String, List<String>> properties = new HashMap<String, List<String>>();
  properties.put("type", Arrays.asList("video", "show"));
  DBObject object = new DBObject();
  object.setObjectId("testId");
  object.setDate(new Date());
  object.setObjectProperties(properties);
  datastore.save(object);
  assertThat(datastore.find(DBObject.class).countAll()).isEqualTo(1);
  DBObject dbObject = datastore.find(DBObject.class).field("objectId").equal("testId").get();
  // Fixed: the original `assertThat(x == y);` asserted nothing (result discarded)
  // and compared references; a reloaded document needs value equality.
  assertThat(dbObject.getObjectProperties()).isEqualTo(properties);
}
@Override public void setMessageReadden(String userId, String messageId) { Query<UnreaddenMessage> q = ds.createQuery(UnreaddenMessage.class) .field("message") .equal(new Key<GeneralMessage>(GeneralMessage.class, new ObjectId(messageId))) .field("userId") .equal(userId); ds.delete(q); ReaddenMessage readdenMessage = new ReaddenMessage(); readdenMessage.setUserId(userId); readdenMessage.setMessageUniqueId( ds.get(GeneralMessage.class, new ObjectId(messageId)).getUniqueFieldValue().toString()); ds.save(readdenMessage); // TODO Next lines are just to save space in unpaid mongoDB on CloudBees Query<UnreaddenMessage> isMore = ds.createQuery(UnreaddenMessage.class) .field("message") .equal(new Key<GeneralMessage>(GeneralMessage.class, new ObjectId(messageId))); if (isMore.countAll() <= 0) { ds.delete(GeneralMessage.class, new ObjectId(messageId)); } }
/**
 * Persists {@code processor} unless an equivalent one already exists (matched on the given
 * unique field and, when present, the processor's userId). On a duplicate, the existing id
 * is copied onto {@code processor} and an exception is thrown instead of saving.
 *
 * @param processor the processor to persist
 * @param uniqueFieldName field name used for the duplicate check
 * @param uniqueFieldValue field value used for the duplicate check
 * @throws DuplicateMySocioObjectException when a matching processor already exists
 */
@Override
public <T extends AbstractUserMessagesProcessor> void saveProcessor(
    T processor, String uniqueFieldName, String uniqueFieldValue)
    throws DuplicateMySocioObjectException {
  @SuppressWarnings("unchecked")
  Query<T> q = (Query<T>) processorsDs
      .createQuery(processor.getClass())
      .field(uniqueFieldName)
      .equal(uniqueFieldValue);
  // Per-user processors are scoped to their user; shared ones (null userId) are global.
  String userId = processor.getUserId();
  if (userId != null) {
    q.field("userId").equal(userId);
  }
  AbstractUserMessagesProcessor existingProcessor = q.get();
  if (existingProcessor != null) {
    processor.setId(existingProcessor.getId());
    // Build the message once — the original duplicated the concatenation for
    // the log line and the exception.
    String message = "Duplicate processor for query: " + q.toString();
    logger.info(message);
    throw new DuplicateMySocioObjectException(message);
  }
  processorsDs.save(processor);
}
/**
 * Persists {@code processor} unconditionally — unlike the unique-field overload,
 * no duplicate check is performed.
 */
@Override
public void saveProcessor(AbstractUserMessagesProcessor processor) {
  processorsDs.save(processor);
}
/**
 * Persists {@code object} directly, skipping the unique-field duplicate check that
 * {@code saveObject} performs.
 */
@Override
public <T extends ISocioObject> void saveExistingObject(T object) {
  ds.save(object);
}
/**
 * Application start-up: loads heuristics and categories, then — depending on the
 * dev-mode feature switches — moves tweets between the cloud (MongoHQ) and local
 * MongoDB stores and runs sentiment analysis over the selected tweet source.
 */
@PostConstruct
public void init() {
  try {
    System.out.println("UMIGON - semantic analyzer for large twitter accounts");
    Mongo m;
    Morphia morphia;
    Mongo mLocal;
    Morphia morphiaLocal;
    mLocal = new Mongo();
    morphiaLocal = new Morphia();
    setTweets = new ArrayList();
    // In dev mode, regenerate the csv heuristics files from the Excel source.
    if (dev) {
      ExcelToCsv.load();
    }
    // Outside dev mode, force every data-loading/analysis switch off.
    if (!dev) {
      saveOnDisk = false;
      analyzeNewlyArrivedTweets = false;
      analyzeAllFromDisk = false;
      loadTweetsFromLocal = false;
      loadFromTrainingFile = false;
      bigTrainingFile = false;
      clementTests = false;
    }
    // loads the heuristics from the csv files just created
    Hloader = new HeuristicsLoader();
    Hloader.load();
    // loads Categories
    Categories.populate();
    // The cloud datastore is only needed when fetching or draining remote tweets.
    if (saveOnDisk || analyzeNewlyArrivedTweets) {
      m = new Mongo("alex.mongohq.com", 10056);
      morphia = new Morphia();
      ds = morphia.createDatastore(
          m, APIkeys.getMongoHQAPIkey(), "seinecle", APIkeys.getMongoHQPass().toCharArray());
      if (ds != null) {
        System.out.println("Morphia datastore on CloudBees / MongoHQ created!!!!!!!");
      }
      morphia.map(Tweet.class);
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
    }
    // The local datastore ("hp") backs all on-disk read/write paths.
    if (saveOnDisk || analyzeAllFromDisk || loadTweetsFromLocal) {
      dsLocal = morphiaLocal.createDatastore(mLocal, "hp");
      morphiaLocal.map(Tweet.class);
    }
    // Drain the cloud: copy every fetched tweet to the local store, then delete
    // them all remotely.
    if (saveOnDisk) {
      Iterator<Tweet> setTweetsIterator = setTweets.iterator();
      while (setTweetsIterator.hasNext()) {
        Tweet tweet = setTweetsIterator.next();
        dsLocal.save(tweet);
      }
      ds.delete(ds.createQuery(Tweet.class));
      System.out.println("------------------------------------------------");
      System.out.println(
          "saved " + setTweets.size() + " on disk and deleted them fromm the cloud");
    }
    // Re-analyze the full local archive and write the resulting categories back
    // to the local store (upsert keyed on the tweet text).
    if (analyzeAllFromDisk) {
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieving all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      setTweets = hl1.applyLevel1(loadFromTrainingFile);
      for (Tweet tweet : setTweets) {
        updateQuery = dsLocal.createQuery(Tweet.class).field("text").equal(tweet.getText());
        ops = dsLocal
            .createUpdateOperations(Tweet.class)
            .set("setCategories", tweet.getSetCategories());
        dsLocal.update(updateQuery, ops, true);
      }
    }
    // Load local tweets without re-analyzing them.
    if (loadTweetsFromLocal) {
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieved all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
    }
    // Analyze tweets that arrived on the cloud store since the last drain.
    if (analyzeNewlyArrivedTweets) {
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println("retrieving newly arrived tweets from the cloud: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
    // Training/benchmark path: tweets come from an external labelled corpus.
    if (loadFromTrainingFile) {
      ExternalSourceTweetLoader comp = new ExternalSourceTweetLoader();
      if (bigTrainingFile) {
        setTweets = comp.sentimentBigSetLoader(maxTweets, termFilter);
      } else if (clementTests) {
        setTweets = comp.clementTestTweetsLoader(maxTweets);
      } else {
        setTweets = comp.sentiment140Loader();
      }
      System.out.println("------------------------------------------------");
      System.out.println("tweets from training file: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
  } catch (LangDetectException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (FileNotFoundException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (IOException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (InvalidFormatException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (MongoException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  }
}
/** Saves this entity to the datastore. */
public void save() {
  datastore.save(this);
}