@Override
public void setMessagesReadden(String userId, String tagId, UserTags tags) {
  // Marks all of this user's unread messages as read, optionally restricted
  // to one tag subtree: mirrors each UnreaddenMessage into a ReaddenMessage,
  // deletes the unread entries, then garbage-collects messages no other
  // user still has pending.
  Query<UnreaddenMessage> q =
      ds.createQuery(UnreaddenMessage.class).field("userId").equal(userId);
  if (!tagId.equals(UserTags.ALL_TAGS)) {
    // Restrict to the leaf tags beneath the requested tag so messages filed
    // under nested tags are included as well.
    SocioTag tag = tags.getTag(tagId);
    List<String> tagsIds = new ArrayList<String>();
    List<SocioTag> leaves = tag.getLeaves();
    for (SocioTag leaf : leaves) {
      tagsIds.add(leaf.getUniqueId());
    }
    q.field("tagId").hasAnyOf(tagsIds);
  }
  List<UnreaddenMessage> unreadMessages = q.asList();
  List<GeneralMessage> messages = new ArrayList<GeneralMessage>();
  List<ReaddenMessage> readdenMessages = new ArrayList<ReaddenMessage>();
  for (UnreaddenMessage unreaddenMessage : unreadMessages) {
    GeneralMessage message = unreaddenMessage.getMessage();
    messages.add(message);
    // Build the "read" marker that replaces the unread entry being removed.
    ReaddenMessage readdenMessage = new ReaddenMessage();
    readdenMessage.setUserId(userId);
    readdenMessage.setMessageUniqueId(message.getUniqueFieldValue().toString());
    readdenMessages.add(readdenMessage);
  }
  ds.save(readdenMessages);
  // NOTE(review): delete(q) re-executes the query server-side; assumes no new
  // unread entries for this user/tag arrived since asList() — confirm this
  // race is acceptable for the application.
  ds.delete(q);
  for (GeneralMessage message : messages) {
    // TODO Next lines are just to save space in unpaid mongoDB on CloudBees
    Query<UnreaddenMessage> isMore =
        ds.createQuery(UnreaddenMessage.class).field("message").equal(message);
    if (isMore.countAll() <= 0) {
      // Nobody has this message pending any more: drop the message itself.
      ds.delete(message);
    }
  }
}
/**
 * Deletes every processor document of type {@code clazz} whose {@code fieldName}
 * equals {@code fieldValue} and that belongs to the given user.
 */
@Override
public <T> void deleteUserProcessorByField(
    Class<T> clazz, String fieldName, String fieldValue, String userId) {
  Query<T> doomed = processorsDs.createQuery(clazz);
  doomed.field(fieldName).equal(fieldValue);
  doomed.field("userId").equal(userId);
  processorsDs.delete(doomed);
}
public static void crud() throws Exception { Datastore ds = getDatastore(); // create PureMorphiaUser user = new PureMorphiaUser("John", "Smith"); ds.save(user); // read PureMorphiaUser user2 = ds.get(PureMorphiaUser.class, user.id); // update user2.fName = "Tom"; ds.save(user2); // delete ds.delete(user2); }
@Override public void setMessageReadden(String userId, String messageId) { Query<UnreaddenMessage> q = ds.createQuery(UnreaddenMessage.class) .field("message") .equal(new Key<GeneralMessage>(GeneralMessage.class, new ObjectId(messageId))) .field("userId") .equal(userId); ds.delete(q); ReaddenMessage readdenMessage = new ReaddenMessage(); readdenMessage.setUserId(userId); readdenMessage.setMessageUniqueId( ds.get(GeneralMessage.class, new ObjectId(messageId)).getUniqueFieldValue().toString()); ds.save(readdenMessage); // TODO Next lines are just to save space in unpaid mongoDB on CloudBees Query<UnreaddenMessage> isMore = ds.createQuery(UnreaddenMessage.class) .field("message") .equal(new Key<GeneralMessage>(GeneralMessage.class, new ObjectId(messageId))); if (isMore.countAll() <= 0) { ds.delete(GeneralMessage.class, new ObjectId(messageId)); } }
/**
 * Deletes the given Morphia-mapped entity from the datastore.
 *
 * @param object a mapped entity instance to remove
 */
public void deleteObject(Object object) { ds.delete(object); }
/** Wipes every document before each test so each case starts from an empty collection. */
@Before public void clean() { datastore.delete(datastore.find(DBObject.class)); }
/**
 * Deletes the user whose {@code id} field equals the given value.
 *
 * <p>Fix: the original used a raw {@code Query}, losing type safety and
 * triggering unchecked warnings; parameterize it as {@code Query<User>}.
 *
 * @param userId the application-level id of the user to delete
 */
public static void deleteById(long userId) {
  Datastore ds = ds();
  Query<User> query = ds.createQuery(User.class).filter("id", userId);
  ds.delete(query);
}
/**
 * Bootstraps the UMIGON analyzer: loads heuristics and categories, then,
 * depending on the boolean feature flags, pulls tweets from the hosted
 * MongoHQ datastore, the local MongoDB archive, or a training file, and
 * runs the level-1 sentiment analysis on them.
 */
@PostConstruct
public void init() {
  try {
    System.out.println("UMIGON - semantic analyzer for large twitter accounts");
    Mongo m;
    Morphia morphia;
    Mongo mLocal;
    Morphia morphiaLocal;
    mLocal = new Mongo();
    morphiaLocal = new Morphia();
    setTweets = new ArrayList();
    if (dev) {
      // Dev mode regenerates the csv heuristic files from their Excel source.
      ExcelToCsv.load();
    }
    if (!dev) {
      // Production: force every offline/experimental pathway off.
      saveOnDisk = false;
      analyzeNewlyArrivedTweets = false;
      analyzeAllFromDisk = false;
      loadTweetsFromLocal = false;
      loadFromTrainingFile = false;
      bigTrainingFile = false;
      clementTests = false;
    }
    // loads the heuristics from the csv files just created
    Hloader = new HeuristicsLoader();
    Hloader.load();
    // loads Categories
    Categories.populate();
    if (saveOnDisk || analyzeNewlyArrivedTweets) {
      // Connect to the hosted MongoHQ instance and load all cloud tweets.
      // NOTE(review): host/port are hard-coded; credentials come from APIkeys.
      m = new Mongo("alex.mongohq.com", 10056);
      morphia = new Morphia();
      ds = morphia.createDatastore(
          m, APIkeys.getMongoHQAPIkey(), "seinecle", APIkeys.getMongoHQPass().toCharArray());
      if (ds != null) {
        System.out.println("Morphia datastore on CloudBees / MongoHQ created!!!!!!!");
      }
      morphia.map(Tweet.class);
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
    }
    if (saveOnDisk || analyzeAllFromDisk || loadTweetsFromLocal) {
      // Local MongoDB ("hp" database) used as the on-disk tweet archive.
      dsLocal = morphiaLocal.createDatastore(mLocal, "hp");
      morphiaLocal.map(Tweet.class);
    }
    if (saveOnDisk) {
      // Archive every cloud tweet locally, then purge the cloud collection
      // (done to stay within the free MongoHQ storage quota).
      Iterator<Tweet> setTweetsIterator = setTweets.iterator();
      while (setTweetsIterator.hasNext()) {
        Tweet tweet = setTweetsIterator.next();
        dsLocal.save(tweet);
      }
      ds.delete(ds.createQuery(Tweet.class));
      System.out.println("------------------------------------------------");
      System.out.println(
          "saved " + setTweets.size() + " on disk and deleted them fromm the cloud");
    }
    if (analyzeAllFromDisk) {
      // Re-analyze the full local archive and write the resulting categories
      // back to the local store (upsert keyed on the tweet text).
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieving all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      setTweets = hl1.applyLevel1(loadFromTrainingFile);
      for (Tweet tweet : setTweets) {
        updateQuery = dsLocal.createQuery(Tweet.class).field("text").equal(tweet.getText());
        ops = dsLocal
            .createUpdateOperations(Tweet.class)
            .set("setCategories", tweet.getSetCategories());
        dsLocal.update(updateQuery, ops, true);
      }
    }
    if (loadTweetsFromLocal) {
      // Load the local archive without re-analyzing it.
      listTweets = dsLocal.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println(
          "retrieved all tweets from disk (collected since Dec. 02, 2012): " + setTweets.size());
    }
    if (analyzeNewlyArrivedTweets) {
      // Analyze only the tweets currently sitting in the cloud store.
      listTweets = ds.find(Tweet.class).asList();
      setTweets.addAll(listTweets);
      System.out.println("------------------------------------------------");
      System.out.println("retrieving newly arrived tweets from the cloud: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
    if (loadFromTrainingFile) {
      // Load labelled tweets from an external corpus instead of MongoDB;
      // which corpus depends on bigTrainingFile / clementTests.
      ExternalSourceTweetLoader comp = new ExternalSourceTweetLoader();
      if (bigTrainingFile) {
        setTweets = comp.sentimentBigSetLoader(maxTweets, termFilter);
      } else if (clementTests) {
        setTweets = comp.clementTestTweetsLoader(maxTweets);
      } else {
        setTweets = comp.sentiment140Loader();
      }
      System.out.println("------------------------------------------------");
      System.out.println("tweets from training file: " + setTweets.size());
      hl1 = new TweetLooper(setTweets);
      hl1.applyLevel1(loadFromTrainingFile);
    }
  } catch (LangDetectException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (FileNotFoundException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (IOException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (InvalidFormatException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  } catch (MongoException ex) {
    Logger.getLogger(ControllerBean.class.getName()).log(Level.SEVERE, null, ex);
  }
}
/**
 * Removes this entity from the datastore.
 *
 * @return the driver's {@code WriteResult} describing the delete outcome
 */
public WriteResult delete() { return datastore.delete(this); }