public void getCommitCount() throws Exception {
  MongoClient mongoClient = new MongoClient(MongoInfo.getMongoServerIp(), 27017);
  MongoDatabase database = mongoClient.getDatabase("ghcrawlerV3");
  FindIterable<Document> issueIterable = database.getCollection("commitnumber").find();
  Connection connection = MysqlInfo.getMysqlConnection();
  connection.setAutoCommit(false);
  // Prepare the update statement once and reuse it for every document.
  String sql = "update repotest set commit = ? where full_name = ?";
  PreparedStatement stmt = connection.prepareStatement(sql);
  JsonParser parser = new JsonParser();
  for (Document document : issueIterable) {
    String json = document.toJson();
    JsonObject repoJsonObject = parser.parse(json).getAsJsonObject();
    int commit = repoJsonObject.get("commitnumber").getAsInt();
    String full_name = repoJsonObject.get("fn").getAsString();
    System.out.println(full_name);
    stmt.setInt(1, commit);
    stmt.setString(2, full_name);
    stmt.execute();
  }
  connection.commit();
  stmt.close();
  connection.close();
  mongoClient.close();
}
public void removeOtherBookingsIfAny(String email, String bookingId) {
  final List<String> extraBookings = new ArrayList<String>();
  try {
    final MongoDatabase mdb = MongoDBConnManager.getInstance().getConnection();
    final MongoCollection<Document> coll = mdb.getCollection(DBConstants.COLL_BOOKING);
    final Document findCr = new Document();
    findCr.put(DBConstants.EMAIL, email);
    final ArrayList<Document> lstBkngs = coll.find(findCr).into(new ArrayList<Document>());
    // Collect every booking id for this email except the one being kept.
    for (final Document document : lstBkngs) {
      if (!StringUtils.equalsIgnoreCase(bookingId, document.getString(DBConstants.BOOKING_ID))) {
        extraBookings.add(document.getString(DBConstants.BOOKING_ID));
      }
    }
    if (!extraBookings.isEmpty()) {
      // Filters.in replaces the legacy QueryBuilder and its cast to Bson; the query is unchanged.
      coll.deleteMany(Filters.in(DBConstants.BOOKING_ID, extraBookings));
    }
  } catch (Exception e) {
    e.printStackTrace();
    if (e instanceof com.mongodb.MongoTimeoutException) {
      throw new ApplicationException(MessagesEnum.MONGODB_IS_DOWN.getMessage(), e);
    }
    throw new ApplicationException(
        MessagesEnum.CLOSE_BOOKING_FAILED.getMessage(extraBookings.toString()), e);
  }
}
public void getIssueAndPull() throws Exception { MongoClient mongoClient = new MongoClient(MongoInfo.getMongoServerIp(), 27017); MongoDatabase database = mongoClient.getDatabase("ghcrawlerV3"); FindIterable<Document> issueIterable = database.getCollection("issueandpull").find(); Connection connection = MysqlInfo.getMysqlConnection(); connection.setAutoCommit(false); String sql = "update repotest set open_issues = ?,closed_issues = ?,open_pull=?,closed_pull=? where full_name = ?"; PreparedStatement stmt = connection.prepareStatement(sql); JsonParser parser = new JsonParser(); for (Document document : issueIterable) { String json = document.toJson(); JsonObject repoIssue = parser.parse(json).getAsJsonObject(); int openIssue = repoIssue.get("openissue").getAsInt(); int closedIssue = repoIssue.get("closedissue").getAsInt(); int openPull = repoIssue.get("openpull").getAsInt(); int closedPull = repoIssue.get("closedpull").getAsInt(); String repoName = repoIssue.get("fn").getAsString(); System.out.println(repoName); stmt.setInt(1, openIssue); stmt.setInt(2, closedIssue); stmt.setInt(3, openPull); stmt.setInt(4, closedPull); stmt.setString(5, repoName); stmt.execute(); } connection.commit(); connection.close(); mongoClient.close(); }
public void tweet(String body) {
  final ObjectId tweet_id = new ObjectId();
  final Date time = new Date();
  MongoCollection<Document> tweets = db.getCollection("tweets");
  MongoCollection<Document> userline = db.getCollection("userline");
  MongoCollection<Document> timeline = db.getCollection("timeline");
  MongoCollection<Document> followers = db.getCollection("followers");
  Document tweetDoc = new Document("tweet_id", tweet_id).append("username", nick).append("body", body);
  Document userDoc = new Document("username", nick).append("time", time).append("tweet_id", tweet_id);
  // Fan the tweet out to every follower's timeline.
  List<Document> timelineList = new ArrayList<>();
  List<Document> followerList = followers.find(eq("username", nick)).into(new ArrayList<Document>());
  for (Document doc : followerList) {
    String follower = (String) doc.get("follower");
    Document timeDoc = new Document("username", follower).append("time", time).append("tweet_id", tweet_id);
    timelineList.add(timeDoc);
  }
  tweets.insertOne(tweetDoc);
  userline.insertOne(userDoc);
  // insertMany rejects an empty list, so skip the fan-out when the user has no followers.
  if (!timelineList.isEmpty()) {
    timeline.insertMany(timelineList);
  }
  System.out.println("* You tweeted \"" + body + "\" at " + time);
}
public static int GetCountByCkan3(String url) { int count = 0; HttpClient client = new HttpClient(); LOG.info("**** INPUT SPLIT COUNT *** " + url); GetMethod method = new GetMethod(url); method .getParams() .setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false)); method.setRequestHeader("User-Agent", "Hammer Project - SantaMaria crawler"); method .getParams() .setParameter(HttpMethodParams.USER_AGENT, "Hammer Project - SantaMaria crawler"); try { client.executeMethod(method); byte[] responseBody = method.getResponseBody(); Document doc = Document.parse(new String(responseBody)); if (doc.containsKey("result")) { count = ((Document) doc.get("result")).getInteger("count"); LOG.info("Find --> " + count); } } catch (Exception e) { e.printStackTrace(); LOG.error(e); } finally { method.releaseConnection(); } return count; }
/** * Relock trigger if its lock has expired. * * @param key trigger to lock * @return true when successfully relocked */ public boolean relockExpired(TriggerKey key) { Document existingLock = locksDao.findTriggerLock(key); if (existingLock != null) { if (expiryCalculator.isTriggerLockExpired(existingLock)) { // When a scheduler is defunct then its triggers become expired // after sometime and can be recovered by other schedulers. // To check that a trigger is owned by a defunct scheduler we evaluate // its LOCK_TIME and try to reassign it to this scheduler. // Relock may not be successful when some other scheduler has done // it first. log.info("Trigger {} is expired - re-locking", key); return locksDao.relock(key, existingLock.getDate(Constants.LOCK_TIME)); } else { log.info( "Trigger {} hasn't expired yet. Lock time: {}", key, existingLock.getDate(Constants.LOCK_TIME)); } } else { log.warn( "Error retrieving expired lock from the database for trigger {}. Maybe it was deleted", key); } return false; }
@SuppressWarnings("deprecation") public boolean passBooking(User currentUserBooking, User nextUserInQueue) { try { final MongoDatabase mdb = MongoDBConnManager.getInstance().getConnection(); final MongoCollection<Document> coll = mdb.getCollection(DBConstants.COLL_BOOKING); final Document findCr = new Document(); findCr.put(DBConstants.BOOKING_ID, nextUserInQueue.getBookingId()); final ArrayList<Document> lstBkngs = coll.find(findCr).into(new ArrayList<Document>()); for (final Document document : lstBkngs) { java.util.Date nextUserBookingTime = document.getDate(DBConstants.BOOKED_DATE_N_TIME); nextUserBookingTime.setSeconds(nextUserBookingTime.getSeconds() + 1); currentUserBooking.setBookedDateNTime(nextUserBookingTime); } // update current user booking time final Document filterQuery = new Document(); filterQuery.put(DBConstants.BOOKING_ID, currentUserBooking.getBookingId()); final Document updateQuery = new Document(); final Document updateSet = new Document(); updateSet.put(DBConstants.BOOKED_DATE_N_TIME, currentUserBooking.getBookedDateNTime()); updateQuery.put("$set", updateSet); coll.updateOne(filterQuery, updateQuery); } catch (Exception e) { if (e instanceof com.mongodb.MongoTimeoutException) { throw new ApplicationException(MessagesEnum.MONGODB_IS_DOWN.getMessage(), e); } throw new ApplicationException(MessagesEnum.PASSING_BOOKING_FAILED.getMessage(), e); } return true; }
public void showUserline(String username) { MongoCollection<Document> users = db.getCollection("users"); Document oldDoc = users.find(eq("username", username)).first(); if (oldDoc == null) { System.out.println("* Show userline failed : Username does not exist"); } else { MongoCollection<Document> userline = db.getCollection("userline"); List<Document> userlineList = userline.find(eq("username", username)).into(new ArrayList<Document>()); if (userlineList.isEmpty()) { System.out.println("* " + username + "'s userline is empty"); } else { MongoCollection<Document> tweets = db.getCollection("tweets"); List<Date> timeList = new ArrayList<>(); List<String> bodyList = new ArrayList<>(); for (Document doc : userlineList) { Date time = (Date) doc.get("time"); ObjectId tweet_id = (ObjectId) doc.get("tweet_id"); Document tweetDoc = tweets.find(eq("tweet_id", tweet_id)).first(); String body = (String) tweetDoc.get("body"); timeList.add(time); bodyList.add(body); } System.out.println("* @" + username + "'s userline"); for (int i = 0; i < timeList.size(); i++) { System.out.println("[" + timeList.get(i) + "] " + bodyList.get(i)); } } } }
public RuleResult runRule(
    Document aggregation,
    Object rightsHolders,
    BasicBSONList affiliations,
    Document preferences,
    Document statsDocument,
    Document profile,
    Object context) {
  RuleResult result = new RuleResult();
  try {
    long max = Long.parseLong(statsDocument.getString("Total Size"));
    long repoMax = Long.parseLong(profile.getString("Total Size"));
    if (max > repoMax) {
      result.setResult(-1, "Total size exceeds maximum allowed (" + repoMax + ").");
    } else {
      result.setResult(1, "Total size is acceptable (<=" + repoMax + ").");
    }
  } catch (NullPointerException npe) {
    // Just return untriggered result
    System.out.println("Missing info in TotalSize rule: " + npe.getLocalizedMessage());
  } catch (NumberFormatException nfe) {
    // Just return untriggered result
    System.out.println(
        "Missing info in TotalSize rule for repo: "
            + profile.getString("orgidentifier")
            + " : "
            + nfe.getLocalizedMessage());
  }
  return result;
}
/** Creates each document for pipeline with instances of Bson documents */ private void fromBsonDocuments() { Document totalPop = new Document() .append( "$group", new Document() .append("_id", "$state") .append("totalPop", new Document("$sum", "$pop"))); Document gtOneMillion = new Document().append("$match", new Document("totalPop", new Document("$gte", 1000000))); // // Compose aggregation pipeline List<Document> pipeline = Arrays.asList(totalPop, gtOneMillion); // // Executes aggregation query List<Document> results = DBHelper.getZipCodesCollection().aggregate(pipeline).into(new ArrayList<Document>()); // // Display aggregation results System.out.println("\nStates with population greater than one million"); for (Document result : results) { System.out.println(result.toJson()); } }
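The same $group/$match pipeline can also be expressed with the driver's typed aggregation builders; this is only a minimal sketch, assuming the same DBHelper.getZipCodesCollection() helper used above (it needs com.mongodb.client.model.Aggregates, Accumulators, Filters and org.bson.conversions.Bson on the classpath):

/** Builds the population pipeline with the typed builders instead of raw Documents. */
private void fromAggregationBuilders() {
  // $group by state, summing the pop field, then $match totals >= 1,000,000.
  List<Bson> pipeline =
      Arrays.asList(
          Aggregates.group("$state", Accumulators.sum("totalPop", "$pop")),
          Aggregates.match(Filters.gte("totalPop", 1000000)));
  List<Document> results =
      DBHelper.getZipCodesCollection().aggregate(pipeline).into(new ArrayList<Document>());
  System.out.println("\nStates with population greater than one million");
  for (Document result : results) {
    System.out.println(result.toJson());
  }
}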
// validates that username is unique and inserts the user into the db
public boolean addUser(String username, String password, String email) {
  // Hash the password with a random salt; never store the plain-text password.
  String passwordHash = makePasswordHash(password, Integer.toString(random.nextInt()));
  Document user = new Document("_id", username).append("password", passwordHash);
  if (email != null && !email.equals("")) {
    // if there is an email address specified, add it to the document too
    user.append("email", email);
  }
  try {
    usersCollection.insertOne(user);
    return true;
  } catch (MongoWriteException e) {
    if (e.getError().getCategory().equals(ErrorCategory.DUPLICATE_KEY)) {
      System.out.println("Username already in use: " + username);
      return false;
    }
    throw e;
  }
}
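makePasswordHash is referenced above but not shown in this listing; the following is only a hypothetical salted-hash sketch (SHA-256 via java.security.MessageDigest, Base64 from java.util), not the class's actual helper:

// Hypothetical sketch only: the real makePasswordHash used by addUser is not part of this listing.
private String makePasswordHash(String password, String salt) {
  try {
    MessageDigest digest = MessageDigest.getInstance("SHA-256");
    byte[] hash = digest.digest((salt + "," + password).getBytes(StandardCharsets.UTF_8));
    // Keep the salt next to the hash so the same value can be recomputed at login time.
    return salt + "," + Base64.getEncoder().encodeToString(hash);
  } catch (NoSuchAlgorithmException e) {
    throw new RuntimeException("SHA-256 is not available", e);
  }
}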
private void setListeners() { widthProperty() .addListener( (InvalidationListener) (listener) -> { confDoc.append("width", width.get()); // save(); DBUtils.getCollection() .updateOne( Filters.eq("_id", "conf"), new Document("$set", new Document("width", width.get()))); }); heightProperty() .addListener( (InvalidationListener) (listener) -> { confDoc.append("height", height.get()); DBUtils.getCollection() .updateOne( Filters.eq("_id", "conf"), new Document("$set", new Document("height", height.get()))); }); lastDocProperty() .addListener( (InvalidationListener) (listener) -> { confDoc.append("lastDoc", lastDoc.get()); DBUtils.getCollection() .updateOne( Filters.eq("_id", "conf"), new Document("$set", new Document("lastDoc", lastDoc.get()))); }); }
/**
 * Update a user record.
 *
 * @return whether the update succeeded
 */
private boolean updateRecord(record exist_record, record new_record) {
  try {
    System.out.print("update record");
    MongoDAO dao = MongoDAO.GetInstance();
    System.out.print("update record1");
    // Delete any existing record(s) with the same admission number, then insert the new one.
    Document existing = new Document();
    existing.append("admission_number", exist_record.getAdmission_number());
    long num = dao.GetCollection("records").deleteMany(existing).getDeletedCount();
    System.out.print("remove record number = " + num);
    try {
      Map<String, Object> docMap = new_record.getDocMap();
      docMap.put("inHospital", true);
      docMap.put("leaveHospital", false);
      docMap.put("followup", false);
      dao.GetCollection("records").insertOne(new Document(docMap));
    } catch (Exception e) {
      e.printStackTrace();
      logger.error(e.toString());
      return false;
    }
    System.out.print("update record2");
  } catch (Exception e) {
    e.printStackTrace();
    logger.error(e.toString());
    return false;
  }
  return true;
}
/**
 * Get the records of users who are currently in hospital.
 *
 * @return the list of matching records, or null on error
 */
public List<record> GetInhospitalRecords() {
  try {
    List<record> list = new ArrayList<record>();
    MongoDAO dao = MongoDAO.GetInstance();
    // Match records flagged as in hospital and not yet discharged.
    BasicDBObject cond = new BasicDBObject();
    cond.append("inHospital", new BasicDBObject("$eq", true));
    cond.append("leaveHospital", new BasicDBObject("$eq", false));
    FindIterable<Document> result = dao.GetCollection("records").find(cond);
    MongoCursor<Document> it = result.iterator();
    while (it.hasNext()) {
      Document doc = it.next();
      record fol = new record();
      fol.setAdmission_number(doc.getString("admission_number"));
      fol.setName(doc.getString("name"));
      fol.setWeixin_openid(doc.getString("weixin_openid"));
      fol.setInTime(doc.getDate("inTime"));
      System.out.print("\nname = " + fol.getName());
      System.out.print("\nadmission_number = " + fol.getAdmission_number());
      System.out.print("\nweixin_openid = " + fol.getWeixin_openid());
      System.out.print("\nintime = " + fol.getInTime());
      list.add(fol);
      System.out.print("add to list finished");
    }
    System.out.print("list ready");
    return list;
  } catch (Exception e) {
    e.printStackTrace();
    logger.error(e.toString());
    return null;
  }
}
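Since the inHospital/leaveHospital pair is the standing filter in this query, a one-time compound index would avoid a full collection scan; this is a hypothetical addition reusing the same MongoDAO helper and the field names above:

// Hypothetical one-time setup; field names match the GetInhospitalRecords query.
MongoDAO.GetInstance()
    .GetCollection("records")
    .createIndex(new Document("inHospital", 1).append("leaveHospital", 1));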
public void updateDeviceLog(String productId, String[] vids, Modal model) { MongoCollection<Document> deviceLog = getDeviceLog(); Document filter = new Document("product-id", productId); Document doc = getDeviceLog().find(filter).first(); if (doc != null && model != null) { List<Document> models = doc.get("models", List.class); Document m = convertModel(vids, model); if (models != null) { models.add(m); } else { models = new ArrayList<>(); models.add(m); } doc.append("models", models); deviceLog.replaceOne(filter, doc); } else { Device device = new Device(); device.setProductId(productId); doc = toDocument(device); List<Document> models = new ArrayList<>(); models.add(convertModel(vids, model)); doc.append("models", models); deviceLog.insertOne(doc); } }
public User getBooking(final String bookingId) {
  // The result is built in a final User instance and populated via its setters.
  final User user = new User();
  try {
    final MongoDatabase mdb = MongoDBConnManager.getInstance().getConnection();
    final MongoCollection<Document> coll = mdb.getCollection(DBConstants.COLL_BOOKING);
    final Document findCr = new Document();
    findCr.put(DBConstants.BOOKING_ID, bookingId);
    final ArrayList<Document> lstBkngs = coll.find(findCr).into(new ArrayList<Document>());
    for (final Document document : lstBkngs) {
      user.setEmail(document.getString(DBConstants.EMAIL));
      user.setBookingId(document.getString(DBConstants.BOOKING_ID));
    }
  } catch (Exception e) {
    if (e instanceof com.mongodb.MongoTimeoutException) {
      throw new ApplicationException(MessagesEnum.MONGODB_IS_DOWN.getMessage(), e);
    }
    throw new ApplicationException(MessagesEnum.BOOKINGS_RETRIVAL_FAILED.getMessage(), e);
  }
  return user;
}
public void removeInheritance(Group group) {
  inherits.remove(group.getName());
  MongoCollection<Document> collection = MongoConnection.getCollection("perms", "groups");
  Document doc = collection.find(eq("group", name)).first();
  if (doc == null) {
    // Nothing stored for this group yet, so there is nothing to update.
    return;
  }
  doc.put("inherits", inherits);
  collection.replaceOne(eq("group", name), doc);
}
public static ApplicationObject fromDocument(Document data) { return new ApplicationObject( data.getString("id"), data.getString("name"), (ArrayList<ObjectAttribute>) data.get("attributes"), (ArrayList<ObjectAction>) data.get("actions"), (ArrayList<ObjectActionChain>) data.get("actionChains")); }
private Document convertModel(String[] vids, Modal model) { Document doc = new Document(); doc.append("vids", Arrays.asList(vids)); doc.append("values", model.getValues()); doc.append("productIds", convertProductIds(model.getProductId())); doc.append("created", new Date()); return doc; }
private Document convertProductId(ProductId productId) { Document doc = new Document(); doc.append("values", productId.getValue()); if (productId.getCondition() != null && !productId.getCondition().isEmpty()) { doc.append("conditions", productId.getCondition()); } return doc; }
public void getCollaborators() throws Exception {
  // get mysql connection
  Connection connection = MysqlInfo.getMysqlConnection();
  connection.setAutoCommit(false);
  String conSql = "insert into collaborator(user_id,repo_id) values(?,?);";
  PreparedStatement conStmt = connection.prepareStatement(conSql);
  String repoSql = "update repotest set collaborator = ? where id = ?";
  PreparedStatement repoStmt = connection.prepareStatement(repoSql);
  // get repos from mongo
  MongoClient mongoClient = new MongoClient(MongoInfo.getMongoServerIp(), 27017);
  MongoDatabase database = mongoClient.getDatabase("ghcrawlerV3");
  FindIterable<Document> repoIterable = database.getCollection("repo").find();
  JsonParser parser = new JsonParser();
  Map<String, Integer> repoMap = new HashMap<String, Integer>();
  for (Document document : repoIterable) {
    String json = document.toJson();
    JsonObject repoJsonObject = parser.parse(json).getAsJsonObject();
    int id = repoJsonObject.get("id").getAsInt();
    String full_name = repoJsonObject.get("full_name").getAsString();
    System.out.println(id);
    repoMap.put(full_name, id);
  }
  // insert one collaborator row per assignee and count collaborators per repo
  Map<Integer, Integer> collaboratorMap = new HashMap<Integer, Integer>();
  FindIterable<Document> collaboratorIterable = database.getCollection("assignees").find();
  for (Document document : collaboratorIterable) {
    String json = document.toJson();
    JsonObject contriJsonObject = parser.parse(json).getAsJsonObject();
    int id = contriJsonObject.get("id").getAsInt();
    String repoName = contriJsonObject.get("fn").getAsString();
    Integer repoId = repoMap.get(repoName);
    if (repoId == null) {
      // Skip assignees whose repository is missing from the repo collection.
      continue;
    }
    conStmt.setInt(1, id);
    conStmt.setInt(2, repoId);
    conStmt.execute();
    if (collaboratorMap.containsKey(repoId)) {
      collaboratorMap.put(repoId, collaboratorMap.get(repoId) + 1);
    } else {
      collaboratorMap.put(repoId, 1);
    }
  }
  // write the per-repo collaborator counts back to mysql
  Set<Integer> keySet = collaboratorMap.keySet();
  for (Integer repoId : keySet) {
    int contri_count = collaboratorMap.get(repoId);
    repoStmt.setInt(1, contri_count);
    repoStmt.setInt(2, repoId);
    repoStmt.execute();
  }
  mongoClient.close();
  connection.commit();
  conStmt.close();
  repoStmt.close();
  connection.close();
}
public String findUserNameBySessionId(String sessionId) { Document session = getSession(sessionId); if (session == null) { return null; } else { return session.get("username").toString(); } }
/** * Stores the score of a prefix in the mongo database. * * @param prefix the prefix to store * @param wordLength the length of the whole word * @param score the score of the prefix */ public static void storePrefix(final String prefix, final int wordLength, final double score) { MongoDatabase lettersdb = mongoClient.getDatabase(databaseName); String prefixCollectionName = createPrefixCollectionName(prefix, wordLength); MongoCollection<Document> collection = lettersdb.getCollection(prefixCollectionName); Document document = new Document(); document.put(scoreAttribute, score); document.put(prefixAttribute, prefix); collection.insertOne(document); }
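A hypothetical read-side counterpart, not part of the original listing, assuming the same mongoClient, databaseName, prefixAttribute, scoreAttribute and createPrefixCollectionName members as storePrefix:

/** Hypothetical lookup sketch; returns null when the prefix has not been stored. */
public static Double findPrefixScore(final String prefix, final int wordLength) {
  MongoDatabase lettersdb = mongoClient.getDatabase(databaseName);
  String prefixCollectionName = createPrefixCollectionName(prefix, wordLength);
  MongoCollection<Document> collection = lettersdb.getCollection(prefixCollectionName);
  Document found = collection.find(new Document(prefixAttribute, prefix)).first();
  return found == null ? null : found.getDouble(scoreAttribute);
}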
@Override public Object execute() throws Exception { MongoDatabase adminDatabase = mongoService.getMongoClient().getDatabase(database); Document document = adminDatabase.runCommand(new Document("dbStats", 1)); System.out.println(document.toJson(new JsonWriterSettings(true))); return null; }
public static void main(String[] args) { Document demo1 = new Document(); demo1.put("_id", 6); demo1.put("age", 21); demo1.put("gender", "female"); demo1.put("name", "周芷若"); insertTest1(demo1); }
/** {@inheritDoc} */ @Override public Set<String> readAllGroups() { Set<String> setOfGroups = new HashSet<String>(); for (Document document : collection.find()) { setOfGroups.add(document.getString(GROUPNAME)); } setOfGroups.remove(null); setOfGroups.remove(""); return setOfGroups; }
public void saveOrUpdate(Document widgetDoc) {
  logger.debug("--> widget to be saved -->");
  String tableName = "Widget";
  MongoDatabase db = mongoClient.getDatabase(widgetDoc.getString("appId"));
  MongoCollection<Document> collection = db.getCollection(tableName);
  if (!widgetDoc.containsKey("_id")) {
    ObjectId objId = new ObjectId();
    widgetDoc.put("_id", objId);
    logger.debug("--> to insert " + tableName + " with " + widgetDoc.toJson());
    collection.insertOne(widgetDoc); // no existing document, so insert
    return;
  }
  String objectId = widgetDoc.get("_id").toString();
  Document matchFields = new Document();
  matchFields.put("_id", new ObjectId(objectId));
  if (collection.find(matchFields).iterator().hasNext()) {
    // a matching document exists, so update it
    logger.debug("--> to update " + tableName + " with " + widgetDoc.toJson());
    collection.updateOne(matchFields, new Document("$set", widgetDoc));
  } else {
    logger.debug("--> to save " + tableName + " with " + widgetDoc.toJson());
    collection.insertOne(widgetDoc); // no matching document, so insert
  }
}
public String create() { MongoCollection<Document> commentCollection = mongoDatabase.getCollection("posts"); Document document = new Document(); document.put("comments", new ArrayList<Document>()); commentCollection.insertOne(document); String generatedId = document.getObjectId("_id").toHexString(); return generatedId; }
@Override
public long getEstimatedSizeBytes(PipelineOptions pipelineOptions) {
  MongoClient mongoClient = new MongoClient(new MongoClientURI(spec.uri()));
  try {
    MongoDatabase mongoDatabase = mongoClient.getDatabase(spec.database());
    // get the Mongo collStats object
    // it gives the size for the entire collection
    BasicDBObject stat = new BasicDBObject();
    stat.append("collStats", spec.collection());
    Document stats = mongoDatabase.runCommand(stat);
    return Long.valueOf(stats.get("size").toString());
  } finally {
    // Close the client so a connection pool is not leaked on every size estimate.
    mongoClient.close();
  }
}
public void push(String id, Comment comment) { MongoCollection<Document> commentCollection = mongoDatabase.getCollection("posts"); Document document = new Document(); document.put("user", comment.getUser()); document.put("text", comment.getText()); document.put("date", comment.getDate()); commentCollection.updateOne( new Document("_id", new ObjectId(id)), new Document("$push", new Document("comments", document)), new UpdateOptions().upsert(false)); }
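A hypothetical read-side sketch for the two methods above, assuming the same mongoDatabase field; it returns the embedded comments array written by create() and push():

/** Hypothetical companion read; not part of the original DAO. */
@SuppressWarnings("unchecked")
public List<Document> findComments(String id) {
  MongoCollection<Document> commentCollection = mongoDatabase.getCollection("posts");
  Document post = commentCollection.find(new Document("_id", new ObjectId(id))).first();
  if (post == null) {
    return new ArrayList<Document>();
  }
  // The comments are stored as an embedded array of documents on the post.
  return (List<Document>) post.get("comments");
}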