// Downloads the file with the given id into an in-memory buffer, then checks the hex-encoded
// contents (or the expected error) against the assertion document.
private void doDownload(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try {
        new MongoOperation<Long>() {
            @Override
            public void execute() {
                gridFSBucket.downloadToStream(
                        arguments.getObjectId("id").getValue(),
                        toAsyncOutputStream(outputStream),
                        getCallback());
            }
        }.get();
        outputStream.close();
    } catch (Throwable e) {
        error = e;
    }

    if (assertion.containsKey("result")) {
        assertNull("Should not have thrown an exception", error);
        assertEquals(
                printHexBinary(outputStream.toByteArray()).toLowerCase(),
                assertion.getDocument("result").getString("$hex").getValue());
    } else if (assertion.containsKey("error")) {
        assertNotNull("Should have thrown an exception", error);
    }
}
// Starts the Quartz scheduler (if it is not already running) and re-registers every persisted
// subscription, routing scheduled subscriptions to Quartz and triggered ones to the trigger engine.
public void init() {
    try {
        schedFact = new org.quartz.impl.StdSchedulerFactory();
        sched = schedFact.getScheduler();
        if (!sched.isStarted()) {
            sched.start();
        }

        MongoCollection<BsonDocument> collection =
                Configuration.mongoDatabase.getCollection("Subscription", BsonDocument.class);
        Iterator<BsonDocument> subIterator = collection.find(BsonDocument.class).iterator();
        MongoQueryService queryService = new MongoQueryService();

        while (subIterator.hasNext()) {
            BsonDocument sub = subIterator.next();
            SubscriptionType subscription = new SubscriptionType(sub);
            if (subscription.getSchedule() != null && subscription.getTrigger() == null) {
                queryService.addScheduleToQuartz(subscription);
            } else if (subscription.getSchedule() == null && subscription.getTrigger() != null) {
                TriggerEngine.addTriggerSubscription(
                        sub.getString("subscriptionID").getValue(), subscription);
            }
        }
    } catch (SchedulerException e) {
        Configuration.logger.log(Level.ERROR, e.toString());
    }
}
private void beforeFields(
        final BsonWriter bsonWriter, final EncoderContext encoderContext, final BsonDocument value) {
    // When encoding a collectible document, write the _id field first so it leads the document.
    if (encoderContext.isEncodingCollectibleDocument() && value.containsKey(ID_FIELD_NAME)) {
        bsonWriter.writeName(ID_FIELD_NAME);
        writeValue(bsonWriter, encoderContext, value.get(ID_FIELD_NAME));
    }
}
private void doDownloadByName(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try {
        final GridFSDownloadByNameOptions options = new GridFSDownloadByNameOptions();
        if (arguments.containsKey("options")) {
            int revision = arguments.getDocument("options").getInt32("revision").getValue();
            options.revision(revision);
        }
        new MongoOperation<Long>() {
            @Override
            public void execute() {
                gridFSBucket.downloadToStreamByName(
                        arguments.getString("filename").getValue(),
                        toAsyncOutputStream(outputStream),
                        options,
                        getCallback());
            }
        }.get();
        outputStream.close();
    } catch (Throwable e) {
        error = e;
    }

    if (assertion.containsKey("result")) {
        assertNull("Should not have thrown an exception", error);
        assertEquals(
                printHexBinary(outputStream.toByteArray()).toLowerCase(),
                assertion.getDocument("result").getString("$hex").getValue());
    } else if (assertion.containsKey("error")) {
        assertNotNull("Should have thrown an exception", error);
    }
}
private void sendStartedEvent(
        final InternalConnection connection,
        final ByteBufferBsonOutput bsonOutput,
        final CommandMessage message,
        final int documentPosition) {
    if (commandListener != null) {
        ByteBufBsonDocument byteBufBsonDocument = createOne(bsonOutput, documentPosition);
        BsonDocument commandDocument;
        if (byteBufBsonDocument.containsKey("$query")) {
            // Legacy query-wrapped command: the real command document is nested under $query.
            commandDocument = byteBufBsonDocument.getDocument("$query");
            commandName = commandDocument.keySet().iterator().next();
        } else {
            commandDocument = byteBufBsonDocument;
            commandName = byteBufBsonDocument.getFirstKey();
        }
        // Redact the payload of security-sensitive commands before publishing the event.
        BsonDocument commandDocumentForEvent =
                (SECURITY_SENSITIVE_COMMANDS.contains(commandName))
                        ? new BsonDocument()
                        : commandDocument;
        sendCommandStartedEvent(
                message,
                namespace.getDatabaseName(),
                commandName,
                commandDocumentForEvent,
                connection.getDescription(),
                commandListener);
    }
}
@Before
@Override
public void setUp() {
    super.setUp();
    gridFSBucket = GridFSBuckets.create(database);
    filesCollection =
            Fixture.initializeCollection(new MongoNamespace(getDefaultDatabaseName(), "fs.files"))
                    .withDocumentClass(BsonDocument.class);
    chunksCollection =
            Fixture.initializeCollection(new MongoNamespace(getDefaultDatabaseName(), "fs.chunks"))
                    .withDocumentClass(BsonDocument.class);

    final List<BsonDocument> filesDocuments =
            processFiles(data.getArray("files", new BsonArray()), new ArrayList<BsonDocument>());
    if (!filesDocuments.isEmpty()) {
        new MongoOperation<Void>() {
            @Override
            public void execute() {
                filesCollection.insertMany(filesDocuments, getCallback());
            }
        }.get();
    }

    final List<BsonDocument> chunksDocuments =
            processChunks(data.getArray("chunks", new BsonArray()), new ArrayList<BsonDocument>());
    if (!chunksDocuments.isEmpty()) {
        new MongoOperation<Void>() {
            @Override
            public void execute() {
                chunksCollection.insertMany(chunksDocuments, getCallback());
            }
        }.get();
    }
}
private BsonDocument parseHexDocument(final BsonDocument document, final String hexDocument) {
    if (document.containsKey(hexDocument) && document.get(hexDocument).isDocument()) {
        byte[] bytes =
                DatatypeConverter.parseHexBinary(
                        document.getDocument(hexDocument).getString("$hex").getValue());
        document.put(hexDocument, new BsonBinary(bytes));
    }
    return document;
}
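// Illustrative sketch only (not part of the original source): shows the in-place rewrite that
// parseHexDocument performs. The field name "source" is just an example taken from the upload
// tests; any field holding a {"$hex": ...} sub-document is handled the same way.
//
//   BsonDocument args = BsonDocument.parse("{source: {$hex: '48656c6c6f'}}");
//   parseHexDocument(args, "source");
//   // args is now {source: BsonBinary(0x48 0x65 0x6c 0x6c 0x6f)} -- the bytes of "Hello"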
public void putRecipeSummary(RecipeSummary recipeSummary) {
    MongoCollection<BsonDocument> collectionRecipeSummary =
            database.getCollection("recipeSummary", BsonDocument.class);
    BsonDocument bsonDocument = BsonDocument.parse(recipeSummary.toJson());
    bsonDocument.put("_id", new BsonString(recipeSummary.getRecipeId()));
    LOG.info("put bsonDocument = {}", bsonDocument.toString());
    // Upsert: insert the summary if it is missing, replace it if it already exists.
    collectionRecipeSummary.replaceOne(
            eq("_id", recipeSummary.getRecipeId()), bsonDocument, new UpdateOptions().upsert(true));
    // collectionRecipeSummary.insertOne(bsonDocument);
}
public RecipeSummary getRecipeSummary(String recipeId) {
    MongoCollection<BsonDocument> collectionRecipeSummary =
            database.getCollection("recipeSummary", BsonDocument.class);
    BsonDocument document = collectionRecipeSummary.find(eq("_id", recipeId)).first();
    if (document != null) {
        LOG.info("get document = {}", document.toString());
        return new RecipeSummary().fromJson(document.toJson());
    } else {
        LOG.info("Null Document for recipeId={}", recipeId);
        return null;
    }
}
private static <T> QueryResult<T> cursorDocumentToQueryResult(
        final BsonDocument cursorDocument,
        final ServerAddress serverAddress,
        final String fieldNameContainingBatch) {
    long cursorId = ((BsonInt64) cursorDocument.get("id")).getValue();
    MongoNamespace queryResultNamespace =
            new MongoNamespace(cursorDocument.getString("ns").getValue());
    return new QueryResult<T>(
            queryResultNamespace,
            BsonDocumentWrapperHelper.<T>toList(cursorDocument, fieldNameContainingBatch),
            cursorId,
            serverAddress);
}
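// Illustrative note (not part of the original source): the cursor document passed in here is the
// "cursor" sub-document of a server reply, which typically looks like
//
//   { "id": NumberLong(12345), "ns": "mydb.mycoll", "firstBatch": [ { ... }, ... ] }
//
// with fieldNameContainingBatch usually "firstBatch" (initial command) or "nextBatch" (getMore).
// A cursor id of 0 indicates the cursor is exhausted.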
@Override
public BsonDocument generateIdIfAbsentFromDocument(final BsonDocument document) {
    if (!documentHasId(document)) {
        document.put(ID_FIELD_NAME, new BsonObjectId(new ObjectId()));
    }
    return document;
}
public BsonDocument asBsonDocument() {
    CaptureUtil util = new CaptureUtil();
    BsonDocument objectEvent = super.asBsonDocument();

    // Required Fields
    objectEvent = util.putAction(objectEvent, action);

    // Optional Fields
    if (this.epcList != null && !this.epcList.isEmpty()) {
        objectEvent = util.putEPCList(objectEvent, epcList);
    }
    if (this.bizStep != null) {
        objectEvent = util.putBizStep(objectEvent, bizStep);
    }
    if (this.disposition != null) {
        objectEvent = util.putDisposition(objectEvent, disposition);
    }
    if (this.readPoint != null) {
        objectEvent = util.putReadPoint(objectEvent, readPoint);
    }
    if (this.bizLocation != null) {
        objectEvent = util.putBizLocation(objectEvent, bizLocation);
    }
    if (this.bizTransactionList != null && !this.bizTransactionList.isEmpty()) {
        objectEvent = util.putBizTransactionList(objectEvent, bizTransactionList);
    }
    if (this.ilmds != null && !this.ilmds.isEmpty()) {
        objectEvent = util.putILMD(objectEvent, namespaces, ilmds);
    }
    if (this.extensions != null && !this.extensions.isEmpty()) {
        objectEvent = util.putExtensions(objectEvent, namespaces, extensions);
    }

    // Fields grouped under the nested "extension" sub-document
    BsonDocument extension = new BsonDocument();
    if (this.quantityList != null && !this.quantityList.isEmpty()) {
        extension = util.putQuantityList(extension, quantityList);
    }
    if (this.sourceList != null && !this.sourceList.isEmpty()) {
        extension = util.putSourceList(extension, sourceList);
    }
    if (this.destinationList != null && !this.destinationList.isEmpty()) {
        extension = util.putDestinationList(extension, destinationList);
    }
    if (!extension.isEmpty()) {
        objectEvent.put("extension", extension);
    }

    return objectEvent;
}
@Parameterized.Parameters(name = "{1}")
public static Collection<Object[]> data() throws URISyntaxException, IOException {
    List<Object[]> data = new ArrayList<Object[]>();
    for (File file : JsonPoweredTestHelper.getTestFiles("/gridfs-tests")) {
        BsonDocument testDocument = JsonPoweredTestHelper.getTestDocument(file);
        for (BsonValue test : testDocument.getArray("tests")) {
            data.add(
                    new Object[] {
                        file.getName(),
                        test.asDocument().getString("description").getValue(),
                        testDocument.getDocument("data"),
                        test.asDocument()
                    });
        }
    }
    return data;
}
private BsonDocument getCommand(final ConnectionDescription description) {
    BsonDocument outputDocument = new BsonDocument(getAction(), new BsonString(getCollectionName()));
    outputDocument.append("sharded", BsonBoolean.valueOf(isSharded()));
    outputDocument.append("nonAtomic", BsonBoolean.valueOf(isNonAtomic()));
    if (getDatabaseName() != null) {
        outputDocument.put("db", new BsonString(getDatabaseName()));
    }

    BsonDocument commandDocument =
            new BsonDocument("mapreduce", new BsonString(namespace.getCollectionName()))
                    .append("map", getMapFunction())
                    .append("reduce", getReduceFunction())
                    .append("out", outputDocument)
                    .append("query", asValueOrNull(getFilter()))
                    .append("sort", asValueOrNull(getSort()))
                    .append("finalize", asValueOrNull(getFinalizeFunction()))
                    .append("scope", asValueOrNull(getScope()))
                    .append("verbose", BsonBoolean.valueOf(isVerbose()));
    putIfNotZero(commandDocument, "limit", getLimit());
    putIfNotZero(commandDocument, "maxTimeMS", getMaxTime(MILLISECONDS));
    putIfTrue(commandDocument, "jsMode", isJsMode());
    if (bypassDocumentValidation != null
            && description != null
            && serverIsAtLeastVersionThreeDotTwo(description)) {
        commandDocument.put("bypassDocumentValidation", BsonBoolean.valueOf(bypassDocumentValidation));
    }
    if (description != null) {
        appendWriteConcernToCommand(writeConcern, commandDocument, description);
    }
    if (collation != null) {
        commandDocument.put("collation", collation.asDocument());
    }
    return commandDocument;
}
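// Illustrative note (not part of the original source): the assembled mapReduce command has roughly
// this shape, with the optional fields present only when the corresponding setting is configured:
//
//   {
//     mapreduce: "<sourceCollection>",
//     map: <map function>, reduce: <reduce function>,
//     out: { <action>: "<outputCollection>", sharded: <bool>, nonAtomic: <bool>, db: "<dbName>"? },
//     query: ..., sort: ..., finalize: ..., scope: ..., verbose: <bool>,
//     limit?, maxTimeMS?, jsMode?, bypassDocumentValidation?, writeConcern?, collation?
//   }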
@Override
public BsonValue getDocumentId(JsonObject json) {
    if (!documentHasId(json)) {
        throw new IllegalStateException("The document does not contain an _id");
    }

    Object id = json.getValue(ID_FIELD);
    if (id instanceof String) {
        return new BsonString((String) id);
    }

    // Encode the id through a temporary document so any supported type ends up as a BsonValue.
    BsonDocument idHoldingDocument = new BsonDocument();
    BsonWriter writer = new BsonDocumentWriter(idHoldingDocument);
    writer.writeStartDocument();
    writer.writeName(ID_FIELD);
    writeValue(writer, null, id, EncoderContext.builder().build());
    writer.writeEndDocument();
    return idHoldingDocument.get(ID_FIELD);
}
@Override
public BsonValue getDocumentId(final Document document) {
    if (!documentHasId(document)) {
        throw new IllegalStateException("The document does not contain an _id");
    }

    Object id = document.get(ID_FIELD_NAME);
    if (id instanceof BsonValue) {
        return (BsonValue) id;
    }

    // Encode the id through a temporary document so any supported type ends up as a BsonValue.
    BsonDocument idHoldingDocument = new BsonDocument();
    BsonWriter writer = new BsonDocumentWriter(idHoldingDocument);
    writer.writeStartDocument();
    writer.writeName(ID_FIELD_NAME);
    writeValue(writer, EncoderContext.builder().build(), id);
    writer.writeEndDocument();
    return idHoldingDocument.get(ID_FIELD_NAME);
}
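// Illustrative sketch only (not part of the original source); the exact BsonValue produced depends
// on the codecs registered with this codec, and "codec" below is a hypothetical instance.
//
//   Document doc = new Document("_id", "abc-123");
//   BsonValue id = codec.getDocumentId(doc);   // expected: BsonString("abc-123")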
@Override
public T execute(final InternalConnection connection) {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(
                format(
                        "Sending command {%s : %s} to database %s on connection [%s] to server %s",
                        getCommandName(),
                        command.values().iterator().next(),
                        namespace.getDatabaseName(),
                        connection.getDescription().getConnectionId(),
                        connection.getDescription().getServerAddress()));
    }
    long startTimeNanos = System.nanoTime();
    CommandMessage commandMessage =
            new CommandMessage(
                    namespace.getFullName(),
                    command,
                    slaveOk,
                    fieldNameValidator,
                    ProtocolHelper.getMessageSettings(connection.getDescription()));
    ResponseBuffers responseBuffers = null;
    try {
        sendMessage(commandMessage, connection);
        responseBuffers = connection.receiveMessage(commandMessage.getId());
        if (!ProtocolHelper.isCommandOk(
                new BsonBinaryReader(new ByteBufferBsonInput(responseBuffers.getBodyByteBuffer())))) {
            throw getCommandFailureException(
                    getResponseDocument(responseBuffers, commandMessage, new BsonDocumentCodec()),
                    connection.getDescription().getServerAddress());
        }
        T retval = getResponseDocument(responseBuffers, commandMessage, commandResultDecoder);
        if (commandListener != null) {
            sendSucceededEvent(
                    connection.getDescription(),
                    startTimeNanos,
                    commandMessage,
                    getResponseDocument(responseBuffers, commandMessage, new RawBsonDocumentCodec()));
        }
        LOGGER.debug("Command execution completed");
        return retval;
    } catch (RuntimeException e) {
        sendFailedEvent(connection.getDescription(), startTimeNanos, commandMessage, e);
        throw e;
    } finally {
        if (responseBuffers != null) {
            responseBuffers.close();
        }
    }
}
@Override
protected void appendToWriteCommandResponseDocument(
        final RequestMessage curMessage,
        final RequestMessage nextMessage,
        final WriteConcernResult writeConcernResult,
        final BsonDocument response) {
    // "n" is the number of inserts actually carried by the current message: when the batch was
    // split, the next message holds the remainder, which is subtracted out.
    response.append(
            "n",
            new BsonInt32(
                    nextMessage == null
                            ? ((InsertMessage) curMessage).getInsertRequestList().size()
                            : ((InsertMessage) curMessage).getInsertRequestList().size()
                                    - ((InsertMessage) nextMessage).getInsertRequestList().size()));
}
BsonDocument calculateKeys(final MappedClass mc, final Index index) {
    BsonDocument keys = new BsonDocument();
    for (Field field : index.fields()) {
        String path;
        try {
            path =
                    findField(
                            mc,
                            index.options(),
                            new ArrayList<String>(asList(field.value().split("\\."))));
        } catch (Exception e) {
            path = field.value();
            String message =
                    format(
                            "The path '%s' can not be validated against '%s' and may represent an invalid index",
                            path, mc.getClazz().getName());
            if (!index.options().disableValidation()) {
                throw new MappingException(message);
            }
            LOG.warning(message);
        }
        keys.putAll(toBsonDocument(path, field.type().toIndexValue()));
    }
    return keys;
}
private void doDelete(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;
    try {
        new MongoOperation<Void>() {
            @Override
            public void execute() {
                gridFSBucket.delete(arguments.getObjectId("id").getValue(), getCallback());
            }
        }.get();
    } catch (MongoGridFSException e) {
        error = e;
    }

    if (assertion.containsKey("error")) {
        assertNotNull("Should have thrown an exception", error);
    } else {
        assertNull("Should not have thrown an exception", error);
        for (BsonValue rawDataItem : assertion.getArray("data")) {
            BsonDocument dataItem = rawDataItem.asDocument();
            for (BsonValue deletedItem : dataItem.getArray("deletes", new BsonArray())) {
                String delete = dataItem.getString("delete", new BsonString("none")).getValue();
                BsonObjectId id = new BsonObjectId(new ObjectId());
                if (delete.equals("expected.files")) {
                    id = deletedItem.asDocument().getDocument("q").getObjectId("_id");
                } else if (delete.equals("expected.chunks")) {
                    id = deletedItem.asDocument().getDocument("q").getObjectId("files_id");
                }
                long filesCount = getFilesCount(new BsonDocument("_id", id));
                long chunksCount = getChunksCount(new BsonDocument("files_id", id));
                assertEquals(filesCount, 0);
                assertEquals(chunksCount, 0);
            }
        }
    }
}
@Override
public void executeAsync(final InternalConnection connection, final SingleResultCallback<T> callback) {
    long startTimeNanos = System.nanoTime();
    CommandMessage message =
            new CommandMessage(
                    namespace.getFullName(),
                    command,
                    slaveOk,
                    fieldNameValidator,
                    ProtocolHelper.getMessageSettings(connection.getDescription()));
    boolean sentStartedEvent = false;
    try {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                    format(
                            "Asynchronously sending command {%s : %s} to database %s on connection [%s] to server %s",
                            getCommandName(),
                            command.values().iterator().next(),
                            namespace.getDatabaseName(),
                            connection.getDescription().getConnectionId(),
                            connection.getDescription().getServerAddress()));
        }
        ByteBufferBsonOutput bsonOutput = new ByteBufferBsonOutput(connection);
        int documentPosition =
                ProtocolHelper.encodeMessageWithMetadata(message, bsonOutput).getFirstDocumentPosition();
        sendStartedEvent(connection, bsonOutput, message, documentPosition);
        sentStartedEvent = true;

        SingleResultCallback<ResponseBuffers> receiveCallback =
                new CommandResultCallback(callback, message, connection.getDescription(), startTimeNanos);
        connection.sendMessageAsync(
                bsonOutput.getByteBuffers(),
                message.getId(),
                new SendMessageCallback<T>(
                        connection,
                        bsonOutput,
                        message,
                        getCommandName(),
                        startTimeNanos,
                        commandListener,
                        callback,
                        receiveCallback));
    } catch (Throwable t) {
        if (sentStartedEvent) {
            sendFailedEvent(connection.getDescription(), startTimeNanos, message, t);
        }
        callback.onResult(null, t);
    }
}
@Override
public void encode(final BsonWriter writer, final BsonDocument value, final EncoderContext encoderContext) {
    writer.writeStartDocument();
    beforeFields(writer, encoderContext, value);
    for (Map.Entry<String, BsonValue> entry : value.entrySet()) {
        if (skipField(encoderContext, entry.getKey())) {
            continue;
        }
        writer.writeName(entry.getKey());
        writeValue(writer, encoderContext, entry.getValue());
    }
    writer.writeEndDocument();
}
private void actionGridFS(final BsonDocument action, final BsonDocument assertion) {
    if (action.isEmpty()) {
        return;
    }
    String operation = action.getString("operation").getValue();
    if (operation.equals("delete")) {
        doDelete(action.getDocument("arguments"), assertion);
    } else if (operation.equals("download")) {
        doDownload(action.getDocument("arguments"), assertion);
    } else if (operation.equals("download_by_name")) {
        doDownloadByName(action.getDocument("arguments"), assertion);
    } else if (operation.equals("upload")) {
        doUpload(action.getDocument("arguments"), assertion);
    } else {
        throw new IllegalArgumentException("Unknown operation: " + operation);
    }
}
@Override
public BsonValue getDocumentId(final BsonDocument document) {
    return document.get(ID_FIELD_NAME);
}
private String getCommandName() {
    return commandName != null ? commandName : command.keySet().iterator().next();
}
// Normalizes the expected files documents: widens int32 lengths to int64 and drops empty
// optional fields so they compare cleanly against what the driver actually stores.
private List<BsonDocument> processFiles(final BsonArray bsonArray, final List<BsonDocument> documents) {
    for (BsonValue rawDocument : bsonArray.getValues()) {
        if (rawDocument.isDocument()) {
            BsonDocument document = rawDocument.asDocument();
            if (document.get("length").isInt32()) {
                document.put("length", new BsonInt64(document.getInt32("length").getValue()));
            }
            if (document.containsKey("metadata") && document.getDocument("metadata").isEmpty()) {
                document.remove("metadata");
            }
            if (document.containsKey("aliases") && document.getArray("aliases").getValues().size() == 0) {
                document.remove("aliases");
            }
            if (document.containsKey("contentType")
                    && document.getString("contentType").getValue().length() == 0) {
                document.remove("contentType");
            }
            documents.add(document);
        }
    }
    return documents;
}
private void doUpload(final BsonDocument rawArguments, final BsonDocument assertion) {
    Throwable error = null;
    ObjectId objectId = null;
    BsonDocument arguments = parseHexDocument(rawArguments, "source");
    try {
        final String filename = arguments.getString("filename").getValue();
        final InputStream inputStream =
                new ByteArrayInputStream(arguments.getBinary("source").getData());
        final GridFSUploadOptions options = new GridFSUploadOptions();
        BsonDocument rawOptions = arguments.getDocument("options", new BsonDocument());
        if (rawOptions.containsKey("chunkSizeBytes")) {
            options.chunkSizeBytes(rawOptions.getInt32("chunkSizeBytes").getValue());
        }
        if (rawOptions.containsKey("metadata")) {
            options.metadata(Document.parse(rawOptions.getDocument("metadata").toJson()));
        }
        objectId =
                new MongoOperation<ObjectId>() {
                    @Override
                    public void execute() {
                        gridFSBucket.uploadFromStream(
                                filename, toAsyncInputStream(inputStream), options, getCallback());
                    }
                }.get();
    } catch (Throwable e) {
        error = e;
    }

    if (assertion.containsKey("error")) {
        // We don't need to read anything more so don't see the extra chunk
        if (!assertion.getString("error").getValue().equals("ExtraChunk")) {
            assertNotNull("Should have thrown an exception", error);
        }
    } else {
        assertNull("Should not have thrown an exception", error);
        for (BsonValue rawDataItem : assertion.getArray("data", new BsonArray())) {
            BsonDocument dataItem = rawDataItem.asDocument();
            String insert = dataItem.getString("insert", new BsonString("none")).getValue();
            if (insert.equals("expected.files")) {
                List<BsonDocument> documents =
                        processFiles(
                                dataItem.getArray("documents", new BsonArray()),
                                new ArrayList<BsonDocument>());
                assertEquals(getFilesCount(new BsonDocument()), documents.size());

                BsonDocument actual =
                        new MongoOperation<BsonDocument>() {
                            @Override
                            public void execute() {
                                filesCollection.find().first(getCallback());
                            }
                        }.get();
                for (BsonDocument expected : documents) {
                    assertEquals(expected.get("length"), actual.get("length"));
                    assertEquals(expected.get("chunkSize"), actual.get("chunkSize"));
                    assertEquals(expected.get("md5"), actual.get("md5"));
                    assertEquals(expected.get("filename"), actual.get("filename"));
                    if (expected.containsKey("metadata")) {
                        assertEquals(expected.get("metadata"), actual.get("metadata"));
                    }
                }
            } else if (insert.equals("expected.chunks")) {
                List<BsonDocument> documents =
                        processChunks(
                                dataItem.getArray("documents", new BsonArray()),
                                new ArrayList<BsonDocument>());
                assertEquals(getChunksCount(new BsonDocument()), documents.size());

                List<BsonDocument> actualDocuments =
                        new MongoOperation<List<BsonDocument>>() {
                            @Override
                            public void execute() {
                                chunksCollection.find().into(new ArrayList<BsonDocument>(), getCallback());
                            }
                        }.get();
                for (int i = 0; i < documents.size(); i++) {
                    BsonDocument expected = documents.get(i);
                    BsonDocument actual = actualDocuments.get(i);
                    assertEquals(new BsonObjectId(objectId), actual.getObjectId("files_id"));
                    assertEquals(expected.get("n"), actual.get("n"));
                    assertEquals(expected.get("data"), actual.get("data"));
                }
            }
        }
    }
}
private void arrangeGridFS(final BsonDocument arrange) {
    if (arrange.isEmpty()) {
        return;
    }
    for (BsonValue fileToArrange : arrange.getArray("data", new BsonArray())) {
        final BsonDocument document = fileToArrange.asDocument();
        if (document.containsKey("delete") && document.containsKey("deletes")) {
            for (BsonValue toDelete : document.getArray("deletes")) {
                final BsonDocument query = toDelete.asDocument().getDocument("q");
                int limit = toDelete.asDocument().getInt32("limit").getValue();

                final MongoCollection<BsonDocument> collection;
                if (document.getString("delete").getValue().equals("fs.files")) {
                    collection = filesCollection;
                } else {
                    collection = chunksCollection;
                }

                if (limit == 1) {
                    new MongoOperation<DeleteResult>() {
                        @Override
                        public void execute() {
                            collection.deleteOne(query, getCallback());
                        }
                    }.get();
                } else {
                    new MongoOperation<DeleteResult>() {
                        @Override
                        public void execute() {
                            collection.deleteMany(query, getCallback());
                        }
                    }.get();
                }
            }
        } else if (document.containsKey("insert") && document.containsKey("documents")) {
            if (document.getString("insert").getValue().equals("fs.files")) {
                new MongoOperation<Void>() {
                    @Override
                    public void execute() {
                        filesCollection.insertMany(
                                processFiles(document.getArray("documents"), new ArrayList<BsonDocument>()),
                                getCallback());
                    }
                }.get();
            } else {
                new MongoOperation<Void>() {
                    @Override
                    public void execute() {
                        chunksCollection.insertMany(
                                processChunks(document.getArray("documents"), new ArrayList<BsonDocument>()),
                                getCallback());
                    }
                }.get();
            }
        } else if (document.containsKey("update") && document.containsKey("updates")) {
            final MongoCollection<BsonDocument> collection;
            if (document.getString("update").getValue().equals("fs.files")) {
                collection = filesCollection;
            } else {
                collection = chunksCollection;
            }

            for (BsonValue rawUpdate : document.getArray("updates")) {
                final BsonDocument query = rawUpdate.asDocument().getDocument("q");
                final BsonDocument update = rawUpdate.asDocument().getDocument("u");
                update.put("$set", parseHexDocument(update.getDocument("$set")));

                new MongoOperation<UpdateResult>() {
                    @Override
                    public void execute() {
                        collection.updateMany(query, update, getCallback());
                    }
                }.get();
            }
        } else {
            throw new IllegalArgumentException("Unsupported arrange: " + document);
        }
    }
}
@Test
public void shouldPassAllOutcomes() {
    arrangeGridFS(definition.getDocument("arrange", new BsonDocument()));
    actionGridFS(definition.getDocument("act"), definition.getDocument("assert"));
}
@Override
public boolean documentHasId(final BsonDocument document) {
    return document.containsKey(ID_FIELD_NAME);
}