private void beforeFields(
     final BsonWriter bsonWriter, final EncoderContext encoderContext, final BsonDocument value) {
   if (encoderContext.isEncodingCollectibleDocument() && value.containsKey(ID_FIELD_NAME)) {
     bsonWriter.writeName(ID_FIELD_NAME);
     writeValue(bsonWriter, encoderContext, value.get(ID_FIELD_NAME));
   }
 }
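The beforeFields hook above writes the _id field first when a collectible document is being encoded. A minimal sketch of the resulting behaviour, assuming the public org.bson codec API (the class name IdFirstSketch and the field values are hypothetical):

import org.bson.BsonDocument;
import org.bson.BsonDocumentWriter;
import org.bson.BsonInt32;
import org.bson.BsonObjectId;
import org.bson.codecs.BsonDocumentCodec;
import org.bson.codecs.EncoderContext;
import org.bson.types.ObjectId;

public class IdFirstSketch {
  public static void main(String[] args) {
    // _id is deliberately appended last here.
    BsonDocument document = new BsonDocument()
        .append("x", new BsonInt32(1))
        .append("_id", new BsonObjectId(new ObjectId()));

    BsonDocument encoded = new BsonDocument();
    new BsonDocumentCodec().encode(
        new BsonDocumentWriter(encoded),
        document,
        EncoderContext.builder().isEncodingCollectibleDocument(true).build());

    // With a collectible encoding context, _id is written before the other fields.
    System.out.println(encoded.getFirstKey()); // "_id"
  }
}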
Example #2
 private BsonDocument parseHexDocument(final BsonDocument document, final String hexDocument) {
   if (document.containsKey(hexDocument) && document.get(hexDocument).isDocument()) {
     byte[] bytes =
         DatatypeConverter.parseHexBinary(
             document.getDocument(hexDocument).getString("$hex").getValue());
     document.put(hexDocument, new BsonBinary(bytes));
   }
   return document;
 }
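A minimal sketch of the transformation parseHexDocument applies, using the same org.bson types and javax.xml.bind.DatatypeConverter as above (the input values are hypothetical):

import javax.xml.bind.DatatypeConverter;
import org.bson.BsonBinary;
import org.bson.BsonDocument;
import org.bson.BsonString;

public class HexDocumentSketch {
  public static void main(String[] args) {
    // Hypothetical input: the "source" field holds a {$hex: "..."} sub-document.
    BsonDocument document = new BsonDocument(
        "source", new BsonDocument("$hex", new BsonString("0102ff")));

    // The same in-place replacement the helper performs.
    byte[] bytes = DatatypeConverter.parseHexBinary(
        document.getDocument("source").getString("$hex").getValue());
    document.put("source", new BsonBinary(bytes));

    System.out.println(document.toJson()); // "source" is now a BSON binary value
  }
}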
 private static <T> QueryResult<T> cursorDocumentToQueryResult(
     final BsonDocument cursorDocument,
     final ServerAddress serverAddress,
     final String fieldNameContainingBatch) {
   long cursorId = ((BsonInt64) cursorDocument.get("id")).getValue();
   MongoNamespace queryResultNamespace =
       new MongoNamespace(cursorDocument.getString("ns").getValue());
   return new QueryResult<T>(
       queryResultNamespace,
       BsonDocumentWrapperHelper.<T>toList(cursorDocument, fieldNameContainingBatch),
       cursorId,
       serverAddress);
 }
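cursorDocumentToQueryResult expects the cursor sub-document returned by commands such as find or getMore. A rough sketch of that shape with hypothetical values (QueryResult and BsonDocumentWrapperHelper are driver-internal, so only the input document is shown):

import java.util.Arrays;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonInt64;
import org.bson.BsonString;

public class CursorDocumentSketch {
  public static void main(String[] args) {
    BsonDocument cursorDocument = new BsonDocument()
        // Cursor id; 0 means the server-side cursor is exhausted.
        .append("id", new BsonInt64(0L))
        // Namespace in "<database>.<collection>" form, parsed into a MongoNamespace.
        .append("ns", new BsonString("test.collection"))
        // The batch field; the caller passes its name ("firstBatch" or "nextBatch").
        .append("firstBatch", new BsonArray(Arrays.asList(
            new BsonDocument("_id", new BsonInt32(1)))));

    System.out.println(cursorDocument.toJson());
  }
}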
Example #4
  @Override
  public BsonValue getDocumentId(final Document document) {
    if (!documentHasId(document)) {
      throw new IllegalStateException("The document does not contain an _id");
    }

    Object id = document.get(ID_FIELD_NAME);
    if (id instanceof BsonValue) {
      return (BsonValue) id;
    }

    BsonDocument idHoldingDocument = new BsonDocument();
    BsonWriter writer = new BsonDocumentWriter(idHoldingDocument);
    writer.writeStartDocument();
    writer.writeName(ID_FIELD_NAME);
    writeValue(writer, EncoderContext.builder().build(), id);
    writer.writeEndDocument();
    return idHoldingDocument.get(ID_FIELD_NAME);
  }
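Since getDocumentId is part of the public CollectibleCodec contract, it can be exercised directly. A minimal usage sketch, assuming the driver's DocumentCodec (the example values are hypothetical):

import org.bson.BsonValue;
import org.bson.Document;
import org.bson.codecs.DocumentCodec;

public class DocumentIdSketch {
  public static void main(String[] args) {
    Document document = new Document("_id", 42).append("name", "example");

    // Returns the _id converted to a BsonValue (here a BsonInt32).
    BsonValue id = new DocumentCodec().getDocumentId(document);
    System.out.println(id);
  }
}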
  @Override
  public BsonValue getDocumentId(JsonObject json) {
    if (!documentHasId(json)) {
      throw new IllegalStateException("The document does not contain an _id");
    }

    Object id = json.getValue(ID_FIELD);
    if (id instanceof String) {
      return new BsonString((String) id);
    }

    BsonDocument idHoldingDocument = new BsonDocument();
    BsonWriter writer = new BsonDocumentWriter(idHoldingDocument);
    writer.writeStartDocument();
    writer.writeName(ID_FIELD);
    writeValue(writer, null, id, EncoderContext.builder().build());
    writer.writeEndDocument();
    return idHoldingDocument.get(ID_FIELD);
  }
Example #6
 private List<BsonDocument> processFiles(
     final BsonArray bsonArray, final List<BsonDocument> documents) {
   for (BsonValue rawDocument : bsonArray.getValues()) {
     if (rawDocument.isDocument()) {
       BsonDocument document = rawDocument.asDocument();
       if (document.get("length").isInt32()) {
         document.put("length", new BsonInt64(document.getInt32("length").getValue()));
       }
       if (document.containsKey("metadata") && document.getDocument("metadata").isEmpty()) {
         document.remove("metadata");
       }
       if (document.containsKey("aliases")
           && document.getArray("aliases").getValues().size() == 0) {
         document.remove("aliases");
       }
       if (document.containsKey("contentType")
           && document.getString("contentType").getValue().length() == 0) {
         document.remove("contentType");
       }
       documents.add(document);
     }
   }
   return documents;
 }
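A short sketch of the normalisation processFiles applies to the expected GridFS files documents, using hypothetical field values:

import java.util.Collections;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonString;

public class FilesNormalisationSketch {
  public static void main(String[] args) {
    BsonDocument file = new BsonDocument()
        .append("length", new BsonInt32(10))          // int32 lengths are promoted to int64
        .append("metadata", new BsonDocument())       // empty metadata is dropped
        .append("aliases", new BsonArray())           // empty aliases array is dropped
        .append("contentType", new BsonString(""));   // empty contentType is dropped

    BsonArray files = new BsonArray(Collections.singletonList(file));
    // Passing files through processFiles(files, new ArrayList<BsonDocument>())
    // would leave only {"length": NumberLong(10)} in the resulting document.
    System.out.println(files);
  }
}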
Example #7
  private void doUpload(final BsonDocument rawArguments, final BsonDocument assertion) {
    Throwable error = null;
    ObjectId objectId = null;
    BsonDocument arguments = parseHexDocument(rawArguments, "source");
    try {
      final String filename = arguments.getString("filename").getValue();
      final InputStream inputStream =
          new ByteArrayInputStream(arguments.getBinary("source").getData());
      final GridFSUploadOptions options = new GridFSUploadOptions();
      BsonDocument rawOptions = arguments.getDocument("options", new BsonDocument());
      if (rawOptions.containsKey("chunkSizeBytes")) {
        options.chunkSizeBytes(rawOptions.getInt32("chunkSizeBytes").getValue());
      }
      if (rawOptions.containsKey("metadata")) {
        options.metadata(Document.parse(rawOptions.getDocument("metadata").toJson()));
      }

      objectId =
          new MongoOperation<ObjectId>() {
            @Override
            public void execute() {
              gridFSBucket.uploadFromStream(
                  filename, toAsyncInputStream(inputStream), options, getCallback());
            }
          }.get();
    } catch (Throwable e) {
      error = e;
    }

    if (assertion.containsKey("error")) {
      // We don't read anything further during upload, so the extra chunk is never seen
      if (!assertion.getString("error").getValue().equals("ExtraChunk")) {
        assertNotNull("Should have thrown an exception", error);
      }
    } else {
      assertNull("Should not have thrown an exception", error);
      for (BsonValue rawDataItem : assertion.getArray("data", new BsonArray())) {
        BsonDocument dataItem = rawDataItem.asDocument();
        String insert = dataItem.getString("insert", new BsonString("none")).getValue();
        if (insert.equals("expected.files")) {
          List<BsonDocument> documents =
              processFiles(
                  dataItem.getArray("documents", new BsonArray()), new ArrayList<BsonDocument>());

          assertEquals(getFilesCount(new BsonDocument()), documents.size());
          BsonDocument actual =
              new MongoOperation<BsonDocument>() {
                @Override
                public void execute() {
                  filesCollection.find().first(getCallback());
                }
              }.get();
          for (BsonDocument expected : documents) {
            assertEquals(expected.get("length"), actual.get("length"));
            assertEquals(expected.get("chunkSize"), actual.get("chunkSize"));
            assertEquals(expected.get("md5"), actual.get("md5"));
            assertEquals(expected.get("filename"), actual.get("filename"));

            if (expected.containsKey("metadata")) {
              assertEquals(expected.get("metadata"), actual.get("metadata"));
            }
          }
        } else if (insert.equals("expected.chunks")) {
          List<BsonDocument> documents =
              processChunks(
                  dataItem.getArray("documents", new BsonArray()), new ArrayList<BsonDocument>());
          assertEquals(getChunksCount(new BsonDocument()), documents.size());

          List<BsonDocument> actualDocuments =
              new MongoOperation<List<BsonDocument>>() {
                @Override
                public void execute() {
                  chunksCollection.find().into(new ArrayList<BsonDocument>(), getCallback());
                }
              }.get();

          for (int i = 0; i < documents.size(); i++) {
            BsonDocument expected = documents.get(i);
            BsonDocument actual = actualDocuments.get(i);
            assertEquals(new BsonObjectId(objectId), actual.getObjectId("files_id"));
            assertEquals(expected.get("n"), actual.get("n"));
            assertEquals(expected.get("data"), actual.get("data"));
          }
        }
      }
    }
  }
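A rough sketch of the rawArguments document doUpload consumes, with hypothetical values; the "source" field is the {$hex: ...} form that parseHexDocument converts to binary before the upload:

import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonString;

public class UploadArgumentsSketch {
  public static void main(String[] args) {
    BsonDocument rawArguments = new BsonDocument()
        .append("filename", new BsonString("example.txt"))
        .append("source", new BsonDocument("$hex", new BsonString("1122334455667788")))
        .append("options", new BsonDocument()
            .append("chunkSizeBytes", new BsonInt32(4))
            .append("metadata", new BsonDocument("x", new BsonInt32(1))));

    System.out.println(rawArguments.toJson());
  }
}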
 @Override
 public BsonValue getDocumentId(final BsonDocument document) {
   return document.get(ID_FIELD_NAME);
 }