/**
 * Runs a "download_by_name" GridFS test: streams the named file into an in-memory buffer
 * and checks the outcome against the expected hex payload or the expected error.
 *
 * @param arguments test arguments; must contain "filename", may contain "options" with an
 *     int32 "revision"
 * @param assertion expected outcome; either "result" (a document with a "$hex" payload) or
 *     "error"
 */
private void doDownloadByName(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

    try {
      final GridFSDownloadByNameOptions options = new GridFSDownloadByNameOptions();
      if (arguments.containsKey("options")) {
        int revision = arguments.getDocument("options").getInt32("revision").getValue();
        options.revision(revision);
      }

      // Bridge the async download into a synchronous call for the test.
      new MongoOperation<Long>() {
        @Override
        public void execute() {
          gridFSBucket.downloadToStreamByName(
              arguments.getString("filename").getValue(),
              toAsyncOutputStream(outputStream),
              options,
              getCallback());
        }
      }.get();

      outputStream.close();
    } catch (Throwable e) {
      // Any failure is captured and asserted on below rather than failing the test here.
      error = e;
    }
    if (assertion.containsKey("result")) {
      assertNull("Should not have thrown an exception", error);
      // JUnit convention: expected value first, actual value second.
      assertEquals(
          assertion.getDocument("result").getString("$hex").getValue(),
          printHexBinary(outputStream.toByteArray()).toLowerCase());
    } else if (assertion.containsKey("error")) {
      assertNotNull("Should have thrown an exception", error);
    }
  }
  /**
   * Runs a "download" GridFS test: streams the file with the given ObjectId into an
   * in-memory buffer and checks the outcome against the expected hex payload or error.
   *
   * @param arguments test arguments; must contain an ObjectId under "id"
   * @param assertion expected outcome; either "result" (a document with a "$hex" payload)
   *     or "error"
   */
  private void doDownload(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

    try {
      // Bridge the async download into a synchronous call for the test.
      new MongoOperation<Long>() {
        @Override
        public void execute() {
          gridFSBucket.downloadToStream(
              arguments.getObjectId("id").getValue(),
              toAsyncOutputStream(outputStream),
              getCallback());
        }
      }.get();
      outputStream.close();
    } catch (Throwable e) {
      // Any failure is captured and asserted on below rather than failing the test here.
      error = e;
    }

    if (assertion.containsKey("result")) {
      assertNull("Should not have thrown an exception", error);
      // JUnit convention: expected value first, actual value second.
      assertEquals(
          assertion.getDocument("result").getString("$hex").getValue(),
          printHexBinary(outputStream.toByteArray()).toLowerCase());
    } else if (assertion.containsKey("error")) {
      assertNotNull("Should have thrown an exception", error);
    }
  }
 /**
  * Writes the {@code _id} field ahead of all other fields when encoding a collectible
  * document that contains one, so the id always leads the serialized form.
  */
 private void beforeFields(
     final BsonWriter bsonWriter, final EncoderContext encoderContext, final BsonDocument value) {
   if (!encoderContext.isEncodingCollectibleDocument() || !value.containsKey(ID_FIELD_NAME)) {
     return;
   }
   bsonWriter.writeName(ID_FIELD_NAME);
   writeValue(bsonWriter, encoderContext, value.get(ID_FIELD_NAME));
 }
 /**
  * Replaces a {@code {"$hex": "..."}} sub-document stored under {@code hexDocument} with
  * the equivalent {@link BsonBinary} value; leaves the document untouched otherwise.
  *
  * @param document the document to rewrite in place
  * @param hexDocument the key whose value may be a {@code $hex} sub-document
  * @return the same {@code document} instance, for chaining
  */
 private BsonDocument parseHexDocument(final BsonDocument document, final String hexDocument) {
   boolean hasHexValue = document.containsKey(hexDocument) && document.get(hexDocument).isDocument();
   if (hasHexValue) {
     String hex = document.getDocument(hexDocument).getString("$hex").getValue();
     document.put(hexDocument, new BsonBinary(DatatypeConverter.parseHexBinary(hex)));
   }
   return document;
 }
 /**
  * Normalizes expected GridFS "files" documents from test fixtures: widens int32 lengths
  * to int64 and strips empty optional fields, so fixtures compare equal to the documents
  * the driver actually produces.
  *
  * @param bsonArray raw fixture documents; non-document elements are skipped
  * @param documents destination list; normalized documents are appended to it
  * @return the {@code documents} list, for chaining
  */
 private List<BsonDocument> processFiles(
     final BsonArray bsonArray, final List<BsonDocument> documents) {
   for (BsonValue rawDocument : bsonArray.getValues()) {
     if (rawDocument.isDocument()) {
       BsonDocument document = rawDocument.asDocument();
       // The driver stores length as int64; fixtures may declare it as int32.
       // Guarded with containsKey (like the fields below) to avoid an NPE on
       // fixtures that omit "length".
       if (document.containsKey("length") && document.get("length").isInt32()) {
         document.put("length", new BsonInt64(document.getInt32("length").getValue()));
       }
       // Empty optional fields are omitted on upload, so drop them from expectations.
       if (document.containsKey("metadata") && document.getDocument("metadata").isEmpty()) {
         document.remove("metadata");
       }
       if (document.containsKey("aliases") && document.getArray("aliases").isEmpty()) {
         document.remove("aliases");
       }
       if (document.containsKey("contentType")
           && document.getString("contentType").getValue().isEmpty()) {
         document.remove("contentType");
       }
       documents.add(document);
     }
   }
   return documents;
 }
  /**
   * Runs a "delete" GridFS test: deletes the file identified by {@code arguments.id} and
   * verifies either that an error was raised or that every expected delete target is gone
   * from both the files and chunks collections.
   *
   * @param arguments test arguments; must contain an ObjectId under "id"
   * @param assertion expected outcome; "error" for a failure case, otherwise "data"
   *     describing the delete operations whose targets must no longer exist
   */
  private void doDelete(final BsonDocument arguments, final BsonDocument assertion) {
    Throwable error = null;

    try {
      // Bridge the async delete into a synchronous call for the test.
      new MongoOperation<Void>() {
        @Override
        public void execute() {
          gridFSBucket.delete(arguments.getObjectId("id").getValue(), getCallback());
        }
      }.get();
    } catch (MongoGridFSException e) {
      // NOTE(review): only MongoGridFSException is captured here, unlike the download and
      // upload helpers which catch Throwable — confirm other exception types are meant to
      // propagate and fail the test directly.
      error = e;
    }

    if (assertion.containsKey("error")) {
      assertNotNull("Should have thrown an exception", error);
    } else {
      assertNull("Should not have thrown an exception", error);
      for (BsonValue rawDataItem : assertion.getArray("data")) {
        BsonDocument dataItem = rawDataItem.asDocument();
        for (BsonValue deletedItem : dataItem.getArray("deletes", new BsonArray())) {
          // Determine which id the expected delete targeted: files documents are keyed by
          // _id, chunks by files_id. An unrecognized collection name falls back to a fresh
          // ObjectId, which trivially satisfies the zero-count checks below.
          String delete = dataItem.getString("delete", new BsonString("none")).getValue();
          BsonObjectId id = new BsonObjectId(new ObjectId());
          if (delete.equals("expected.files")) {
            id = deletedItem.asDocument().getDocument("q").getObjectId("_id");
          } else if (delete.equals("expected.chunks")) {
            id = deletedItem.asDocument().getDocument("q").getObjectId("files_id");
          }
          long filesCount = getFilesCount(new BsonDocument("_id", id));
          long chunksCount = getChunksCount(new BsonDocument("files_id", id));

          assertEquals(filesCount, 0);
          assertEquals(chunksCount, 0);
        }
      }
    }
  }
  /**
   * Runs an "upload" GridFS test: uploads the hex-decoded "source" payload under the given
   * filename and options, then verifies either an expected error or the resulting files
   * and chunks collection contents.
   *
   * @param rawArguments test arguments; "source" holds a {"$hex": ...} payload, plus
   *     "filename" and optional "options" (chunkSizeBytes, metadata)
   * @param assertion expected outcome; "error" for a failure case, otherwise "data"
   *     describing the expected files/chunks documents
   */
  private void doUpload(final BsonDocument rawArguments, final BsonDocument assertion) {
    Throwable error = null;
    ObjectId objectId = null;
    // Converts the {"$hex": ...} source into a BsonBinary so the raw bytes can be streamed.
    BsonDocument arguments = parseHexDocument(rawArguments, "source");
    try {
      final String filename = arguments.getString("filename").getValue();
      final InputStream inputStream =
          new ByteArrayInputStream(arguments.getBinary("source").getData());
      final GridFSUploadOptions options = new GridFSUploadOptions();
      BsonDocument rawOptions = arguments.getDocument("options", new BsonDocument());
      if (rawOptions.containsKey("chunkSizeBytes")) {
        options.chunkSizeBytes(rawOptions.getInt32("chunkSizeBytes").getValue());
      }
      if (rawOptions.containsKey("metadata")) {
        options.metadata(Document.parse(rawOptions.getDocument("metadata").toJson()));
      }

      // Bridge the async upload into a synchronous call; the returned id is used below to
      // verify chunk ownership.
      objectId =
          new MongoOperation<ObjectId>() {
            @Override
            public void execute() {
              gridFSBucket.uploadFromStream(
                  filename, toAsyncInputStream(inputStream), options, getCallback());
            }
          }.get();
    } catch (Throwable e) {
      error = e;
    }

    if (assertion.containsKey("error")) {
      // We don't need to read anything more so don't see the extra chunk
      if (!assertion.getString("error").getValue().equals("ExtraChunk")) {
        assertNotNull("Should have thrown an exception", error);
      }
    } else {
      assertNull("Should not have thrown an exception", error);
      for (BsonValue rawDataItem : assertion.getArray("data", new BsonArray())) {
        BsonDocument dataItem = rawDataItem.asDocument();
        String insert = dataItem.getString("insert", new BsonString("none")).getValue();
        if (insert.equals("expected.files")) {
          // Verify the files collection: count matches, and the single stored document
          // agrees with each expected document on the compared fields.
          List<BsonDocument> documents =
              processFiles(
                  dataItem.getArray("documents", new BsonArray()), new ArrayList<BsonDocument>());

          assertEquals(getFilesCount(new BsonDocument()), documents.size());
          BsonDocument actual =
              new MongoOperation<BsonDocument>() {
                @Override
                public void execute() {
                  filesCollection.find().first(getCallback());
                }
              }.get();
          for (BsonDocument expected : documents) {
            assertEquals(expected.get("length"), actual.get("length"));
            assertEquals(expected.get("chunkSize"), actual.get("chunkSize"));
            assertEquals(expected.get("md5"), actual.get("md5"));
            assertEquals(expected.get("filename"), actual.get("filename"));

            if (expected.containsKey("metadata")) {
              assertEquals(expected.get("metadata"), actual.get("metadata"));
            }
          }
        } else if (insert.equals("expected.chunks")) {
          // Verify the chunks collection: count matches, and each stored chunk belongs to
          // the uploaded file and carries the expected index and data.
          List<BsonDocument> documents =
              processChunks(
                  dataItem.getArray("documents", new BsonArray()), new ArrayList<BsonDocument>());
          assertEquals(getChunksCount(new BsonDocument()), documents.size());

          List<BsonDocument> actualDocuments =
              new MongoOperation<List<BsonDocument>>() {
                @Override
                public void execute() {
                  chunksCollection.find().into(new ArrayList<BsonDocument>(), getCallback());
                }
              }.get();

          for (int i = 0; i < documents.size(); i++) {
            BsonDocument expected = documents.get(i);
            BsonDocument actual;
            actual = actualDocuments.get(i);
            assertEquals(new BsonObjectId(objectId), actual.getObjectId("files_id"));
            assertEquals(expected.get("n"), actual.get("n"));
            assertEquals(expected.get("data"), actual.get("data"));
          }
        }
      }
    }
  }
  /**
   * Seeds the GridFS collections from a test fixture's "arrange" document. Each entry in
   * the "data" array is dispatched by shape: delete + deletes, insert + documents, or
   * update + updates; any other shape is rejected.
   *
   * @param arrange the arrange document; an empty document is a no-op
   * @throws IllegalArgumentException if a data entry matches none of the supported shapes
   */
  private void arrangeGridFS(final BsonDocument arrange) {
    if (arrange.isEmpty()) {
      return;
    }
    for (BsonValue fileToArrange : arrange.getArray("data", new BsonArray())) {
      final BsonDocument document = fileToArrange.asDocument();
      if (document.containsKey("delete") && document.containsKey("deletes")) {
        for (BsonValue toDelete : document.getArray("deletes")) {
          final BsonDocument query = toDelete.asDocument().getDocument("q");
          int limit = toDelete.asDocument().getInt32("limit").getValue();

          // "fs.files" targets the files collection; anything else goes to chunks.
          final MongoCollection<BsonDocument> collection;
          if (document.getString("delete").getValue().equals("fs.files")) {
            collection = filesCollection;
          } else {
            collection = chunksCollection;
          }

          // limit 1 maps to deleteOne; any other limit maps to deleteMany.
          if (limit == 1) {
            new MongoOperation<DeleteResult>() {
              @Override
              public void execute() {
                collection.deleteOne(query, getCallback());
              }
            }.get();
          } else {
            new MongoOperation<DeleteResult>() {
              @Override
              public void execute() {
                collection.deleteMany(query, getCallback());
              }
            }.get();
          }
        }
      } else if (document.containsKey("insert") && document.containsKey("documents")) {
        // Normalize fixture documents before inserting so they match driver output.
        if (document.getString("insert").getValue().equals("fs.files")) {
          new MongoOperation<Void>() {
            @Override
            public void execute() {
              filesCollection.insertMany(
                  processFiles(document.getArray("documents"), new ArrayList<BsonDocument>()),
                  getCallback());
            }
          }.get();
        } else {
          new MongoOperation<Void>() {
            @Override
            public void execute() {
              chunksCollection.insertMany(
                  processChunks(document.getArray("documents"), new ArrayList<BsonDocument>()),
                  getCallback());
            }
          }.get();
        }
      } else if (document.containsKey("update") && document.containsKey("updates")) {
        final MongoCollection<BsonDocument> collection;
        if (document.getString("update").getValue().equals("fs.files")) {
          collection = filesCollection;
        } else {
          collection = chunksCollection;
        }

        for (BsonValue rawUpdate : document.getArray("updates")) {
          final BsonDocument query = rawUpdate.asDocument().getDocument("q");
          final BsonDocument update = rawUpdate.asDocument().getDocument("u");
          // Single-argument parseHexDocument overload (defined elsewhere in this file) —
          // presumably converts $hex values within $set; verify against its definition.
          update.put("$set", parseHexDocument(update.getDocument("$set")));
          new MongoOperation<UpdateResult>() {
            @Override
            public void execute() {
              collection.updateMany(query, update, getCallback());
            }
          }.get();
        }
      } else {
        throw new IllegalArgumentException("Unsupported arrange: " + document);
      }
    }
  }
 /**
  * Reports whether the given document already carries an {@code _id} field.
  *
  * @param document the document to inspect
  * @return {@code true} when the {@code _id} field is present
  */
 @Override
 public boolean documentHasId(final BsonDocument document) {
   final boolean hasId = document.containsKey(ID_FIELD_NAME);
   return hasId;
 }