Example #1
private byte[] createAvroData(String name, int age, List<String> emails) throws IOException {
    String AVRO_SCHEMA =
        "{\n"
            + "\"type\": \"record\",\n"
            + "\"name\": \"Employee\",\n"
            + "\"fields\": [\n"
            + " {\"name\": \"name\", \"type\": \"string\"},\n"
            + " {\"name\": \"age\", \"type\": \"int\"},\n"
            + " {\"name\": \"emails\", \"type\": {\"type\": \"array\", \"items\": \"string\"}},\n"
            + " {\"name\": \"boss\", \"type\": [\"Employee\",\"null\"]}\n"
            + "]}";
    Schema schema = new Schema.Parser().parse(AVRO_SCHEMA);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    GenericRecord e1 = new GenericData.Record(schema);
    e1.put("name", name);
    e1.put("age", age);
    e1.put("emails", emails);
    e1.put("boss", null);

    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
    dataFileWriter.create(schema, out);
    dataFileWriter.append(e1);
    dataFileWriter.close();
    return out.toByteArray();
  }
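A minimal round-trip check for the helper above (a sketch; the method name printAvroData is illustrative, not from the source). DataFileStream reads the same Avro object-container format that the DataFileWriter produced:

// Requires org.apache.avro.file.DataFileStream, org.apache.avro.generic.*,
// and java.io.ByteArrayInputStream.
private void printAvroData(byte[] avroBytes) throws IOException {
  try (DataFileStream<GenericRecord> reader =
      new DataFileStream<>(
          new ByteArrayInputStream(avroBytes), new GenericDatumReader<GenericRecord>())) {
    for (GenericRecord record : reader) {
      System.out.println(record); // e.g. the Employee written by createAvroData(...)
    }
  }
}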
Example #2
 private static GenericRecord buildUser(Schema schema, String name, String office, String color) {
   GenericRecord user = new GenericData.Record(schema);
   user.put("name", name);
   user.put("office", office);
   if (color != null) user.put("favorite_color", color);
   return user;
 }
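A schema this helper could target (an assumption; only the field names come from the code above). favorite_color is declared nullable, since the helper sets it only when a value is present:

// Assumed schema matching buildUser's fields.
Schema userSchema = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
        + "{\"name\":\"name\",\"type\":\"string\"},"
        + "{\"name\":\"office\",\"type\":\"string\"},"
        + "{\"name\":\"favorite_color\",\"type\":[\"null\",\"string\"],\"default\":null}]}");
GenericRecord user = buildUser(userSchema, "Alice", "HQ", null);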
Example #3
  @Test
  public void testRead_SpecificReader() throws IOException {
    GenericRecord savedRecord = new GenericData.Record(schema);
    savedRecord.put("name", "John Doe");
    savedRecord.put("age", 42);
    savedRecord.put("siblingnames", Lists.newArrayList("Jimmy", "Jane"));
    populateGenericFile(Lists.newArrayList(savedRecord));

    AvroFileReaderFactory<Person> genericReader =
        new AvroFileReaderFactory<Person>(Avros.records(Person.class), new Configuration());
    Iterator<Person> recordIterator =
        genericReader.read(
            FileSystem.getLocal(new Configuration()), new Path(this.avroFile.getAbsolutePath()));

    Person expectedPerson = new Person();
    expectedPerson.setAge(42);
    expectedPerson.setName("John Doe");
    List<CharSequence> siblingNames = Lists.newArrayList();
    siblingNames.add("Jimmy");
    siblingNames.add("Jane");
    expectedPerson.setSiblingnames(siblingNames);

    Person person = recordIterator.next();

    assertEquals(expectedPerson, person);
    assertFalse(recordIterator.hasNext());
  }
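The test relies on a populateGenericFile helper that is not shown; a plausible shape for it, assuming the test class's schema and avroFile fields (an assumption, not the project's actual code):

private void populateGenericFile(List<GenericRecord> records) throws IOException {
  try (DataFileWriter<GenericRecord> writer =
      new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema))) {
    writer.create(schema, this.avroFile);
    for (GenericRecord record : records) {
      writer.append(record);
    }
  }
}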
Example #4
 private GenericRecord createField(Schema schema) {
   GenericRecord field = new GenericData.Record(schema);
   field.put("name", schema.getName());
   field.put("boost", 2.3f);
   field.put("omitNorms", true);
   field.put("omitTermFreqAndPositions", true);
   return field;
 }
Example #5
  private GenericRecord createAddress(int memberId) {
    GenericRecord record = new GenericData.Record(_store.getSchema());

    record.put("id", memberId);
    record.put("city", new Utf8("city." + memberId));
    record.put("state", new Utf8("state." + memberId));
    record.put("country", new Utf8("country." + memberId));
    record.put("postal_code", new Utf8("postal." + memberId));

    return record;
  }
Example #6
 public void execute(TridentTuple tuple, TridentCollector collector) {
   GenericRecord docEntry = new GenericData.Record(schema);
   docEntry.put("docid", tuple.getStringByField("documentId"));
   docEntry.put("time", Time.currentTimeMillis());
   docEntry.put("line", tuple.getStringByField("document"));
   try {
     dataFileWriter.append(docEntry);
     dataFileWriter.flush();
   } catch (IOException e) {
     LOG.error("Error writing to document record: " + e);
     throw new RuntimeException(e);
   }
 }
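The fields set above imply a schema along these lines (assumed; only the field names are taken from the code):

// Assumed record schema for the document lines written by this function.
private static final Schema schema = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"DocumentLine\",\"fields\":["
        + "{\"name\":\"docid\",\"type\":\"string\"},"
        + "{\"name\":\"time\",\"type\":\"long\"},"
        + "{\"name\":\"line\",\"type\":\"string\"}]}");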
Example #7
  @Override
  public void execute(Tuple inputTuple) {
    /* Processing tuples of the shape
    (DATASOURCE_ID, TIMESTAMP_FIELD, CONTENT_FIELD) */

    // get datasource
    String datasource = inputTuple.getStringByField(RestIngestionSpout.DATASOURCE_ID);
    // compute month
    long timestamp = inputTuple.getLongByField(RestIngestionSpout.TIMESTAMP_FIELD);
    // this computation is completely stateless
    String month = timestampToMonth(timestamp);

    // now get the DataFileWriter
    DataFileWriter<GenericRecord> writer = null;
    try {
      writer = this.writersCache.get(DatasourceMonth.create(datasource, month));
    } catch (ExecutionException ee) {
      LOGGER.error(
          "Error getting DataFileWriter for tuple for datasource "
              + datasource
              + " and timestamp "
              + timestamp,
          ee);
      this.collector.fail(inputTuple);
      return;
    }

    // create and write a new record
    GenericRecord newDataRecord = new GenericData.Record(AVRO_SCHEMA);
    newDataRecord.put(AVRO_TIMESTAMP_FIELD, timestamp);
    newDataRecord.put(
        AVRO_CONTENT_FIELD, inputTuple.getStringByField(RestIngestionSpout.CONTENT_FIELD));
    try {
      writer.append(newDataRecord);
    } catch (IOException ioe) {
      LOGGER.error(
          "Error writing Avro record for datasource "
              + datasource
              + " and timestamp "
              + timestamp,
          ioe);
      this.collector.fail(inputTuple);
      return;
    }

    // acknowledge this tuple as successfully processed
    this.collector.ack(inputTuple);
  }
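writersCache is defined elsewhere; one plausible wiring is a Guava LoadingCache keyed by DatasourceMonth (an assumption; openWriterFor is a hypothetical helper named for this sketch). A real setup would also register a removal listener to flush and close evicted writers:

private LoadingCache<DatasourceMonth, DataFileWriter<GenericRecord>> writersCache =
    CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<DatasourceMonth, DataFileWriter<GenericRecord>>() {
          @Override
          public DataFileWriter<GenericRecord> load(DatasourceMonth key) throws IOException {
            return openWriterFor(key); // hypothetical: open one Avro file per datasource-month
          }
        });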
Example #8
  public Iterable<GenericRecord> convertRecord(
      Schema outputSchema, JsonElement inputRecord, WorkUnitState workUnit) {
    JsonElement element = GSON.fromJson(inputRecord, JsonElement.class);
    Map<String, Object> fields = GSON.fromJson(element, FIELD_ENTRY_TYPE);
    GenericRecord record = new GenericData.Record(outputSchema);
    for (Map.Entry<String, Object> entry : fields.entrySet()) {
      if (entry.getKey().equals("*")) {
        // switch '*' to 'content' since '*' is not a valid avro schema field name
        record.put(JSON_CONTENT_MEMBER, entry.getValue());
      } else {
        record.put(entry.getKey(), entry.getValue());
      }
    }

    return new SingleRecordIterable<GenericRecord>(record);
  }
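The converter references three members defined elsewhere; plausible declarations, sketched here as assumptions for completeness (TypeToken is com.google.gson.reflect.TypeToken):

private static final Gson GSON = new Gson();
// Gson type token used to read a JSON object as a field map.
private static final java.lang.reflect.Type FIELD_ENTRY_TYPE =
    new TypeToken<Map<String, Object>>() {}.getType();
// '*' is not a valid Avro field name, so its value is stored under this name instead.
private static final String JSON_CONTENT_MEMBER = "content";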
Example #9
  public void testWrite() throws IOException {

    URL url = this.getClass().getClassLoader().getResource("input/Company.avsc");
    assertNotNull(url);
    Schema schema = new Schema.Parser().parse(new File(url.getFile()));
    assertNotNull(schema);

    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
    // Another way of loading a file
    File file = new File("src/test/resources/input/companies.avro");
    DataFileReader<GenericRecord> dataFileReader =
        new DataFileReader<GenericRecord>(file, datumReader);

    File fileOut = new File("target/companies2.avro");
    Schema schemaOut =
        new Schema.Parser().parse(new File("src/test/resources/input/Company2.avsc"));
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schemaOut);
    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter);

    GenericRecord company = null;
    int count = 0;
    while (dataFileReader.hasNext()) {
      company = dataFileReader.next(company);
      if (company.get("name").toString().equals("aol")) {
        if (count == 0) {
          // create the output file and writer once, on the first matching record
          dataFileWriter.create(schemaOut, fileOut);
        }

        GenericRecord recordOut = new GenericData.Record(schemaOut);
        recordOut.put("id", company.get("id"));
        recordOut.put("name", company.get("name"));
        assertTrue(recordOut.getSchema().getField("address") != null);
        assertTrue(recordOut.getSchema().getField("employeeCount") == null);

        // address is of complex type
        GenericRecord address =
            new GenericData.Record((GenericData.Record) company.get("address"), true);
        recordOut.put("address", address);

        dataFileWriter.append(recordOut);

        count++;
      }
    }
    assertTrue(count > 0);

    dataFileReader.close();
    dataFileWriter.close();
  }
Example #10
  @Test
  public void testRead_GenericReader() throws IOException {
    GenericRecord savedRecord = new GenericData.Record(schema);
    savedRecord.put("name", "John Doe");
    savedRecord.put("age", 42);
    savedRecord.put("siblingnames", Lists.newArrayList("Jimmy", "Jane"));
    populateGenericFile(Lists.newArrayList(savedRecord));

    AvroFileReaderFactory<GenericData.Record> genericReader =
        new AvroFileReaderFactory<GenericData.Record>(Avros.generics(schema), new Configuration());
    Iterator<GenericData.Record> recordIterator =
        genericReader.read(
            FileSystem.getLocal(new Configuration()), new Path(this.avroFile.getAbsolutePath()));

    GenericRecord genericRecord = recordIterator.next();
    assertEquals(savedRecord, genericRecord);
    assertFalse(recordIterator.hasNext());
  }
Example #11
  private GenericRecord convertRecord(String inputRecord) {
    Gson gson = new Gson();
    JsonElement element = gson.fromJson(inputRecord, JsonElement.class);
    Map<String, Object> fields = gson.fromJson(element, FIELD_ENTRY_TYPE);
    GenericRecord outputRecord = new GenericData.Record(schema);
    for (Map.Entry<String, Object> entry : fields.entrySet()) {
      outputRecord.put(entry.getKey(), entry.getValue());
    }

    return outputRecord;
  }
Example #12
 private GenericRecord createNumeric(Schema schema) {
   GenericRecord numericField = new GenericData.Record(schema);
   numericField.put("name", "int");
   numericField.put("precisionStep", 3);
   numericField.put("store", "YES");
   numericField.put("indexed", true);
   numericField.put("boost", 2.3f);
   numericField.put("omitNorms", true);
   numericField.put("omitTermFreqAndPositions", true);
   return numericField;
 }
Example #13
  public void serializeGeneric() throws IOException {
    // Create a datum to serialize.
    Schema schema = new Schema.Parser().parse(getClass().getResourceAsStream("/MyPair.avsc"));
    GenericRecord datum = new GenericData.Record(schema);
    datum.put("left", new Utf8("dog"));
    datum.put("right", new Utf8("cat"));

    // Serialize it.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush();
    out.close();
    System.out.println("Serialization: " + out);

    // Deserialize it.
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
    GenericRecord result = reader.read(null, decoder);
    System.out.printf("Left: %s, Right: %s\n", result.get("left"), result.get("right"));
  }
Example #14
    public void reduce(
        Utf8 key, Iterable<Long> values, AvroCollector<GenericRecord> collector, Reporter reporter)
        throws IOException {
      long sum = 0;
      for (Long val : values) {
        sum += val;
      }

      GenericRecord value = new GenericData.Record(OUTPUT_SCHEMA);
      value.put("shape", key);
      value.put("count", sum);

      collector.collect(value);
    }
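The reducer writes to OUTPUT_SCHEMA, which is defined elsewhere; a schema consistent with the fields used above would be (an assumption):

private static final Schema OUTPUT_SCHEMA = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"ShapeCount\",\"fields\":["
        + "{\"name\":\"shape\",\"type\":\"string\"},"
        + "{\"name\":\"count\",\"type\":\"long\"}]}");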
Example #15
    /**
     * Describe <code>map</code> method here.
     *
     * @param key an <code>AvroKey</code> wrapping the input record
     * @param value a <code>NullWritable</code> value
     * @param context a <code>Mapper.Context</code> value
     * @exception IOException if an error occurs
     * @exception InterruptedException if an error occurs
     */
    @Override
    public final void map(AvroKey<GenericRecord> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      GenericRecord data = key.datum();
      GenericRecord output = new GenericData.Record(outputSchema);
      output.put(S_NO, data.get(S_NO));
      if (data.get(PSAM) == null) {
        if (data.get(LATITUDE) != null && data.get(LONGITUDE) != null) {
          String[] parts = data.get(LATITUDE).toString().split("\\|");
          for (int i = 0; i < parts.length; i++) {
            System.out.println("parts[" + i + "]:" + parts[i]);
          }

          output.put(
              LATITUDE,
              parts[0].replaceFirst("^0+(?!$)", "") + "." + parts[1] + (parts[2].replace(".", "")));
          String[] parts1 = data.get(LONGITUDE).toString().split("\\|");
          output.put(
              LONGITUDE,
              parts1[0].replaceFirst("^0+(?!$)", "")
                  + "."
                  + parts1[1]
                  + (parts1[2].replace(".", "")));
          context.write(new Text(data.get(S_NO).toString()), new AvroValue<GenericRecord>(output));
        }
      } else {
        if (typeSet.contains(data.get(TERM_TYPE))) {
          output.put(PSAM, data.get(PSAM));
          output.put(TERM_TYPE, data.get(TERM_TYPE));
          context.write(new Text(data.get(S_NO).toString()), new AvroValue<GenericRecord>(output));
        }
      }
    }
Example #16
    /**
     * Describe <code>reduce</code> method here.
     *
     * @param key a <code>Text</code> value
     * @param values an <code>Iterable</code> of Avro values for the key
     * @param context a <code>Reducer.Context</code> value
     * @exception IOException if an error occurs
     * @exception InterruptedException if an error occurs
     */
    @Override
    public final void reduce(Text key, Iterable<AvroValue<GenericRecord>> values, Context context)
        throws IOException, InterruptedException {
      GenericRecord output = new GenericData.Record(outputSchema);

      for (AvroValue<GenericRecord> value : values) {
        GenericRecord datum = value.datum();
        for (Schema.Field field : datum.getSchema().getFields()) {
          String fieldName = field.name();
          Object fieldValue = datum.get(fieldName);
          if (fieldValue != null) {
            output.put(fieldName, fieldValue);
          }
        }
      }

      CharSequence psam = (CharSequence) output.get(PSAM);
      CharSequence longitude = (CharSequence) output.get(LONGITUDE);
      CharSequence latitude = (CharSequence) output.get(LATITUDE);
      if (psam != null && longitude != null && latitude != null) {
        context.write(new AvroKey<GenericRecord>(output), NullWritable.get());
      }
    }
Example #17
 public static GenericRecord toGenericRecord(byte[] bytes) {
   GenericRecord record = new GenericData.Record(SCHEMA);
   record.put(BYTES_FIELD, ByteBuffer.wrap(bytes));
   return record;
 }
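A sketch of the reverse conversion, under the same assumptions (SCHEMA and BYTES_FIELD as above; the method name is illustrative):

public static byte[] fromGenericRecord(GenericRecord record) {
  // Read from a duplicate so the record's buffer position is left untouched.
  ByteBuffer buffer = ((ByteBuffer) record.get(BYTES_FIELD)).duplicate();
  byte[] bytes = new byte[buffer.remaining()];
  buffer.get(bytes);
  return bytes;
}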
Example #18
  @Test
  public void testConvertGenericRecordToTableRow() throws Exception {
    TableSchema tableSchema = new TableSchema();
    tableSchema.setFields(fields);
    Schema avroSchema = AvroCoder.of(Bird.class).getSchema();

    {
      // Test nullable fields.
      GenericRecord record = new GenericData.Record(avroSchema);
      record.put("number", 5L);
      TableRow convertedRow = BigQueryAvroUtils.convertGenericRecordToTableRow(record, tableSchema);
      TableRow row = new TableRow().set("number", "5").set("associates", new ArrayList<TableRow>());
      assertEquals(row, convertedRow);
    }
    {
      // Test type conversion for:
      // INTEGER, FLOAT, TIMESTAMP, BOOLEAN, BYTES, DATE, DATETIME, TIME.
      GenericRecord record = new GenericData.Record(avroSchema);
      byte[] soundBytes = "chirp,chirp".getBytes();
      ByteBuffer soundByteBuffer = ByteBuffer.wrap(soundBytes);
      soundByteBuffer.rewind();
      record.put("number", 5L);
      record.put("quality", 5.0);
      record.put("birthday", 5L);
      record.put("flighted", Boolean.TRUE);
      record.put("sound", soundByteBuffer);
      record.put("anniversaryDate", new Utf8("2000-01-01"));
      record.put("anniversaryDatetime", new String("2000-01-01 00:00:00.000005"));
      record.put("anniversaryTime", new Utf8("00:00:00.000005"));
      TableRow convertedRow = BigQueryAvroUtils.convertGenericRecordToTableRow(record, tableSchema);
      TableRow row =
          new TableRow()
              .set("number", "5")
              .set("birthday", "1970-01-01 00:00:00.000005 UTC")
              .set("quality", 5.0)
              .set("associates", new ArrayList<TableRow>())
              .set("flighted", Boolean.TRUE)
              .set("sound", BaseEncoding.base64().encode(soundBytes))
              .set("anniversaryDate", "2000-01-01")
              .set("anniversaryDatetime", "2000-01-01 00:00:00.000005")
              .set("anniversaryTime", "00:00:00.000005");
      assertEquals(row, convertedRow);
    }
    {
      // Test repeated fields.
      Schema subBirdSchema = AvroCoder.of(Bird.SubBird.class).getSchema();
      GenericRecord nestedRecord = new GenericData.Record(subBirdSchema);
      nestedRecord.put("species", "other");
      GenericRecord record = new GenericData.Record(avroSchema);
      record.put("number", 5L);
      record.put("associates", Lists.<GenericRecord>newArrayList(nestedRecord));
      TableRow convertedRow = BigQueryAvroUtils.convertGenericRecordToTableRow(record, tableSchema);
      TableRow row =
          new TableRow()
              .set(
                  "associates",
                  Lists.<TableRow>newArrayList(new TableRow().set("species", "other")))
              .set("number", "5");
      assertEquals(row, convertedRow);
    }
  }
Example #19
 private static GenericRecord buildPet(Schema schema, String name, Integer legs) {
   GenericRecord pet = new GenericData.Record(schema);
   pet.put("name", name);
   pet.put("legs", legs);
   return pet;
 }
Example #20
  @Test
  public void experimentWithAvro() throws Exception {
    String root = "org/hibernate/search/remote/codex/avro/v1_1/";
    parseSchema(root + "attribute/TokenTrackingAttribute.avro", "attribute/TokenTrackingAttribute");
    parseSchema(root + "attribute/CharTermAttribute.avro", "attribute/CharTermAttribute");
    parseSchema(root + "attribute/PayloadAttribute.avro", "attribute/PayloadAttribute");
    parseSchema(root + "attribute/KeywordAttribute.avro", "attribute/KeywordAttribute");
    parseSchema(
        root + "attribute/PositionIncrementAttribute.avro", "attribute/PositionIncrementAttribute");
    parseSchema(root + "attribute/FlagsAttribute.avro", "attribute/FlagsAttribute");
    parseSchema(root + "attribute/TypeAttribute.avro", "attribute/TypeAttribute");
    parseSchema(root + "attribute/OffsetAttribute.avro", "attribute/OffsetAttribute");
    parseSchema(root + "field/TermVector.avro", "field/TermVector");
    parseSchema(root + "field/Index.avro", "field/Index");
    parseSchema(root + "field/Store.avro", "field/Store");
    parseSchema(root + "field/TokenStreamField.avro", "field/TokenStreamField");
    parseSchema(root + "field/ReaderField.avro", "field/ReaderField");
    parseSchema(root + "field/StringField.avro", "field/StringField");
    parseSchema(root + "field/BinaryField.avro", "field/BinaryField");
    parseSchema(root + "field/NumericIntField.avro", "field/NumericIntField");
    parseSchema(root + "field/NumericLongField.avro", "field/NumericLongField");
    parseSchema(root + "field/NumericFloatField.avro", "field/NumericFloatField");
    parseSchema(root + "field/NumericDoubleField.avro", "field/NumericDoubleField");
    parseSchema(root + "field/CustomFieldable.avro", "field/CustomFieldable");
    parseSchema(root + "Document.avro", "Document");
    parseSchema(root + "operation/Id.avro", "operation/Id");
    parseSchema(root + "operation/OptimizeAll.avro", "operation/OptimizeAll");
    parseSchema(root + "operation/PurgeAll.avro", "operation/PurgeAll");
    parseSchema(root + "operation/Flush.avro", "operation/Flush");
    parseSchema(root + "operation/Delete.avro", "operation/Delete");
    parseSchema(root + "operation/Add.avro", "operation/Add");
    parseSchema(root + "operation/Update.avro", "operation/Update");
    parseSchema(root + "Message.avro", "Message");

    String filename = root + "Works.avpr";
    Protocol protocol = parseProtocol(filename, "Works");
    final Schema termVectorSchema = protocol.getType("TermVector");
    final Schema indexSchema = protocol.getType("Index");
    final Schema storeSchema = protocol.getType("Store");
    final Schema tokenTrackingAttribute = protocol.getType("TokenTrackingAttribute");
    final Schema tokenStreamSchema = protocol.getType("TokenStreamField");
    final Schema readerSchema = protocol.getType("ReaderField");
    final Schema stringSchema = protocol.getType("StringField");
    final Schema binarySchema = protocol.getType("BinaryField");
    final Schema intFieldSchema = protocol.getType("NumericIntField");
    final Schema longFieldSchema = protocol.getType("NumericLongField");
    final Schema floatFieldSchema = protocol.getType("NumericFloatField");
    final Schema doubleFieldSchema = protocol.getType("NumericDoubleField");
    final Schema customFieldableSchema = protocol.getType("CustomFieldable");
    final Schema documentSchema = protocol.getType("Document");
    final Schema idSchema = protocol.getType("Id");
    final Schema optimizeAllSchema = protocol.getType("OptimizeAll");
    final Schema purgeAllSchema = protocol.getType("PurgeAll");
    final Schema flushSchema = protocol.getType("Flush");
    final Schema deleteSchema = protocol.getType("Delete");
    final Schema addSchema = protocol.getType("Add");
    final Schema updateSchema = protocol.getType("Update");
    Schema messageSchema = protocol.getType("Message");

    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(messageSchema);
    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);

    byte[] serializableSample = new byte[10];
    for (int i = 0; i < 10; i++) {
      serializableSample[i] = (byte) i;
    }

    List<String> classReferences = new ArrayList<String>();
    classReferences.add(AvroTest.class.getName());

    List<GenericRecord> fieldables = new ArrayList<GenericRecord>(1);
    // custom fieldable
    GenericRecord customFieldable = new GenericData.Record(customFieldableSchema);
    customFieldable.put("instance", ByteBuffer.wrap(serializableSample));
    fieldables.add(customFieldable);

    // numeric fields
    GenericRecord numericField = createNumeric(intFieldSchema);
    numericField.put("value", 3);
    fieldables.add(numericField);
    numericField = createNumeric(longFieldSchema);
    numericField.put("value", 3l);
    fieldables.add(numericField);
    numericField = createNumeric(floatFieldSchema);
    numericField.put("value", 2.3f);
    fieldables.add(numericField);
    numericField = createNumeric(doubleFieldSchema);
    numericField.put("value", 2.3d);
    fieldables.add(numericField);

    // fields
    GenericRecord field = createField(binarySchema);
    field.put("offset", 0);
    field.put("length", 10);
    field.put("value", ByteBuffer.wrap(serializableSample));
    fieldables.add(field);
    field = createField(stringSchema);
    field.put("value", stringSchema.getName());
    field.put("store", "YES");
    field.put("index", "ANALYZED");
    field.put("termVector", "WITH_OFFSETS");
    fieldables.add(field);
    field = createField(tokenStreamSchema);

    List<List<Object>> tokens = new ArrayList<List<Object>>();
    List<Object> attrs = new ArrayList<Object>();
    tokens.add(attrs);
    GenericData.Record attr = new GenericData.Record(tokenTrackingAttribute);
    List<Integer> positions = new ArrayList<Integer>();
    positions.add(1);
    positions.add(2);
    positions.add(3);
    positions.add(4);
    attr.put("positions", positions);
    attrs.add(attr);
    attrs.add(ByteBuffer.wrap(serializableSample));

    field.put("value", tokens);
    field.put("termVector", "WITH_OFFSETS");
    fieldables.add(field);
    field = createField(readerSchema);
    field.put("value", ByteBuffer.wrap(serializableSample));
    field.put("termVector", "WITH_OFFSETS");
    fieldables.add(field);

    GenericRecord doc = new GenericData.Record(documentSchema);
    doc.put("boost", 2.3f);
    doc.put("fieldables", fieldables);

    GenericRecord add = new GenericData.Record(addSchema);
    add.put("class", classReferences.indexOf(AvroTest.class.getName()));
    GenericRecord id = new GenericData.Record(idSchema);
    id.put("value", ByteBuffer.wrap(serializableSample));
    add.put("id", id);
    add.put("document", doc);
    Map<String, String> analyzers = new HashMap<String, String>();
    analyzers.put("name", "ngram");
    analyzers.put("description", "porter");
    add.put("fieldToAnalyzerMap", analyzers);

    GenericRecord delete = new GenericData.Record(deleteSchema);
    delete.put("class", classReferences.indexOf(AvroTest.class.getName()));
    id = new GenericData.Record(idSchema);
    id.put("value", new Long(30));
    delete.put("id", id);

    GenericRecord purgeAll = new GenericData.Record(purgeAllSchema);
    purgeAll.put("class", classReferences.indexOf(AvroTest.class.getName()));
    GenericRecord optimizeAll = new GenericData.Record(optimizeAllSchema);

    GenericRecord flush = new GenericData.Record(flushSchema);

    List<GenericRecord> operations = new ArrayList<GenericRecord>(1);
    operations.add(purgeAll);
    operations.add(optimizeAll);
    operations.add(flush);
    operations.add(delete);
    operations.add(add);

    GenericRecord message = new GenericData.Record(messageSchema);
    message.put("classReferences", classReferences);
    message.put("operations", operations);

    writer.write(message, encoder);
    encoder.flush();

    ByteArrayInputStream inputStream = new ByteArrayInputStream(out.toByteArray());
    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(messageSchema);
    while (true) {
      try {
        GenericRecord result = reader.read(null, decoder);
        System.out.println(result);

        assertThat(result).isNotNull();
        // operations
        assertThat(result.get("operations")).isNotNull().isInstanceOf(List.class);
        List<?> ops = (List<?>) result.get("operations");
        assertThat(ops).hasSize(5);

        // Flush
        assertThat(ops.get(2)).isInstanceOf(GenericRecord.class);
        GenericRecord flushOp = (GenericRecord) ops.get(2);
        assertThat(flushOp.getSchema().getName()).isEqualTo("Flush");

        // Delete
        assertThat(ops.get(3)).isInstanceOf(GenericRecord.class);
        GenericRecord deleteOp = (GenericRecord) ops.get(3);
        assertThat(deleteOp.getSchema().getName()).isEqualTo("Delete");
        Object actual = ((GenericRecord) deleteOp.get("id")).get("value");
        assertThat(actual).isInstanceOf(Long.class);
        assertThat(actual).isEqualTo(Long.valueOf(30));

        // Add
        assertThat(ops.get(4)).isInstanceOf(GenericRecord.class);
        GenericRecord addOp = (GenericRecord) ops.get(4);
        assertThat(addOp.getSchema().getName()).isEqualTo("Add");
        actual = ((GenericRecord) addOp.get("id")).get("value");
        assertThat(actual).isInstanceOf(ByteBuffer.class);
        ByteBuffer bb = (ByteBuffer) actual;
        assertThat(bb.hasArray()).isTrue();
        byte[] copy = new byte[bb.remaining()];
        bb.get(copy);
        assertThat(serializableSample).isEqualTo(copy);

        // fieldToAnalyzerMap
        assertThat(addOp.get("fieldToAnalyzerMap")).isInstanceOf(Map.class);
        assertThat((Map) addOp.get("fieldToAnalyzerMap")).hasSize(2);

        // document
        assertThat(addOp.get("document")).isNotNull();
        GenericRecord document = (GenericRecord) addOp.get("document");
        assertThat(document.get("boost")).isEqualTo(2.3f);

        // numeric fields
        assertThat(document.get("fieldables")).isNotNull().isInstanceOf(List.class);
        List<?> fields = (List<?>) document.get("fieldables");

        assertThat(fields).hasSize(9); // custom + 4 numerics + 4 fields

        field = (GenericRecord) fields.get(0);
        assertThat(field.getSchema().getName()).isEqualTo("CustomFieldable");
        field = (GenericRecord) fields.get(1);
        assertThat(field.getSchema().getName()).isEqualTo("NumericIntField");
        assertThat(field.get("value")).isEqualTo(3);
        assertNumericField(field);
        field = (GenericRecord) fields.get(2);
        assertThat(field.getSchema().getName()).isEqualTo("NumericLongField");
        assertThat(field.get("value")).isEqualTo(3l);
        assertNumericField(field);
        field = (GenericRecord) fields.get(3);
        assertThat(field.getSchema().getName()).isEqualTo("NumericFloatField");
        assertThat(field.get("value")).isEqualTo(2.3f);
        assertNumericField(field);
        field = (GenericRecord) fields.get(4);
        assertThat(field.getSchema().getName()).isEqualTo("NumericDoubleField");
        assertThat(field.get("value")).isEqualTo(2.3d);
        assertNumericField(field);

        // fields
        field = (GenericRecord) fields.get(5);
        assertThat(field.getSchema().getName()).isEqualTo("BinaryField");
        assertThat(field.get("value")).isInstanceOf(ByteBuffer.class);
        assertField(field);

        field = (GenericRecord) fields.get(6);
        assertThat(field.getSchema().getName()).isEqualTo("StringField");
        assertThat(field.get("value")).isInstanceOf(Utf8.class);
        assertTermVector(field);
        assertIndexAndStore(field);
        assertField(field);

        field = (GenericRecord) fields.get(7);
        assertThat(field.getSchema().getName()).isEqualTo("TokenStreamField");
        assertThat(field.get("value")).isInstanceOf(List.class);
        List<List<Object>> l1 = (List<List<Object>>) field.get("value");
        assertThat(l1.get(0)).as("Wrong attribute impl list").hasSize(2);
        Object object = l1.get(0).get(0);
        assertThat(object).isNotNull();
        assertTermVector(field);
        assertField(field);

        field = (GenericRecord) fields.get(8);
        assertThat(field.getSchema().getName()).isEqualTo("ReaderField");
        assertThat(field.get("value")).isInstanceOf(ByteBuffer.class);
        assertTermVector(field);
        assertField(field);
      } catch (EOFException eof) {
        break;
      } catch (Exception ex) {
        ex.printStackTrace();
        throw ex;
      }
    }
  }