@Test
public void testAvroScheme() throws IOException {
  byte[] byteArray = {
    12, 109, 121, 112, 105, 112, 101, 14, 116, 101, 115, 116, 105, 110, 103, -96, 1, 4, 4, 105,
    100, 18, 10, 115, 99, 111, 114, 101, -56, 1, 0, 8, 14, 115, 117, 98, 106, 101, 99, 116,
    16, 114, 101, 108, 105, 103, 105, 111, 110, 16, 108, 97, 115, 116, 110, 97, 109, 101, 10, 115,
    116, 97, 109, 109, 18, 102, 105, 114, 115, 116, 110, 97, 109, 101, 14, 114, 111, 115, 97, 110,
    110, 101, 8, 100, 97, 116, 101, 20, 50, 48, 49, 52, 45, 49, 48, 45, 50, 50, 0, 0
  };
  // Let the IOException fail the test instead of swallowing it; a swallowed
  // decode error would otherwise let the test pass without asserting anything.
  SpecificDatumReader<InsertMutation> reader =
      new SpecificDatumReader<>(InsertMutation.getClassSchema());
  Decoder decoder = DecoderFactory.get().binaryDecoder(byteArray, null);
  InsertMutation insertMutation = reader.read(null, decoder);

  LOG.info("ENTRY: " + insertMutation.toString());
  LOG.info("DATABASE NAME: " + insertMutation.getDatabase());
  assertEquals("mypipe", insertMutation.getDatabase().toString());
  LOG.info("TABLE NAME: " + insertMutation.getTable());
  assertEquals("testing", insertMutation.getTable().toString());
  LOG.info("ID: " + AvroSchemaUtils.getIntegerValueByKey(insertMutation.getIntegers(), "id"));
  assertEquals(9, (int) AvroSchemaUtils.getIntegerValueByKey(insertMutation.getIntegers(), "id"));
  LOG.info("FIRST NAME: "
      + AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "firstname"));
  assertEquals("rosanne",
      AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "firstname"));
  LOG.info("LAST NAME: "
      + AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "lastname"));
  assertEquals("stamm",
      AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "lastname"));
  LOG.info("SUBJECT: "
      + AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "subject"));
  assertEquals("religion",
      AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "subject"));
  LOG.info("SCORE: "
      + AvroSchemaUtils.getIntegerValueByKey(insertMutation.getIntegers(), "score"));
  assertEquals(100,
      (int) AvroSchemaUtils.getIntegerValueByKey(insertMutation.getIntegers(), "score"));
  LOG.info("DATE: " + AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "date"));
  assertEquals("2014-10-22",
      AvroSchemaUtils.getStringValueByKey(insertMutation.getStrings(), "date"));
}
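A byte array like the hard-coded one above can be produced with the matching SpecificDatumWriter. A minimal sketch, assuming the same generated InsertMutation class and an already populated insertMutation instance:

// Sketch only: serializes an InsertMutation the same way the test reads it back.
ByteArrayOutputStream out = new ByteArrayOutputStream();
SpecificDatumWriter<InsertMutation> writer =
    new SpecificDatumWriter<>(InsertMutation.getClassSchema());
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
writer.write(insertMutation, encoder);
encoder.flush();
byte[] byteArray = out.toByteArray();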
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args)
    throws Exception {
  if (args.size() != 2) {
    err.println("Expected 2 arguments: schema binary_data_file");
    err.println("Use '-' as binary_data_file for stdin.");
    return 1;
  }
  Schema schema = Schema.parse(args.get(0));
  InputStream input;
  boolean needsClosing;
  if (args.get(1).equals("-")) {
    input = stdin;
    needsClosing = false;
  } else {
    input = new FileInputStream(args.get(1));
    needsClosing = true;
  }
  try {
    DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
    Object datum = reader.read(null, DecoderFactory.get().binaryDecoder(input, null));
    DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
    JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    g.useDefaultPrettyPrinter();
    writer.write(datum, EncoderFactory.get().jsonEncoder(schema, g));
    g.flush();
    out.println();
    out.flush();
  } finally {
    if (needsClosing) {
      input.close();
    }
  }
  return 0;
}
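This has the shape of an avro-tools Tool implementation that renders a binary Avro fragment as pretty-printed JSON. A hypothetical driver might look like this; the class name BinaryFragmentToJsonTool is an assumption, not confirmed by the snippet itself:

// Hypothetical invocation of the tool above; class name is an assumption.
List<String> args = Arrays.asList("\"string\"", "datum.bin");
int rc = new BinaryFragmentToJsonTool().run(System.in, System.out, System.err, args);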
/**
 * Create a deep copy of an Avro value.
 *
 * @param source The value to be copied
 * @return The deep copy of the value
 */
@Override
public T deepCopy(T source) {
  if (source == null) {
    return null;
  }
  if (datumReader == null) {
    datumReader = createDatumReader(conf);
  }
  if (datumWriter == null) {
    datumWriter = createDatumWriter(conf);
  }
  ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
  binaryEncoder = EncoderFactory.get().binaryEncoder(byteOutStream, binaryEncoder);
  T target = createCopyTarget();
  try {
    datumWriter.write(source, binaryEncoder);
    binaryEncoder.flush();
    binaryDecoder =
        DecoderFactory.get().binaryDecoder(byteOutStream.toByteArray(), binaryDecoder);
    return datumReader.read(target, binaryDecoder);
  } catch (Exception e) {
    throw new CrunchRuntimeException("Error while deep copying avro value " + source, e);
  }
}
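The method relies on the write-then-read round trip as a generic deep-copy mechanism. The same idiom, stripped of the Crunch plumbing, can stand alone; a minimal sketch for GenericRecord, where schema and source are whatever the caller supplies:

// Self-contained illustration of the serialize/deserialize deep-copy idiom.
public static GenericRecord deepCopy(Schema schema, GenericRecord source) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  new GenericDatumWriter<GenericRecord>(schema).write(source, encoder);
  encoder.flush();
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
  return new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
}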
// It is assumed that the record has the following format:
//   byte 1    : version, static 0x1
//   bytes 2-5 : int schemaId
//   remaining bytes carry the Avro data
@Override
public GenericRecord parse(ByteBuffer bytes) {
  if (bytes.remaining() < 5) {
    throw new ParseException("record must have at least 5 bytes carrying version and schemaId");
  }
  byte version = bytes.get();
  if (version != V1) {
    throw new ParseException("found record of unsupported version [%s]", version);
  }
  int schemaId = bytes.getInt();
  Schema schemaObj = schemaObjs.get(schemaId);
  if (schemaObj == null) {
    throw new ParseException("Failed to find schema for id [%s]", schemaId);
  }
  try {
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schemaObj);
    ByteBufferInputStream inputStream =
        new ByteBufferInputStream(Collections.singletonList(bytes));
    return reader.read(null, DecoderFactory.get().binaryDecoder(inputStream, null));
  } catch (Exception e) {
    throw new ParseException(e, "Failed to decode avro message with schemaId [%s].", schemaId);
  }
}
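The producer side must prepend the same one-byte version and four-byte big-endian schema id before the Avro payload. A minimal sketch, assuming the same V1 constant and that schemaId, schema, and record come from a registry like the schemaObjs map above:

// Sketch of the matching writer for the versioned wire format.
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(V1);                                               // byte 1: version
out.write(ByteBuffer.allocate(4).putInt(schemaId).array()); // bytes 2-5: schemaId
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
encoder.flush();
byte[] payload = out.toByteArray();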
public Record fromStream(
    final InputStream stream, @Nullable final Set<URI> propertiesToDeserialize)
    throws IOException {
  final Decoder decoder = DecoderFactory.get().directBinaryDecoder(stream, null);
  final DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(Schemas.NODE);
  final GenericRecord generic = reader.read(null, decoder);
  return decodeRecord(generic, propertiesToDeserialize);
}
public static void checkBinaryJson(String json) throws Exception {
  Object node = Json.parseJson(json);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<Object> writer = new Json.ObjectWriter();
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  encoder = EncoderFactory.get().validatingEncoder(Json.SCHEMA, encoder);
  writer.write(node, encoder);
  encoder.flush();
  byte[] bytes = out.toByteArray();
  DatumReader<Object> reader = new Json.ObjectReader();
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  decoder = DecoderFactory.get().validatingDecoder(Json.SCHEMA, decoder);
  Object decoded = reader.read(null, decoder);
  assertEquals("Decoded json does not match.", Json.toString(node), Json.toString(decoded));
}
@Override
public void deserialize(byte[] data, LuceneWorksBuilder hydrator) {
  final ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
  final int majorVersion = inputStream.read();
  final int minorVersion = inputStream.read();
  final Protocol protocol = protocols.getProtocol(majorVersion, minorVersion);
  Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
  GenericDatumReader<GenericRecord> reader =
      new GenericDatumReader<>(protocol.getType("Message"));
  GenericRecord result;
  try {
    result = reader.read(null, decoder);
  } catch (IOException e) {
    throw log.unableToDeserializeAvroStream(e);
  }
  classReferences = asListOfString(result, "classReferences");
  final List<GenericRecord> operations = asListOfGenericRecords(result, "operations");
  final ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
  for (GenericRecord operation : operations) {
    String schema = operation.getSchema().getName();
    if ("OptimizeAll".equals(schema)) {
      hydrator.addOptimizeAll();
    } else if ("PurgeAll".equals(schema)) {
      hydrator.addPurgeAllLuceneWork(asClass(operation, "class"));
    } else if ("Flush".equals(schema)) {
      hydrator.addFlush();
    } else if ("Delete".equals(schema)) {
      processId(operation, hydrator);
      hydrator.addDeleteLuceneWork(asClass(operation, "class"), conversionContext);
    } else if ("DeleteByQuery".equals(schema)) {
      String entityClassName = asClass(operation, "class");
      int queryKey = asInt(operation, "key");
      DeleteByQuerySupport.StringToQueryMapper mapper =
          DeleteByQuerySupport.getStringToQueryMapper(queryKey);
      List<Utf8> stringList = asListOfString(operation, "query");
      String[] query = new String[stringList.size()];
      for (int i = 0; i < stringList.size(); ++i) {
        query[i] = stringList.get(i).toString();
      }
      hydrator.addDeleteByQueryLuceneWork(entityClassName, mapper.fromString(query));
    } else if ("Add".equals(schema)) {
      buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
      Map<String, String> analyzers = getAnalyzers(operation);
      processId(operation, hydrator);
      hydrator.addAddLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
    } else if ("Update".equals(schema)) {
      buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
      Map<String, String> analyzers = getAnalyzers(operation);
      processId(operation, hydrator);
      hydrator.addUpdateLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
    } else {
      throw log.cannotDeserializeOperation(schema);
    }
  }
}
@Test(expected = AvroTypeException.class)
public void testNoDefaultField() throws Exception {
  Schema expected =
      Schema.parse(
          "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"
              + "[{\"name\":\"f\", \"type\": \"string\"}]}");
  DatumReader<Object> in = new GenericDatumReader<Object>(ACTUAL, expected);
  in.read(null, DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(new byte[0]), null));
}
/**
 * Create a new Event Reader
 *
 * @param in the stream to read events from
 * @throws IOException
 */
@SuppressWarnings("deprecation")
public EventReader(DataInputStream in) throws IOException {
  this.in = in;
  this.version = in.readLine();
  if (!EventWriter.VERSION.equals(version)) {
    throw new IOException("Incompatible event log version: " + version);
  }
  this.schema = Schema.parse(in.readLine());
  this.reader = new SpecificDatumReader(schema);
  this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
}
public URI expandURI(final byte[] bytes) {
  Preconditions.checkNotNull(bytes);
  try {
    final InputStream stream = new ByteArrayInputStream(bytes);
    final Decoder decoder = DecoderFactory.get().directBinaryDecoder(stream, null);
    final DatumReader<Object> reader =
        new GenericDatumReader<Object>(Schemas.COMPRESSED_IDENTIFIER);
    final Object generic = reader.read(null, decoder);
    return (URI) decodeNode(generic);
  } catch (final IOException ex) {
    throw new Error("Unexpected exception (!): " + ex.getMessage(), ex);
  }
}
public static void checkBlockingBinary(
    Schema schema, Object datum, DatumWriter<Object> writer, DatumReader<Object> reader)
    throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.setSchema(schema);
  Encoder encoder = EncoderFactory.get().blockingBinaryEncoder(out, null);
  writer.write(datum, encoder);
  encoder.flush();
  byte[] data = out.toByteArray();
  reader.setSchema(schema);
  Object decoded = reader.read(null, DecoderFactory.get().binaryDecoder(data, null));
  assertEquals("Decoded data does not match.", datum, decoded);
}
private static void checkDefault(String schemaJson, String defaultJson, Object defaultValue)
    throws Exception {
  String recordJson =
      "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
          + "\"type\":" + schemaJson + ", "
          + "\"default\":" + defaultJson + "}]}";
  Schema expected = Schema.parse(recordJson);
  DatumReader<Object> in = new GenericDatumReader<Object>(ACTUAL, expected);
  GenericData.Record record =
      (GenericData.Record) in.read(null, DecoderFactory.get().binaryDecoder(new byte[0], null));
  assertEquals("Wrong default.", defaultValue, record.get("f"));
  assertEquals("Wrong toString", expected, Schema.parse(expected.toString()));
}
private static void checkJson(Schema schema, Object datum, String json) throws Exception {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
  DatumWriter<Object> writer = new GenericDatumWriter<Object>();
  writer.setSchema(schema);
  writer.write(datum, encoder);
  encoder.flush();
  byte[] data = out.toByteArray();
  String encoded = new String(data, "UTF-8");
  assertEquals("Encoded data does not match.", json, encoded);
  DatumReader<Object> reader = new GenericDatumReader<Object>();
  reader.setSchema(schema);
  Object decoded =
      reader.read(null, DecoderFactory.get().jsonDecoder(schema, new ByteArrayInputStream(data)));
  assertEquals("Decoded data does not match.", datum, decoded);
}
private void parseTopics() {
  if (state.getProperty(TOPIC_LIST) != null) {
    byte[] data = base64.decodeBase64(state.getProperty(TOPIC_LIST));
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null);
    SpecificDatumReader<Topic> avroReader = new SpecificDatumReader<>(Topic.class);
    try { // NOSONAR
      Topic decodedTopic;
      while (!decoder.isEnd()) {
        decodedTopic = avroReader.read(null, decoder);
        LOG.debug("Loaded {}", decodedTopic);
        topicMap.put(decodedTopic.getId(), decodedTopic);
      }
    } catch (Exception e) {
      LOG.error("Unexpected exception occurred while reading information from decoder", e);
    }
  } else {
    LOG.info("No topic list found in state");
  }
}
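The state property is evidently a base64 string of back-to-back binary Topic records, which is why the reader loops until decoder.isEnd(). A hedged sketch of the writing side, assuming the same Topic class, topicMap, state, TOPIC_LIST, and commons-codec Base64 instance:

// Sketch of the matching writer; records are simply concatenated.
ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
SpecificDatumWriter<Topic> avroWriter = new SpecificDatumWriter<>(Topic.class);
for (Topic topic : topicMap.values()) {
  avroWriter.write(topic, encoder);
}
encoder.flush();
state.setProperty(TOPIC_LIST, base64.encodeToString(out.toByteArray()));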
private static void checkJson(
    Schema schema, Object datum, DatumWriter<Object> writer, DatumReader<Object> reader)
    throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
  writer.setSchema(schema);
  writer.write(datum, encoder);
  writer.write(datum, encoder); // written twice so the decoder can be read twice below
  encoder.flush();
  byte[] data = out.toByteArray();
  reader.setSchema(schema);
  Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new ByteArrayInputStream(data));
  Object decoded = reader.read(null, decoder);
  assertEquals("Decoded data does not match.", datum, decoded);
  decoded = reader.read(decoded, decoder); // second read reuses the first decoded object
  assertEquals("Decoded data does not match.", datum, decoded);
}
@Test
public void testEnumMismatch() throws Exception {
  Schema actual = Schema.parse("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
  Schema expected = Schema.parse("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<Object> writer = new GenericDatumWriter<Object>(actual);
  Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
  writer.write(new GenericData.EnumSymbol(actual, "Y"), encoder);
  writer.write(new GenericData.EnumSymbol(actual, "X"), encoder);
  encoder.flush();
  byte[] data = out.toByteArray();
  Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
  // Object, not String: the decoded values are GenericData.EnumSymbol instances.
  DatumReader<Object> in = new GenericDatumReader<Object>(actual, expected);
  assertEquals("Wrong value", new GenericData.EnumSymbol(expected, "Y"), in.read(null, decoder));
  try {
    in.read(null, decoder); // "X" has no counterpart in the reader schema
    fail("Should have thrown exception.");
  } catch (AvroTypeException e) {
    // expected
  }
}
public void serializeGeneric() throws IOException {
  // Create a datum to serialize.
  Schema schema = new Schema.Parser().parse(getClass().getResourceAsStream("/MyPair.avsc"));
  GenericRecord datum = new GenericData.Record(schema);
  datum.put("left", new Utf8("dog"));
  datum.put("right", new Utf8("cat"));

  // Serialize it.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  writer.write(datum, encoder);
  encoder.flush();
  out.close();
  System.out.println("Serialization: " + out);

  // Deserialize it.
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
  GenericRecord result = reader.read(null, decoder);
  System.out.printf("Left: %s, Right: %s\n", result.get("left"), result.get("right"));
}
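The second argument to binaryDecoder (and binaryEncoder) is a reuse parameter: passing back a previous instance avoids re-allocating buffers in a tight loop, and reader.read can likewise reuse the previous record. A small sketch of that pattern, where payloads and reader are stand-ins carried over from the example above:

// Reusing decoder and record instances across many payloads.
BinaryDecoder decoder = null;
GenericRecord record = null;
for (byte[] payload : payloads) {
  decoder = DecoderFactory.get().binaryDecoder(payload, decoder);
  record = reader.read(record, decoder);
}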
@Test
public void experimentWithAvro() throws Exception {
  String root = "org/hibernate/search/remote/codex/avro/v1_1/";
  parseSchema(root + "attribute/TokenTrackingAttribute.avro", "attribute/TokenTrackingAttribute");
  parseSchema(root + "attribute/CharTermAttribute.avro", "attribute/CharTermAttribute");
  parseSchema(root + "attribute/PayloadAttribute.avro", "attribute/PayloadAttribute");
  parseSchema(root + "attribute/KeywordAttribute.avro", "attribute/KeywordAttribute");
  parseSchema(root + "attribute/PositionIncrementAttribute.avro",
      "attribute/PositionIncrementAttribute");
  parseSchema(root + "attribute/FlagsAttribute.avro", "attribute/FlagsAttribute");
  parseSchema(root + "attribute/TypeAttribute.avro", "attribute/TypeAttribute");
  parseSchema(root + "attribute/OffsetAttribute.avro", "attribute/OffsetAttribute");
  parseSchema(root + "field/TermVector.avro", "field/TermVector");
  parseSchema(root + "field/Index.avro", "field/Index");
  parseSchema(root + "field/Store.avro", "field/Store");
  parseSchema(root + "field/TokenStreamField.avro", "field/TokenStreamField");
  parseSchema(root + "field/ReaderField.avro", "field/ReaderField");
  parseSchema(root + "field/StringField.avro", "field/StringField");
  parseSchema(root + "field/BinaryField.avro", "field/BinaryField");
  parseSchema(root + "field/NumericIntField.avro", "field/NumericIntField");
  parseSchema(root + "field/NumericLongField.avro", "field/NumericLongField");
  parseSchema(root + "field/NumericFloatField.avro", "field/NumericFloatField");
  parseSchema(root + "field/NumericDoubleField.avro", "field/NumericDoubleField");
  parseSchema(root + "field/CustomFieldable.avro", "field/CustomFieldable");
  parseSchema(root + "Document.avro", "Document");
  parseSchema(root + "operation/Id.avro", "operation/Id");
  parseSchema(root + "operation/OptimizeAll.avro", "operation/OptimizeAll");
  parseSchema(root + "operation/PurgeAll.avro", "operation/PurgeAll");
  parseSchema(root + "operation/Flush.avro", "operation/Flush");
  parseSchema(root + "operation/Delete.avro", "operation/Delete");
  parseSchema(root + "operation/Add.avro", "operation/Add");
  parseSchema(root + "operation/Update.avro", "operation/Update");
  parseSchema(root + "Message.avro", "Message");

  String filename = root + "Works.avpr";
  Protocol protocol = parseProtocol(filename, "Works");
  final Schema termVectorSchema = protocol.getType("TermVector");
  final Schema indexSchema = protocol.getType("Index");
  final Schema storeSchema = protocol.getType("Store");
  final Schema tokenTrackingAttribute = protocol.getType("TokenTrackingAttribute");
  final Schema tokenStreamSchema = protocol.getType("TokenStreamField");
  final Schema readerSchema = protocol.getType("ReaderField");
  final Schema stringSchema = protocol.getType("StringField");
  final Schema binarySchema = protocol.getType("BinaryField");
  final Schema intFieldSchema = protocol.getType("NumericIntField");
  final Schema longFieldSchema = protocol.getType("NumericLongField");
  final Schema floatFieldSchema = protocol.getType("NumericFloatField");
  final Schema doubleFieldSchema = protocol.getType("NumericDoubleField");
  final Schema customFieldableSchema = protocol.getType("CustomFieldable");
  final Schema documentSchema = protocol.getType("Document");
  final Schema idSchema = protocol.getType("Id");
  final Schema optimizeAllSchema = protocol.getType("OptimizeAll");
  final Schema purgeAllSchema = protocol.getType("PurgeAll");
  final Schema flushSchema = protocol.getType("Flush");
  final Schema deleteSchema = protocol.getType("Delete");
  final Schema addSchema = protocol.getType("Add");
  final Schema updateSchema = protocol.getType("Update");
  Schema messageSchema = protocol.getType("Message");

  final ByteArrayOutputStream out = new ByteArrayOutputStream();
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(messageSchema);
  Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);

  byte[] serializableSample = new byte[10];
  for (int i = 0; i < 10; i++) {
    serializableSample[i] = (byte) i;
  }

  List<String> classReferences = new ArrayList<String>();
  classReferences.add(AvroTest.class.getName());

  List<GenericRecord> fieldables = new ArrayList<GenericRecord>(1);
  // custom fieldable
  GenericRecord customFieldable = new GenericData.Record(customFieldableSchema);
  customFieldable.put("instance", ByteBuffer.wrap(serializableSample));
  fieldables.add(customFieldable);
  // numeric fields
  GenericRecord numericField = createNumeric(intFieldSchema);
  numericField.put("value", 3);
  fieldables.add(numericField);
  numericField = createNumeric(longFieldSchema);
  numericField.put("value", 3L);
  fieldables.add(numericField);
  numericField = createNumeric(floatFieldSchema);
  numericField.put("value", 2.3f);
  fieldables.add(numericField);
  numericField = createNumeric(doubleFieldSchema);
  numericField.put("value", 2.3d);
  fieldables.add(numericField);
  // fields
  GenericRecord field = createField(binarySchema);
  field.put("offset", 0);
  field.put("length", 10);
  field.put("value", ByteBuffer.wrap(serializableSample));
  fieldables.add(field);

  field = createField(stringSchema);
  field.put("value", stringSchema.getName());
  field.put("store", "YES");
  field.put("index", "ANALYZED");
  field.put("termVector", "WITH_OFFSETS");
  fieldables.add(field);

  field = createField(tokenStreamSchema);
  List<List<Object>> tokens = new ArrayList<List<Object>>();
  List<Object> attrs = new ArrayList<Object>();
  tokens.add(attrs);
  GenericData.Record attr = new GenericData.Record(tokenTrackingAttribute);
  List<Integer> positions = new ArrayList<Integer>();
  positions.add(1);
  positions.add(2);
  positions.add(3);
  positions.add(4);
  attr.put("positions", positions);
  attrs.add(attr);
  attrs.add(ByteBuffer.wrap(serializableSample));
  field.put("value", tokens);
  field.put("termVector", "WITH_OFFSETS");
  fieldables.add(field);

  field = createField(readerSchema);
  field.put("value", ByteBuffer.wrap(serializableSample));
  field.put("termVector", "WITH_OFFSETS");
  fieldables.add(field);

  GenericRecord doc = new GenericData.Record(documentSchema);
  doc.put("boost", 2.3f);
  doc.put("fieldables", fieldables);

  GenericRecord add = new GenericData.Record(addSchema);
  add.put("class", classReferences.indexOf(AvroTest.class.getName()));
  GenericRecord id = new GenericData.Record(idSchema);
  id.put("value", ByteBuffer.wrap(serializableSample));
  add.put("id", id);
  add.put("document", doc);
  Map<String, String> analyzers = new HashMap<String, String>();
  analyzers.put("name", "ngram");
  analyzers.put("description", "porter");
  add.put("fieldToAnalyzerMap", analyzers);

  GenericRecord delete = new GenericData.Record(deleteSchema);
  delete.put("class", classReferences.indexOf(AvroTest.class.getName()));
  id = new GenericData.Record(idSchema);
  id.put("value", Long.valueOf(30));
  delete.put("id", id);

  GenericRecord purgeAll = new GenericData.Record(purgeAllSchema);
  purgeAll.put("class", classReferences.indexOf(AvroTest.class.getName()));
  GenericRecord optimizeAll = new GenericData.Record(optimizeAllSchema);
  GenericRecord flush = new GenericData.Record(flushSchema);

  List<GenericRecord> operations = new ArrayList<GenericRecord>(1);
  operations.add(purgeAll);
  operations.add(optimizeAll);
  operations.add(flush);
  operations.add(delete);
  operations.add(add);

  GenericRecord message = new GenericData.Record(messageSchema);
  message.put("classReferences", classReferences);
  message.put("operations", operations);

  writer.write(message, encoder);
  encoder.flush();

  ByteArrayInputStream inputStream = new ByteArrayInputStream(out.toByteArray());
  Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
  GenericDatumReader<GenericRecord> reader =
      new GenericDatumReader<GenericRecord>(messageSchema);
  while (true) {
    try {
      GenericRecord result = reader.read(null, decoder);
      System.out.println(result);
      assertThat(result).isNotNull();
      // operations
      assertThat(result.get("operations")).isNotNull().isInstanceOf(List.class);
      List<?> ops = (List<?>) result.get("operations");
      assertThat(ops).hasSize(5);
      // Flush
      assertThat(ops.get(2)).isInstanceOf(GenericRecord.class);
      GenericRecord flushOp = (GenericRecord) ops.get(2);
      assertThat(flushOp.getSchema().getName()).isEqualTo("Flush");
      // Delete
      assertThat(ops.get(3)).isInstanceOf(GenericRecord.class);
      GenericRecord deleteOp = (GenericRecord) ops.get(3);
      assertThat(deleteOp.getSchema().getName()).isEqualTo("Delete");
      Object actual = ((GenericRecord) deleteOp.get("id")).get("value");
      assertThat(actual).isInstanceOf(Long.class);
      assertThat(actual).isEqualTo(Long.valueOf(30));
      // Add
      assertThat(ops.get(4)).isInstanceOf(GenericRecord.class);
      GenericRecord addOp = (GenericRecord) ops.get(4);
      assertThat(addOp.getSchema().getName()).isEqualTo("Add");
      actual = ((GenericRecord) addOp.get("id")).get("value");
      assertThat(actual).isInstanceOf(ByteBuffer.class);
      ByteBuffer bb = (ByteBuffer) actual;
      assertThat(bb.hasArray()).isTrue();
      byte[] copy = new byte[bb.remaining()];
      bb.get(copy);
      assertThat(serializableSample).isEqualTo(copy);
      // fieldToAnalyzerMap
      assertThat(addOp.get("fieldToAnalyzerMap")).isInstanceOf(Map.class);
      assertThat((Map) addOp.get("fieldToAnalyzerMap")).hasSize(2);
      // document
      assertThat(addOp.get("document")).isNotNull();
      GenericRecord document = (GenericRecord) addOp.get("document");
      assertThat(document.get("boost")).isEqualTo(2.3f);
      // numeric fields
      assertThat(document.get("fieldables")).isNotNull().isInstanceOf(List.class);
      List<?> fields = (List<?>) document.get("fieldables");
      assertThat(fields).hasSize(9); // custom + 4 numerics + 4 fields
      field = (GenericRecord) fields.get(0);
      assertThat(field.getSchema().getName()).isEqualTo("CustomFieldable");
      field = (GenericRecord) fields.get(1);
      assertThat(field.getSchema().getName()).isEqualTo("NumericIntField");
      assertThat(field.get("value")).isEqualTo(3);
      assertNumericField(field);
      field = (GenericRecord) fields.get(2);
      assertThat(field.getSchema().getName()).isEqualTo("NumericLongField");
      assertThat(field.get("value")).isEqualTo(3L);
      assertNumericField(field);
      field = (GenericRecord) fields.get(3);
      assertThat(field.getSchema().getName()).isEqualTo("NumericFloatField");
      assertThat(field.get("value")).isEqualTo(2.3f);
      assertNumericField(field);
      field = (GenericRecord) fields.get(4);
      assertThat(field.getSchema().getName()).isEqualTo("NumericDoubleField");
      assertThat(field.get("value")).isEqualTo(2.3d);
      assertNumericField(field);
      // fields
      field = (GenericRecord) fields.get(5);
      assertThat(field.getSchema().getName()).isEqualTo("BinaryField");
      assertThat(field.get("value")).isInstanceOf(ByteBuffer.class);
      assertField(field);
      field = (GenericRecord) fields.get(6);
      assertThat(field.getSchema().getName()).isEqualTo("StringField");
      assertThat(field.get("value")).isInstanceOf(Utf8.class);
      assertTermVector(field);
      assertIndexAndStore(field);
      assertField(field);
      field = (GenericRecord) fields.get(7);
      assertThat(field.getSchema().getName()).isEqualTo("TokenStreamField");
      assertThat(field.get("value")).isInstanceOf(List.class);
      List<List<Object>> l1 = (List<List<Object>>) field.get("value");
      assertThat(l1.get(0)).as("Wrong attribute impl list").hasSize(2);
      Object object = l1.get(0).get(0);
      assertThat(object).isNotNull();
      assertTermVector(field);
      assertField(field);
      field = (GenericRecord) fields.get(8);
      assertThat(field.getSchema().getName()).isEqualTo("ReaderField");
      assertThat(field.get("value")).isInstanceOf(ByteBuffer.class);
      assertTermVector(field);
      assertField(field);
    } catch (EOFException eof) {
      break;
    } catch (Exception ex) {
      ex.printStackTrace();
      throw ex;
    }
  }
}
public Object fromStream(final InputStream stream) throws IOException {
  final Decoder decoder = DecoderFactory.get().directBinaryDecoder(stream, null);
  final DatumReader<Object> reader = new GenericDatumReader<Object>(Schemas.NODE);
  final Object generic = reader.read(null, decoder);
  return decodeNode(generic);
}
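A symmetric toStream would use the direct binary encoder against the same Schemas.NODE schema. A minimal sketch, where encodeNode is the assumed inverse of decodeNode (not shown in the snippet):

public void toStream(final OutputStream stream, final Object node) throws IOException {
  // encodeNode is assumed to map the domain object back to a generic datum.
  final Object generic = encodeNode(node);
  final Encoder encoder = EncoderFactory.get().directBinaryEncoder(stream, null);
  final DatumWriter<Object> writer = new GenericDatumWriter<Object>(Schemas.NODE);
  writer.write(generic, encoder);
  encoder.flush();
}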