// Rebuilds a single token attribute from its Avro representation and feeds it to the hydrator.
private void buildAttribute(Object element, LuceneWorksBuilder hydrator) {
    if (element instanceof GenericRecord) {
        GenericRecord record = (GenericRecord) element;
        String name = record.getSchema().getName();
        if ("TokenTrackingAttribute".equals(name)) {
            @SuppressWarnings("unchecked")
            List<Integer> positionList = (List<Integer>) record.get("positions");
            hydrator.addTokenTrackingAttribute(positionList);
        } else if ("CharTermAttribute".equals(name)) {
            hydrator.addCharTermAttribute((CharSequence) record.get("sequence"));
        } else if ("PayloadAttribute".equals(name)) {
            hydrator.addPayloadAttribute(asByteArray(record, "payload"));
        } else if ("KeywordAttribute".equals(name)) {
            hydrator.addKeywordAttribute(asBoolean(record, "isKeyword"));
        } else if ("PositionIncrementAttribute".equals(name)) {
            hydrator.addPositionIncrementAttribute(asInt(record, "positionIncrement"));
        } else if ("FlagsAttribute".equals(name)) {
            hydrator.addFlagsAttribute(asInt(record, "flags"));
        } else if ("TypeAttribute".equals(name)) {
            hydrator.addTypeAttribute(asString(record, "type"));
        } else if ("OffsetAttribute".equals(name)) {
            hydrator.addOffsetAttribute(asInt(record, "startOffset"), asInt(record, "endOffset"));
        } else {
            throw log.unknownAttributeSerializedRepresentation(name);
        }
    } else if (element instanceof ByteBuffer) {
        // Attributes without a dedicated Avro record arrive as Java-serialized bytes.
        hydrator.addSerializedAttribute(asByteArray((ByteBuffer) element));
    } else {
        throw log.unknownAttributeSerializedRepresentation(element.getClass().getName());
    }
}
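// The asByteArray/asBoolean/asInt/asLong/asFloat/asDouble/asString accessors used throughout this
// class are not part of this excerpt. The sketches below are plausible implementations, assuming
// Avro's GenericRecord getters, standard unboxing, and the usual java.util/java.nio imports in the
// enclosing file; the real helpers may differ in detail.
private byte[] asByteArray(ByteBuffer buffer) {
    // Copy out the remaining bytes; the buffer's backing array must not be exposed directly.
    byte[] copy = new byte[buffer.remaining()];
    buffer.get(copy);
    return copy;
}

private byte[] asByteArray(GenericRecord record, String field) {
    return asByteArray((ByteBuffer) record.get(field));
}

private boolean asBoolean(GenericRecord record, String field) {
    return (Boolean) record.get(field);
}

private int asInt(GenericRecord record, String field) {
    return (Integer) record.get(field);
}

private long asLong(GenericRecord record, String field) {
    return (Long) record.get(field);
}

private float asFloat(GenericRecord record, String field) {
    return (Float) record.get(field);
}

private double asDouble(GenericRecord record, String field) {
    return (Double) record.get(field);
}

private String asString(GenericRecord record, String field) {
    // Avro decodes strings as org.apache.avro.util.Utf8, so convert explicitly.
    return record.get(field).toString();
}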
// Extracts the id of the targeted document; ids may be Java-serialized blobs, Avro strings,
// or other Serializable values.
private void processId(GenericRecord operation, LuceneWorksBuilder hydrator) {
    GenericRecord id = (GenericRecord) operation.get("id");
    Object value = id.get("value");
    if (value instanceof ByteBuffer) {
        hydrator.addIdAsJavaSerialized(asByteArray((ByteBuffer) value));
    } else if (value instanceof Utf8) {
        hydrator.addId(value.toString());
    } else {
        // The remaining variants map directly to Serializable values.
        hydrator.addId((Serializable) value);
    }
}
// A token stream field is serialized as a list of tokens, each of which is a list of attributes.
private void buildAttributes(GenericRecord record, String field, LuceneWorksBuilder hydrator) {
    @SuppressWarnings("unchecked")
    List<List<?>> tokens = (List<List<?>>) record.get(field);
    for (List<?> token : tokens) {
        for (Object attribute : token) {
            buildAttribute(attribute, hydrator);
        }
        hydrator.addToken();
    }
}
// Entry point: reads the protocol version header, decodes the Avro "Message" record, then
// replays each contained operation against the hydrator.
@Override
public void deserialize(byte[] data, LuceneWorksBuilder hydrator) {
    final ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
    // The first two bytes of the payload carry the serialization protocol version.
    final int majorVersion = inputStream.read();
    final int minorVersion = inputStream.read();
    final Protocol protocol = protocols.getProtocol(majorVersion, minorVersion);
    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(protocol.getType("Message"));
    GenericRecord result;
    try {
        result = reader.read(null, decoder);
    } catch (IOException e) {
        throw log.unableToDeserializeAvroStream(e);
    }

    classReferences = asListOfString(result, "classReferences");
    final List<GenericRecord> operations = asListOfGenericRecords(result, "operations");
    final ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
    for (GenericRecord operation : operations) {
        String schema = operation.getSchema().getName();
        if ("OptimizeAll".equals(schema)) {
            hydrator.addOptimizeAll();
        } else if ("PurgeAll".equals(schema)) {
            hydrator.addPurgeAllLuceneWork(asClass(operation, "class"));
        } else if ("Flush".equals(schema)) {
            hydrator.addFlush();
        } else if ("Delete".equals(schema)) {
            processId(operation, hydrator);
            hydrator.addDeleteLuceneWork(asClass(operation, "class"), conversionContext);
        } else if ("DeleteByQuery".equals(schema)) {
            String entityClassName = asClass(operation, "class");
            int queryKey = asInt(operation, "key");
            DeleteByQuerySupport.StringToQueryMapper mapper =
                    DeleteByQuerySupport.getStringToQueryMapper(queryKey);
            // Avro decodes strings as Utf8; convert the query parts to plain Strings.
            List<Utf8> stringList = asListOfString(operation, "query");
            String[] query = new String[stringList.size()];
            for (int i = 0; i < stringList.size(); ++i) {
                query[i] = stringList.get(i).toString();
            }
            hydrator.addDeleteByQueryLuceneWork(entityClassName, mapper.fromString(query));
        } else if ("Add".equals(schema)) {
            buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
            Map<String, String> analyzers = getAnalyzers(operation);
            processId(operation, hydrator);
            hydrator.addAddLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
        } else if ("Update".equals(schema)) {
            buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
            Map<String, String> analyzers = getAnalyzers(operation);
            processId(operation, hydrator);
            hydrator.addUpdateLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
        } else {
            throw log.cannotDeserializeOperation(schema);
        }
    }
}
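// deserialize() also relies on a few structural accessors plus getAnalyzers(), none of which are
// shown in this excerpt. A hedged sketch, assuming the schema stores class names as Avro strings
// and the analyzer mapping under a "fieldToAnalyzerMap" field with Utf8 keys and values (both are
// assumptions about the surrounding class):
private String asClass(GenericRecord record, String field) {
    return record.get(field).toString();
}

@SuppressWarnings("unchecked")
private List<Utf8> asListOfString(GenericRecord record, String field) {
    return (List<Utf8>) record.get(field);
}

@SuppressWarnings("unchecked")
private List<GenericRecord> asListOfGenericRecords(GenericRecord record, String field) {
    return (List<GenericRecord>) record.get(field);
}

private GenericRecord asGenericRecord(GenericRecord record, String field) {
    return (GenericRecord) record.get(field);
}

private Map<String, String> getAnalyzers(GenericRecord operation) {
    Map<?, ?> utf8Map = (Map<?, ?>) operation.get("fieldToAnalyzerMap");
    if (utf8Map == null) {
        return null;
    }
    // Convert Utf8 keys and values into plain Strings for the hydrator.
    Map<String, String> analyzers = new HashMap<>(utf8Map.size());
    for (Map.Entry<?, ?> entry : utf8Map.entrySet()) {
        analyzers.put(entry.getKey().toString(), entry.getValue().toString());
    }
    return analyzers;
}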
// Replays every serialized fieldable of a document against the hydrator, dispatching on the
// Avro schema name of each field record.
private void buildLuceneDocument(GenericRecord document, LuceneWorksBuilder hydrator) {
    hydrator.defineDocument();
    List<GenericRecord> fieldables = asListOfGenericRecords(document, "fieldables");
    for (GenericRecord field : fieldables) {
        String schema = field.getSchema().getName();
        if ("CustomFieldable".equals(schema)) {
            hydrator.addFieldable(asByteArray(field, "instance"));
        } else if ("NumericIntField".equals(schema)) {
            hydrator.addIntNumericField(
                    asInt(field, "value"),
                    asString(field, "name"),
                    asInt(field, "precisionStep"),
                    asStore(field),
                    asBoolean(field, "indexed"),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("NumericFloatField".equals(schema)) {
            hydrator.addFloatNumericField(
                    asFloat(field, "value"),
                    asString(field, "name"),
                    asInt(field, "precisionStep"),
                    asStore(field),
                    asBoolean(field, "indexed"),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("NumericLongField".equals(schema)) {
            hydrator.addLongNumericField(
                    asLong(field, "value"),
                    asString(field, "name"),
                    asInt(field, "precisionStep"),
                    asStore(field),
                    asBoolean(field, "indexed"),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("NumericDoubleField".equals(schema)) {
            hydrator.addDoubleNumericField(
                    asDouble(field, "value"),
                    asString(field, "name"),
                    asInt(field, "precisionStep"),
                    asStore(field),
                    asBoolean(field, "indexed"),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("BinaryField".equals(schema)) {
            hydrator.addFieldWithBinaryData(
                    asString(field, "name"),
                    asByteArray(field, "value"),
                    asInt(field, "offset"),
                    asInt(field, "length")
            );
        } else if ("StringField".equals(schema)) {
            hydrator.addFieldWithStringData(
                    asString(field, "name"),
                    asString(field, "value"),
                    asStore(field),
                    asIndex(field),
                    asTermVector(field),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("TokenStreamField".equals(schema)) {
            buildAttributes(field, "value", hydrator);
            hydrator.addFieldWithTokenStreamData(
                    asString(field, "name"),
                    asTermVector(field),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("ReaderField".equals(schema)) {
            hydrator.addFieldWithSerializableReaderData(
                    asString(field, "name"),
                    asByteArray(field, "value"),
                    asTermVector(field),
                    asFloat(field, "boost"),
                    asBoolean(field, "omitNorms"),
                    asBoolean(field, "omitTermFreqAndPositions")
            );
        } else if ("BinaryDocValuesField".equals(schema)) {
            hydrator.addDocValuesFieldWithBinaryData(
                    asString(field, "name"),
                    asString(field, "type"),
                    asByteArray(field, "value"),
                    asInt(field, "offset"),
                    asInt(field, "length")
            );
        } else if ("NumericDocValuesField".equals(schema)) {
            hydrator.addDocValuesFieldWithNumericData(
                    asString(field, "name"),
                    asString(field, "type"),
                    asLong(field, "value")
            );
        } else {
            throw log.cannotDeserializeField(schema);
        }
    }
}
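// asStore(), asIndex() and asTermVector() decode the Avro enum symbols describing field options.
// A minimal sketch, assuming the symbols match the SerializableStore / SerializableIndex /
// SerializableTermVector constants of the serialization SPI and the field names used here; the
// actual decoding in the surrounding class may differ.
private SerializableStore asStore(GenericRecord field) {
    return SerializableStore.valueOf(field.get("store").toString());
}

private SerializableIndex asIndex(GenericRecord field) {
    return SerializableIndex.valueOf(field.get("index").toString());
}

private SerializableTermVector asTermVector(GenericRecord field) {
    return SerializableTermVector.valueOf(field.get("termVector").toString());
}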