/**
 * Loads the binary doc values for {@code field}: copies the raw term bytes into
 * paged memory and returns a reader that slices out one value per document.
 * Fixed-length fields are addressed by multiplication; variable-length fields
 * decode a monotonic per-document address table.
 */
private BinaryDocValues loadBinary(FieldInfo field) throws IOException {
  final BinaryEntry entry = binaries.get(field.name);
  data.seek(entry.offset);

  // Slurp the raw concatenated value bytes into paged memory, then freeze for random access.
  final PagedBytes pagedBytes = new PagedBytes(16);
  pagedBytes.copy(data, entry.numBytes);
  final PagedBytes.Reader reader = pagedBytes.freeze(true);
  if (!merging) {
    binaryInfo.put(field.name, reader);
  }

  if (entry.minLength != entry.maxLength) {
    // Variable-length values: read the packed end-offset table that follows the bytes.
    final MonotonicBlockPackedReader addresses = MonotonicBlockPackedReader.of(
        data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
    if (!merging) {
      addressInfo.put(field.name, addresses);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + addresses.ramBytesUsed());
    }
    return new BinaryDocValues() {
      @Override
      public BytesRef get(int docID) {
        // addresses.get(d) is the END offset of doc d; doc 0 implicitly starts at 0.
        final long start = docID == 0 ? 0 : addresses.get(docID - 1);
        final long end = addresses.get(docID);
        final BytesRef result = new BytesRef();
        reader.fillSlice(result, start, (int) (end - start));
        return result;
      }
    };
  }

  // Fixed-length fast path: every document's value occupies exactly minLength bytes.
  final int length = entry.minLength;
  if (!merging) {
    ramBytesUsed.addAndGet(reader.ramBytesUsed());
  }
  return new BinaryDocValues() {
    @Override
    public BytesRef get(int docID) {
      final BytesRef result = new BytesRef();
      // Widen before multiplying so the byte offset cannot overflow an int.
      reader.fillSlice(result, (long) docID * length, length);
      return result;
    }
  };
}
/**
 * Returns the term bytes for the given ordinal by filling the shared
 * {@code scratch} ref from the paged bytes pool.
 *
 * <p>NOTE: the returned {@link BytesRef} is reused across calls — callers must
 * copy it if they need to hold onto the value.
 */
@Override
public BytesRef getValueByOrd(long ord) {
  assert ord != BytesValues.WithOrdinals.MISSING_ORDINAL;
  // Map the ordinal to its start offset in the bytes pool, then fill the scratch ref.
  final long offset = termOrdToBytesOffset.get(ord);
  bytes.fill(scratch, offset);
  return scratch;
}