/**
 * Create a new instance configured with the provided FieldType options. See {@link
 * #DEFAULT_FIELDTYPE}. A field type is used to articulate the desired options (namely
 * pointValues, docValues, stored). Legacy numerics is configurable this way too.
 */
public PointVectorStrategy(SpatialContext ctx, String fieldNamePrefix, FieldType fieldType) {
  super(ctx, fieldNamePrefix);
  this.fieldNameX = fieldNamePrefix + SUFFIX_X;
  this.fieldNameY = fieldNamePrefix + SUFFIX_Y;

  int numPairs = 0;
  if ((this.hasStored = fieldType.stored())) {
    numPairs++;
  }
  if ((this.hasDocVals = fieldType.docValuesType() != DocValuesType.NONE)) {
    numPairs++;
  }
  if ((this.hasPointVals = fieldType.pointDimensionCount() > 0)) {
    numPairs++;
  }

  if (fieldType.indexOptions() != IndexOptions.NONE && fieldType.numericType() != null) {
    if (hasPointVals) {
      throw new IllegalArgumentException(
          "pointValues and LegacyNumericType are mutually exclusive");
    }
    if (fieldType.numericType() != FieldType.LegacyNumericType.DOUBLE) {
      throw new IllegalArgumentException(
          getClass() + " does not support " + fieldType.numericType());
    }
    numPairs++;
    legacyNumericFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED);
    legacyNumericFieldType.setNumericPrecisionStep(fieldType.numericPrecisionStep());
    legacyNumericFieldType.freeze();
  } else {
    legacyNumericFieldType = null;
  }

  this.fieldsLen = numPairs * 2;
}
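The following is a minimal usage sketch, not part of the source above, showing how the constructor's FieldType options translate into indexable fields. It assumes the Lucene spatial-extras module with spatial4j on the classpath (package paths as in Lucene 6+/spatial4j 0.6+); the class name, the field name prefix "myPoint", and the coordinates are illustrative choices.

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.spatial.vector.PointVectorStrategy;
import org.locationtech.spatial4j.context.SpatialContext;

public class PointVectorStrategyUsageSketch {
  public static void main(String[] args) {
    // Illustrative context; any spatial4j SpatialContext works here.
    SpatialContext ctx = SpatialContext.GEO;

    // Enable all three options the constructor inspects: pointValues, docValues, stored.
    FieldType fieldType = new FieldType();
    fieldType.setDimensions(1, Double.BYTES);          // pointValues
    fieldType.setDocValuesType(DocValuesType.NUMERIC); // docValues
    fieldType.setStored(true);                         // stored
    fieldType.freeze();

    PointVectorStrategy strategy = new PointVectorStrategy(ctx, "myPoint", fieldType);

    // Each enabled option contributes an x and a y field (the constructor sizes
    // fieldsLen = numPairs * 2), so three enabled options yield six fields per point.
    Field[] fields = strategy.createIndexableFields(ctx.makePoint(-73.98, 40.75));
    System.out.println(fields.length + " fields created for prefix 'myPoint'");
  }
}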
static {
  // Default: pointValues + docValues
  FieldType type = new FieldType();
  type.setDimensions(1, Double.BYTES); // pointValues (assume Double)
  type.setDocValuesType(DocValuesType.NUMERIC); // docValues
  type.setStored(false);
  type.freeze();
  DEFAULT_FIELDTYPE = type;

  // Legacy default: legacyNumerics
  type = new FieldType();
  type.setIndexOptions(IndexOptions.DOCS);
  type.setNumericType(FieldType.LegacyNumericType.DOUBLE);
  type.setNumericPrecisionStep(8); // same as solr default
  type.setDocValuesType(DocValuesType.NONE); // no docValues!
  type.setStored(false);
  type.freeze();
  LEGACY_FIELDTYPE = type;
}
@BeforeClass
public static void beforeClass() throws Exception {
  noDocs = atLeast(4096);
  distance = (1L << 60) / noDocs;
  directory = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random(),
          directory,
          newIndexWriterConfig(new MockAnalyzer(random()))
              .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
              .setMergePolicy(newLogMergePolicy()));

  final FieldType storedLong = new FieldType(LegacyLongField.TYPE_NOT_STORED);
  storedLong.setStored(true);
  storedLong.freeze();

  final FieldType storedLong8 = new FieldType(storedLong);
  storedLong8.setNumericPrecisionStep(8);

  final FieldType storedLong4 = new FieldType(storedLong);
  storedLong4.setNumericPrecisionStep(4);

  final FieldType storedLong6 = new FieldType(storedLong);
  storedLong6.setNumericPrecisionStep(6);

  final FieldType storedLong2 = new FieldType(storedLong);
  storedLong2.setNumericPrecisionStep(2);

  LegacyLongField field8 = new LegacyLongField("field8", 0L, storedLong8),
      field6 = new LegacyLongField("field6", 0L, storedLong6),
      field4 = new LegacyLongField("field4", 0L, storedLong4),
      field2 = new LegacyLongField("field2", 0L, storedLong2);

  Document doc = new Document();
  // add fields that have a distance, to test general functionality
  doc.add(field8);
  doc.add(field6);
  doc.add(field4);
  doc.add(field2);

  // Add a series of noDocs docs with increasing long values, by updating the fields
  for (int l = 0; l < noDocs; l++) {
    long val = distance * l + startOffset;
    field8.setLongValue(val);
    field6.setLongValue(val);
    field4.setLongValue(val);
    field2.setLongValue(val);

    val = l - (noDocs / 2);
    writer.addDocument(doc);
  }

  Map<String, Type> map = new HashMap<>();
  map.put("field2", Type.LEGACY_LONG);
  map.put("field4", Type.LEGACY_LONG);
  map.put("field6", Type.LEGACY_LONG);
  map.put("field8", Type.LEGACY_LONG);
  reader = UninvertingReader.wrap(writer.getReader(), map);
  searcher = newSearcher(reader);
  writer.close();
}
@BeforeClass
public static void beforeClass() throws Exception {
  noDocs = atLeast(4096);
  distance = (1L << 60) / noDocs;
  directory = newDirectory();
  RandomIndexWriter writer =
      new RandomIndexWriter(
          random(),
          directory,
          newIndexWriterConfig(new MockAnalyzer(random()))
              .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
              .setMergePolicy(newLogMergePolicy()));

  final FieldType storedLong = new FieldType(LegacyLongField.TYPE_NOT_STORED);
  storedLong.setStored(true);
  storedLong.freeze();

  final FieldType storedLong8 = new FieldType(storedLong);
  storedLong8.setNumericPrecisionStep(8);

  final FieldType storedLong4 = new FieldType(storedLong);
  storedLong4.setNumericPrecisionStep(4);

  final FieldType storedLong6 = new FieldType(storedLong);
  storedLong6.setNumericPrecisionStep(6);

  final FieldType storedLong2 = new FieldType(storedLong);
  storedLong2.setNumericPrecisionStep(2);

  final FieldType storedLongNone = new FieldType(storedLong);
  storedLongNone.setNumericPrecisionStep(Integer.MAX_VALUE);

  final FieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;

  final FieldType unstoredLong8 = new FieldType(unstoredLong);
  unstoredLong8.setNumericPrecisionStep(8);

  final FieldType unstoredLong6 = new FieldType(unstoredLong);
  unstoredLong6.setNumericPrecisionStep(6);

  final FieldType unstoredLong4 = new FieldType(unstoredLong);
  unstoredLong4.setNumericPrecisionStep(4);

  final FieldType unstoredLong2 = new FieldType(unstoredLong);
  unstoredLong2.setNumericPrecisionStep(2);

  LegacyLongField field8 = new LegacyLongField("field8", 0L, storedLong8),
      field6 = new LegacyLongField("field6", 0L, storedLong6),
      field4 = new LegacyLongField("field4", 0L, storedLong4),
      field2 = new LegacyLongField("field2", 0L, storedLong2),
      fieldNoTrie = new LegacyLongField("field" + Integer.MAX_VALUE, 0L, storedLongNone),
      ascfield8 = new LegacyLongField("ascfield8", 0L, unstoredLong8),
      ascfield6 = new LegacyLongField("ascfield6", 0L, unstoredLong6),
      ascfield4 = new LegacyLongField("ascfield4", 0L, unstoredLong4),
      ascfield2 = new LegacyLongField("ascfield2", 0L, unstoredLong2);

  Document doc = new Document();
  // add fields that have a distance, to test general functionality
  doc.add(field8);
  doc.add(field6);
  doc.add(field4);
  doc.add(field2);
  doc.add(fieldNoTrie);

  // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct
  // splitting of range and inclusive/exclusive
  doc.add(ascfield8);
  doc.add(ascfield6);
  doc.add(ascfield4);
  doc.add(ascfield2);

  // Add a series of noDocs docs with increasing long values, by updating the fields
  for (int l = 0; l < noDocs; l++) {
    long val = distance * l + startOffset;
    field8.setLongValue(val);
    field6.setLongValue(val);
    field4.setLongValue(val);
    field2.setLongValue(val);
    fieldNoTrie.setLongValue(val);

    val = l - (noDocs / 2);
    ascfield8.setLongValue(val);
    ascfield6.setLongValue(val);
    ascfield4.setLongValue(val);
    ascfield2.setLongValue(val);
    writer.addDocument(doc);
  }

  reader = writer.getReader();
  searcher = newSearcher(reader);
  writer.close();
}
public void testNumericField() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  final int numDocs = atLeast(500);
  final Number[] answers = new Number[numDocs];
  final NumericType[] typeAnswers = new NumericType[numDocs];
  for (int id = 0; id < numDocs; id++) {
    Document doc = new Document();
    final Field nf;
    final Field sf;
    final Number answer;
    final NumericType typeAnswer;
    if (random().nextBoolean()) {
      // float/double
      if (random().nextBoolean()) {
        final float f = random().nextFloat();
        answer = Float.valueOf(f);
        nf = new FloatField("nf", f, Field.Store.NO);
        sf = new StoredField("nf", f);
        typeAnswer = NumericType.FLOAT;
      } else {
        final double d = random().nextDouble();
        answer = Double.valueOf(d);
        nf = new DoubleField("nf", d, Field.Store.NO);
        sf = new StoredField("nf", d);
        typeAnswer = NumericType.DOUBLE;
      }
    } else {
      // int/long
      if (random().nextBoolean()) {
        final int i = random().nextInt();
        answer = Integer.valueOf(i);
        nf = new IntField("nf", i, Field.Store.NO);
        sf = new StoredField("nf", i);
        typeAnswer = NumericType.INT;
      } else {
        final long l = random().nextLong();
        answer = Long.valueOf(l);
        nf = new LongField("nf", l, Field.Store.NO);
        sf = new StoredField("nf", l);
        typeAnswer = NumericType.LONG;
      }
    }
    doc.add(nf);
    doc.add(sf);
    answers[id] = answer;
    typeAnswers[id] = typeAnswer;
    FieldType ft = new FieldType(IntField.TYPE_STORED);
    ft.setNumericPrecisionStep(Integer.MAX_VALUE);
    doc.add(new IntField("id", id, ft));
    w.addDocument(doc);
  }
  final DirectoryReader r = w.getReader();
  w.close();
  assertEquals(numDocs, r.numDocs());
  for (AtomicReaderContext ctx : r.leaves()) {
    final AtomicReader sub = ctx.reader();
    final FieldCache.Ints ids = FieldCache.DEFAULT.getInts(sub, "id", false);
    for (int docID = 0; docID < sub.numDocs(); docID++) {
      final Document doc = sub.document(docID);
      final Field f = (Field) doc.getField("nf");
      assertTrue("got f=" + f, f instanceof StoredField);
      assertEquals(answers[ids.get(docID)], f.numericValue());
    }
  }
  r.close();
  dir.close();
}