/**
 * Parses the decay function body for a date field. {@code scale} is mandatory;
 * {@code origin} defaults to the current time and {@code offset} to zero ("0d")
 * when they are not specified.
 */
private AbstractDistanceScoreFunction parseDateVariable(
      XContentParser parser,
      QueryShardContext context,
      DateFieldMapper.DateFieldType dateFieldType,
      MultiValueMode mode)
      throws IOException {
    XContentParser.Token token;
    String parameterName = null;
    String scaleString = null;
    String originString = null;
    String offsetString = "0d";
    double decay = 0.5;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        parameterName = parser.currentName();
      } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
        scaleString = parser.text();
      } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
        originString = parser.text();
      } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
        decay = parser.doubleValue();
      } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
        offsetString = parser.text();
      } else {
        throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
      }
    }
    long origin;
    if (originString == null) {
      origin = context.nowInMillis();
    } else {
      origin = dateFieldType.parseToMilliseconds(originString, false, null, null);
    }

    if (scaleString == null) {
      throw new ElasticsearchParseException(
          "[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
    }
    TimeValue val =
        TimeValue.parseTimeValue(
            scaleString,
            TimeValue.timeValueHours(24),
            DecayFunctionParser.class.getSimpleName() + ".scale");
    double scale = val.getMillis();
    val =
        TimeValue.parseTimeValue(
            offsetString,
            TimeValue.timeValueHours(24),
            DecayFunctionParser.class.getSimpleName() + ".offset");
    double offset = val.getMillis();
    IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
    return new NumericFieldDataScoreFunction(
        origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
  }
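
// Hedged usage sketch, not part of the original class: how the scale/offset strings
// above resolve to milliseconds. TimeValue.parseTimeValue understands suffixes such as
// "ms", "s", "m", "h" and "d"; the TimeValue argument is only a fallback for null
// input, which cannot happen for scale here since a null scale already throws above.
private static void timeValueParsingSketch() {
  TimeValue tenDays = TimeValue.parseTimeValue("10d", TimeValue.timeValueHours(24), "example.scale");
  assert tenDays.getMillis() == 10L * 24 * 60 * 60 * 1000; // 864_000_000 ms
  TimeValue fallback = TimeValue.parseTimeValue(null, TimeValue.timeValueHours(24), "example.offset");
  assert fallback.getMillis() == 24L * 60 * 60 * 1000; // default applies only to null input
}
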
/**
 * Parses the decay function body for a numeric field. Both {@code origin} and
 * {@code scale} are mandatory because neither has a sensible default.
 */
private AbstractDistanceScoreFunction parseNumberVariable(
     XContentParser parser,
     QueryShardContext context,
     NumberFieldMapper.NumberFieldType fieldType,
     MultiValueMode mode)
     throws IOException {
   XContentParser.Token token;
   String parameterName = null;
   double scale = 0;
   double origin = 0;
   double decay = 0.5;
   double offset = 0.0d;
   boolean scaleFound = false;
    boolean originFound = false;
   while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
     if (token == XContentParser.Token.FIELD_NAME) {
       parameterName = parser.currentName();
     } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
       scale = parser.doubleValue();
       scaleFound = true;
     } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
       decay = parser.doubleValue();
     } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
       origin = parser.doubleValue();
        originFound = true;
     } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
       offset = parser.doubleValue();
     } else {
       throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
     }
   }
    if (!scaleFound || !originFound) {
     throw new ElasticsearchParseException(
         "both [{}] and [{}] must be set for numeric fields.",
         DecayFunctionBuilder.SCALE,
         DecayFunctionBuilder.ORIGIN);
   }
   IndexNumericFieldData numericFieldData = context.getForField(fieldType);
   return new NumericFieldDataScoreFunction(
       origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
 }
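
// Illustrative sketch, not the production implementation: the Gaussian variant of
// getDecayFunction() turns the parsed origin/scale/offset/decay into a score roughly
// like this. sigma^2 is chosen as -scale^2 / (2 * ln(decay)) so that a value lying
// exactly `scale` beyond `origin` + `offset` scores exactly `decay`.
private static double gaussDecaySketch(
    double origin, double scale, double offset, double decay, double fieldValue) {
  double sigmaSquared = -scale * scale / (2.0 * Math.log(decay));
  double distance = Math.max(0.0, Math.abs(fieldValue - origin) - offset);
  return Math.exp(-(distance * distance) / (2.0 * sigmaSquared));
}
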
/**
 * Parses the decay function body for a geo_point field. Distances are normalized to
 * the default unit (meters); {@code origin} and {@code scale} are mandatory.
 */
private AbstractDistanceScoreFunction parseGeoVariable(
     XContentParser parser,
     QueryShardContext context,
     BaseGeoPointFieldMapper.GeoPointFieldType fieldType,
     MultiValueMode mode)
     throws IOException {
   XContentParser.Token token;
   String parameterName = null;
    // start from null so the mandatory-origin check below can actually fire;
    // initializing to new GeoPoint() would silently default a missing origin to (0, 0)
    GeoPoint origin = null;
   String scaleString = null;
   String offsetString = "0km";
   double decay = 0.5;
   while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
     if (token == XContentParser.Token.FIELD_NAME) {
       parameterName = parser.currentName();
     } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
       scaleString = parser.text();
     } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
       origin = GeoUtils.parseGeoPoint(parser);
     } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
       decay = parser.doubleValue();
     } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
       offsetString = parser.text();
     } else {
       throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
     }
   }
   if (origin == null || scaleString == null) {
     throw new ElasticsearchParseException(
         "[{}] and [{}] must be set for geo fields.",
         DecayFunctionBuilder.ORIGIN,
         DecayFunctionBuilder.SCALE);
   }
   double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
   double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
   IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
   return new GeoFieldDataScoreFunction(
       origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
 }
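
// Hedged usage sketch, not part of the original class: DistanceUnit.DEFAULT is meters,
// so the geo scale/offset strings above are normalized to meters. parse() accepts a
// unit suffix and converts the value into the receiving unit.
private static void distanceParsingSketch() {
  double meters = DistanceUnit.DEFAULT.parse("1km", DistanceUnit.DEFAULT);
  assert meters == 1000.0; // "1km" normalized to meters
  double bare = DistanceUnit.DEFAULT.parse("500", DistanceUnit.DEFAULT);
  assert bare == 500.0; // no suffix: interpreted in the default unit (meters)
}
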
  /**
   * Checks that SliceBuilder#toFilter chooses the right query type per field
   * and partitions documents across shards correctly.
   */
  public void testToFilter() throws IOException {
    Directory dir = new RAMDirectory();
    // commit an empty index so that DirectoryReader.open below succeeds
    try (IndexWriter writer =
        new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
      writer.commit();
    }
    QueryShardContext context = mock(QueryShardContext.class);
    // slicing on _uid, which has no doc values, must fall back to a TermsSliceQuery
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName(UidFieldMapper.NAME);
      fieldType.setHasDocValues(false);
      when(context.fieldMapper(UidFieldMapper.NAME)).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      SliceBuilder builder = new SliceBuilder(5, 10);
      Query query = builder.toFilter(context, 0, 1);
      assertThat(query, instanceOf(TermsSliceQuery.class));

      // the filter is deterministic: resolving the same slice again yields an equal
      // query, even against a fresh reader over the same index
      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      try (IndexReader newReader = DirectoryReader.open(dir)) {
        when(context.getIndexReader()).thenReturn(newReader);
        assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      }
    }

    // slicing on a field with numeric doc values must produce a DocValuesSliceQuery
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName("field_doc_values");
      fieldType.setHasDocValues(true);
      fieldType.setDocValuesType(DocValuesType.SORTED_NUMERIC);
      when(context.fieldMapper("field_doc_values")).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      IndexNumericFieldData fd = mock(IndexNumericFieldData.class);
      when(context.getForField(fieldType)).thenReturn(fd);
      SliceBuilder builder = new SliceBuilder("field_doc_values", 5, 10);
      Query query = builder.toFilter(context, 0, 1);
      assertThat(query, instanceOf(DocValuesSliceQuery.class));

      // same determinism check as for the _uid-based slice above
      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      try (IndexReader newReader = DirectoryReader.open(dir)) {
        when(context.getIndexReader()).thenReturn(newReader);
        assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      }

      // numSlices > numShards
      int numSlices = randomIntBetween(10, 100);
      int numShards = randomIntBetween(1, 9);
      Map<Integer, AtomicInteger> numSliceMap = new HashMap<>();
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (q instanceof TermsSliceQuery || q instanceof MatchAllDocsQuery) {
            AtomicInteger count = numSliceMap.computeIfAbsent(j, k -> new AtomicInteger(0));
            count.incrementAndGet();
            if (q instanceof MatchAllDocsQuery) {
              // a shard can be fully matched by at most one slice
              assertThat(count.get(), equalTo(1));
            }
          } else {
            assertThat(q, instanceOf(MatchNoDocsQuery.class));
          }
        }
      }
      // the numSlices slices produce exactly numSlices non-empty shard filters in total
      int total = numSliceMap.values().stream().mapToInt(AtomicInteger::get).sum();
      assertThat(total, equalTo(numSlices));

      // numShards > numSlices
      numShards = randomIntBetween(4, 100);
      numSlices = randomIntBetween(2, numShards - 1);
      List<Integer> targetShards = new ArrayList<>();
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (q instanceof MatchNoDocsQuery == false) {
            assertThat(q, instanceOf(MatchAllDocsQuery.class));
            targetShards.add(j);
          }
        }
      }
      // every shard is targeted, and each by exactly one slice
      assertThat(targetShards.size(), equalTo(numShards));
      assertThat(new HashSet<>(targetShards).size(), equalTo(numShards));

      // numShards == numSlices
      numShards = randomIntBetween(2, 10);
      numSlices = numShards;
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (i == j) {
            assertThat(q, instanceOf(MatchAllDocsQuery.class));
          } else {
            assertThat(q, instanceOf(MatchNoDocsQuery.class));
          }
        }
      }
    }

    // slicing on a mapped field without doc values is rejected with a clear error
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName("field_without_doc_values");
      when(context.fieldMapper("field_without_doc_values")).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      SliceBuilder builder = new SliceBuilder("field_without_doc_values", 5, 10);
      IllegalArgumentException exc =
          expectThrows(IllegalArgumentException.class, () -> builder.toFilter(context, 0, 1));
      assertThat(exc.getMessage(), containsString("cannot load numeric doc values"));
    }
  }
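
  // Hedged usage sketch, not part of the original test: in production a slice is
  // attached to a scroll search so independent workers consume disjoint partitions of
  // one result set. Assumes the 5.x transport client API (Client, SearchResponse,
  // QueryBuilders imports) and a hypothetical index name.
  void slicedScrollSketch(Client client) {
    SearchResponse firstPage =
        client
            .prepareSearch("my_index") // hypothetical index
            .setScroll(TimeValue.timeValueMinutes(1))
            .slice(new SliceBuilder("field_doc_values", 0, 2)) // worker 0 of 2
            .setQuery(QueryBuilders.matchAllQuery())
            .get();
    assert firstPage.getScrollId() != null;
    // each worker then loops on prepareSearchScroll(...) with its own scroll id until no
    // hits remain; the two workers never see the same document
  }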