  /**
   * Resolves the mapped type of {@code fieldName} and delegates to the matching type
   * specific parser (date, geo point or number); unknown fields and unsupported field
   * types are rejected with a {@link ParsingException}.
   */
  private AbstractDistanceScoreFunction parseVariable(
      String fieldName, XContentParser parser, QueryShardContext context, MultiValueMode mode)
      throws IOException {
    // the field must exist, else we cannot read the value for the doc later
    MappedFieldType fieldType = context.fieldMapper(fieldName);
    if (fieldType == null) {
      throw new ParsingException(parser.getTokenLocation(), "unknown field [{}]", fieldName);
    }

    // dates, geo points and plain numbers each need type specific handling
    parser.nextToken();
    if (fieldType instanceof DateFieldMapper.DateFieldType) {
      return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode);
    } else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
      return parseGeoVariable(
          parser, context, (BaseGeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
    } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
      return parseNumberVariable(
          parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode);
    } else {
      throw new ParsingException(
          parser.getTokenLocation(),
          "field [{}] is of type [{}], but only numeric types are supported.",
          fieldName,
          fieldType);
    }
  }
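
  /**
   * Builds a match query of the requested type (boolean, phrase or phrase prefix) for the
   * given field and value. Untokenized fields are turned into plain term queries unless an
   * analyzer was forced, and the configured zero-terms query is returned when analysis
   * produces no tokens.
   */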
  public Query parse(Type type, String fieldName, Object value) throws IOException {
    final String field;
    MappedFieldType fieldType = context.fieldMapper(fieldName);
    if (fieldType != null) {
      field = fieldType.name();
    } else {
      field = fieldName;
    }

    /*
     * If the user forced an analyzer we really don't care if they are
     * searching a type that wants term queries to be used with query string
     * because the QueryBuilder will take care of it. If they haven't forced
     * an analyzer then types like NumberFieldType that want terms with
     * query string will blow up because their analyzer isn't capable of
     * passing through QueryBuilder.
     */
    boolean noForcedAnalyzer = this.analyzer == null;
    if (fieldType != null && fieldType.tokenized() == false && noForcedAnalyzer) {
      return termQuery(fieldType, value);
    }

    Analyzer analyzer = getAnalyzer(fieldType);
    assert analyzer != null;
    MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
    builder.setEnablePositionIncrements(this.enablePositionIncrements);

    Query query = null;
    switch (type) {
      case BOOLEAN:
        if (commonTermsCutoff == null) {
          query = builder.createBooleanQuery(field, value.toString(), occur);
        } else {
          query =
              builder.createCommonTermsQuery(
                  field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
        }
        break;
      case PHRASE:
        query = builder.createPhraseQuery(field, value.toString(), phraseSlop);
        break;
      case PHRASE_PREFIX:
        query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions);
        break;
      default:
        throw new IllegalStateException("No type found for [" + type + "]");
    }

    if (query == null) {
      return zeroTermsQuery();
    } else {
      return query;
    }
  }
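
  /**
   * Checks that {@code SliceBuilder#toFilter} picks a terms-based slice for the default
   * {@code _uid} field, a doc-values slice for fields with numeric doc values, rejects
   * fields without doc values, and distributes slices across shards as expected.
   */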
  public void testToFilter() throws IOException {
    Directory dir = new RAMDirectory();
    try (IndexWriter writer =
        new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
      writer.commit();
    }
    QueryShardContext context = mock(QueryShardContext.class);
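    // slicing on the default _uid field (no doc values) should use a TermsSliceQuery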
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName(UidFieldMapper.NAME);
      fieldType.setHasDocValues(false);
      when(context.fieldMapper(UidFieldMapper.NAME)).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      SliceBuilder builder = new SliceBuilder(5, 10);
      Query query = builder.toFilter(context, 0, 1);
      assertThat(query, instanceOf(TermsSliceQuery.class));

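      // repeated calls, including after reopening the reader, should yield an equal query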
      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      try (IndexReader newReader = DirectoryReader.open(dir)) {
        when(context.getIndexReader()).thenReturn(newReader);
        assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      }
    }

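    // slicing on a numeric field that has doc values should use a DocValuesSliceQuery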
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName("field_doc_values");
      fieldType.setHasDocValues(true);
      fieldType.setDocValuesType(DocValuesType.SORTED_NUMERIC);
      when(context.fieldMapper("field_doc_values")).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      IndexNumericFieldData fd = mock(IndexNumericFieldData.class);
      when(context.getForField(fieldType)).thenReturn(fd);
      SliceBuilder builder = new SliceBuilder("field_doc_values", 5, 10);
      Query query = builder.toFilter(context, 0, 1);
      assertThat(query, instanceOf(DocValuesSliceQuery.class));

      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      try (IndexReader newReader = DirectoryReader.open(dir)) {
        when(context.getIndexReader()).thenReturn(newReader);
        assertThat(builder.toFilter(context, 0, 1), equalTo(query));
      }

      // numSlices > numShards: each slice should be assigned to exactly one shard
      int numSlices = randomIntBetween(10, 100);
      int numShards = randomIntBetween(1, 9);
      Map<Integer, AtomicInteger> numSliceMap = new HashMap<>();
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (q instanceof TermsSliceQuery || q instanceof MatchAllDocsQuery) {
            AtomicInteger count = numSliceMap.computeIfAbsent(j, k -> new AtomicInteger(0));
            count.incrementAndGet();
            if (q instanceof MatchAllDocsQuery) {
              assertThat(count.get(), equalTo(1));
            }
          } else {
            assertThat(q, instanceOf(MatchNoDocsQuery.class));
          }
        }
      }
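      // in total, each slice should have been assigned to exactly one shard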
      int total = 0;
      for (Map.Entry<Integer, AtomicInteger> e : numSliceMap.entrySet()) {
        total += e.getValue().get();
      }
      assertThat(total, equalTo(numSlices));

      // numShards > numSlices: every shard should be targeted by exactly one slice
      numShards = randomIntBetween(4, 100);
      numSlices = randomIntBetween(2, numShards - 1);
      List<Integer> targetShards = new ArrayList<>();
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (q instanceof MatchNoDocsQuery == false) {
            assertThat(q, instanceOf(MatchAllDocsQuery.class));
            targetShards.add(j);
          }
        }
      }
      assertThat(targetShards.size(), equalTo(numShards));
      assertThat(new HashSet<>(targetShards).size(), equalTo(numShards));

      // numShards == numSlices: slice i should match all documents on shard i only
      numShards = randomIntBetween(2, 10);
      numSlices = numShards;
      for (int i = 0; i < numSlices; i++) {
        for (int j = 0; j < numShards; j++) {
          SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
          Query q = slice.toFilter(context, j, numShards);
          if (i == j) {
            assertThat(q, instanceOf(MatchAllDocsQuery.class));
          } else {
            assertThat(q, instanceOf(MatchNoDocsQuery.class));
          }
        }
      }
    }

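    // a field without doc values cannot back a slice and should be rejected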
    try (IndexReader reader = DirectoryReader.open(dir)) {
      MappedFieldType fieldType =
          new MappedFieldType() {
            @Override
            public MappedFieldType clone() {
              return null;
            }

            @Override
            public String typeName() {
              return null;
            }

            @Override
            public Query termQuery(Object value, @Nullable QueryShardContext context) {
              return null;
            }
          };
      fieldType.setName("field_without_doc_values");
      when(context.fieldMapper("field_without_doc_values")).thenReturn(fieldType);
      when(context.getIndexReader()).thenReturn(reader);
      SliceBuilder builder = new SliceBuilder("field_without_doc_values", 5, 10);
      IllegalArgumentException exc =
          expectThrows(IllegalArgumentException.class, () -> builder.toFilter(context, 0, 1));
      assertThat(exc.getMessage(), containsString("cannot load numeric doc values"));
    }
  }
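
  // A minimal usage sketch (an assumption, not part of the original test): a SliceBuilder is
  // normally attached to the search source of a scrolled search, and toFilter(...) is what
  // turns it into the per-shard query asserted above, e.g.
  //
  //   SearchSourceBuilder source = new SearchSourceBuilder()
  //       .query(QueryBuilders.matchAllQuery())
  //       .slice(new SliceBuilder("field_doc_values", 0, 2));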