@Override
public void checkCompatibility(
    MappedFieldType fieldType, List<String> conflicts, boolean strict) {
  // Let the superclass record its own incompatibilities first.
  super.checkCompatibility(fieldType, conflicts, strict);
  CompletionFieldType that = (CompletionFieldType) fieldType;
  // Common prefix for every conflict message produced below.
  final String prefix = "mapper [" + names().fullName() + "] has different ";
  boolean samePayloads =
      analyzingSuggestLookupProvider.hasPayloads()
          == that.analyzingSuggestLookupProvider.hasPayloads();
  if (!samePayloads) {
    conflicts.add(prefix + "payload values");
  }
  boolean samePositionIncrements =
      analyzingSuggestLookupProvider.getPreservePositionsIncrements()
          == that.analyzingSuggestLookupProvider.getPreservePositionsIncrements();
  if (!samePositionIncrements) {
    conflicts.add(prefix + "'preserve_position_increments' values");
  }
  boolean sameSeparators =
      analyzingSuggestLookupProvider.getPreserveSep()
          == that.analyzingSuggestLookupProvider.getPreserveSep();
  if (!sameSeparators) {
    conflicts.add(prefix + "'preserve_separators' values");
  }
  // Context mappings are compared structurally, not by reference.
  if (!ContextMapping.mappingsAreEqual(getContextMapping(), that.getContextMapping())) {
    conflicts.add(prefix + "'context_mapping' values");
  }
}
 @Override
 public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
   // Delegate the generic merge work, then verify completion-specific settings agree.
   super.merge(mergeWith, mergeResult);
   CompletionFieldMapper other = (CompletionFieldMapper) mergeWith;
   // Common prefix for every conflict message produced below.
   final String prefix = "mapper [" + fieldType.names().fullName() + "] has different ";
   if (fieldType().analyzingSuggestLookupProvider.hasPayloads()
       != other.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
     mergeResult.addConflict(prefix + "payload values");
   }
   if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()
       != other.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
     mergeResult.addConflict(prefix + "'preserve_position_increments' values");
   }
   if (fieldType().analyzingSuggestLookupProvider.getPreserveSep()
       != other.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
     mergeResult.addConflict(prefix + "'preserve_separators' values");
   }
   if (!ContextMapping.mappingsAreEqual(
       fieldType().getContextMapping(), other.fieldType().getContextMapping())) {
     mergeResult.addConflict(prefix + "'context_mapping' values");
   }
   // Only mutate state when this is a real merge, not a dry run.
   if (!mergeResult.simulate()) {
     this.maxInputLength = other.maxInputLength;
   }
 }
// Example n.º 3
    /**
     * Parse a set of {@link ContextQuery} according to a given mapping.
     *
     * @param mappings mappings defined by the suggest field, keyed by context name
     * @param parser parser holding the settings of the queries. The parser's current token is
     *     assumed to hold an object whose keys match the names in {@code mappings}.
     * @return list of context queries, ordered like {@code mappings.values()}; entries with no
     *     corresponding query in the input are {@code null}
     * @throws IOException if something unexpected happened on the underlying stream
     * @throws ElasticsearchParseException if the list of queries could not be parsed
     */
    public static List<ContextQuery> parseQueries(
        Map<String, ContextMapping> mappings, XContentParser parser)
        throws IOException, ElasticsearchParseException {

      // Collect the queries by context name as they appear in the input object.
      Map<String, ContextQuery> parsed = new HashMap<>();
      if (parser.currentToken() == Token.START_OBJECT) {
        for (Token t = parser.nextToken(); t != Token.END_OBJECT; t = parser.nextToken()) {
          final String name = parser.text();
          final ContextMapping mapping = mappings.get(name);
          if (mapping == null) {
            throw new ElasticsearchParseException("no mapping defined for [" + name + "]");
          }
          // Advance past the field name onto the query value before delegating.
          parser.nextToken();
          parsed.put(name, mapping.parseQuery(name, parser));
        }
      }

      // Emit the queries in mapping-declaration order so positions line up with the mappings.
      List<ContextQuery> result = Lists.newArrayListWithExpectedSize(mappings.size());
      for (ContextMapping mapping : mappings.values()) {
        result.add(parsed.get(mapping.name));
      }
      return result;
    }
 @Override
 public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
   // Delegate the generic merge, then flag completion-specific setting mismatches.
   super.merge(mergeWith, mergeContext);
   CompletionFieldMapper other = (CompletionFieldMapper) mergeWith;
   // Common prefix for every conflict message produced below.
   final String prefix = "mapper [" + names.fullName() + "] has different ";
   if (payloads != other.payloads) {
     mergeContext.addConflict(prefix + "payload values");
   }
   if (preservePositionIncrements != other.preservePositionIncrements) {
     mergeContext.addConflict(prefix + "'preserve_position_increments' values");
   }
   if (preserveSeparators != other.preserveSeparators) {
     mergeContext.addConflict(prefix + "'preserve_separators' values");
   }
   if (!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) {
     mergeContext.addConflict(prefix + "'context_mapping' values");
   }
   // Only mutate state when this is a real merge, not a simulated one.
   if (!mergeContext.mergeFlags().simulate()) {
     this.maxInputLength = other.maxInputLength;
   }
 }
  @Override
  public Mapper parse(ParseContext context) throws IOException {
    // Parses a completion field value — either a bare string (a single input) or an
    // object with "input"/"output"/"weight"/"payload"/"context" sub-fields — and adds
    // the resulting suggestion field(s) to the document. Always returns null: fields
    // are added to context.doc() directly rather than via a returned sub-mapper.
    XContentParser parser = context.parser();
    XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_NULL) {
      throw new MapperParsingException(
          "completion field [" + fieldType().names().fullName() + "] does not support null values");
    }

    String surfaceForm = null;   // optional "output": display form shared by all inputs
    BytesRef payload = null;     // optional "payload": opaque bytes stored with the suggestion
    long weight = -1;            // optional "weight": -1 means "not provided"
    List<String> inputs = Lists.newArrayListWithExpectedSize(4);

    // Per-context configuration; stays null until a "context" object is parsed.
    SortedMap<String, ContextConfig> contextConfig = null;

    if (token == XContentParser.Token.VALUE_STRING) {
      // Short form: the whole value is a single input string.
      // NOTE: multi-fields are parsed only on this branch, not on the object form.
      inputs.add(parser.text());
      multiFields.parse(this, context);
    } else {
      // Object form: walk the sub-fields. currentFieldName carries the last seen
      // FIELD_NAME across iterations so value tokens know which field they belong to.
      String currentFieldName = null;
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
          // Reject unknown sub-fields up front.
          if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
            throw new IllegalArgumentException(
                "Unknown field name["
                    + currentFieldName
                    + "], must be one of "
                    + ALLOWED_CONTENT_FIELD_NAMES);
          }
        } else if (Fields.CONTEXT.equals(currentFieldName)) {
          // "context" must be an object mapping context names to their settings.
          SortedMap<String, ContextConfig> configs = Maps.newTreeMap();

          if (token == Token.START_OBJECT) {
            while ((token = parser.nextToken()) != Token.END_OBJECT) {
              String name = parser.text();
              ContextMapping mapping = fieldType().getContextMapping().get(name);
              if (mapping == null) {
                throw new ElasticsearchParseException("context [{}] is not defined", name);
              } else {
                // Advance onto the context's value before delegating to the mapping.
                token = parser.nextToken();
                configs.put(name, mapping.parseContext(context, parser));
              }
            }
            // Fill in defaults for any mapped context the document did not supply.
            contextConfig = Maps.newTreeMap();
            for (ContextMapping mapping : fieldType().getContextMapping().values()) {
              ContextConfig config = configs.get(mapping.name());
              contextConfig.put(mapping.name(), config == null ? mapping.defaultConfig() : config);
            }
          } else {
            throw new ElasticsearchParseException("context must be an object");
          }
        } else if (Fields.CONTENT_FIELD_NAME_PAYLOAD.equals(currentFieldName)) {
          if (!isStoringPayloads()) {
            throw new MapperException("Payloads disabled in mapping");
          }
          if (token == XContentParser.Token.START_OBJECT) {
            // Object payload: re-serialize the structure in the same content type
            // and store its raw bytes.
            XContentBuilder payloadBuilder =
                XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
            payload = payloadBuilder.bytes().toBytesRef();
            payloadBuilder.close();
          } else if (token.isValue()) {
            // Scalar payload: take the UTF-8 bytes directly.
            payload = parser.utf8BytesOrNull();
          } else {
            throw new MapperException("payload doesn't support type " + token);
          }
        } else if (token == XContentParser.Token.VALUE_STRING) {
          // String values for "output", "input", or a string-encoded "weight".
          if (Fields.CONTENT_FIELD_NAME_OUTPUT.equals(currentFieldName)) {
            surfaceForm = parser.text();
          }
          if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
            inputs.add(parser.text());
          }
          if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
            Number weightValue;
            try {
              weightValue = Long.parseLong(parser.text());
            } catch (NumberFormatException e) {
              throw new IllegalArgumentException(
                  "Weight must be a string representing a numeric value, but was ["
                      + parser.text()
                      + "]");
            }
            weight =
                weightValue.longValue(); // always parse a long to make sure we don't get overflow
            checkWeight(weight);
          }
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
          // Numeric "weight": only integral number types are accepted.
          if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
            NumberType numberType = parser.numberType();
            if (NumberType.LONG != numberType && NumberType.INT != numberType) {
              throw new IllegalArgumentException(
                  "Weight must be an integer, but was [" + parser.numberValue() + "]");
            }
            weight = parser.longValue(); // always parse a long to make sure we don't get overflow
            checkWeight(weight);
          }
        } else if (token == XContentParser.Token.START_ARRAY) {
          // "input" may also be an array of strings.
          if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
              inputs.add(parser.text());
            }
          }
        }
      }
    }

    // No "context" object was supplied: use the default config of every mapped context.
    if (contextConfig == null) {
      contextConfig = Maps.newTreeMap();
      for (ContextMapping mapping : fieldType().getContextMapping().values()) {
        contextConfig.put(mapping.name(), mapping.defaultConfig());
      }
    }

    final ContextMapping.Context ctx = new ContextMapping.Context(contextConfig, context.doc());

    payload = payload == null ? EMPTY : payload;
    if (surfaceForm == null) { // no surface form use the input
      // Each non-empty input becomes its own suggestion, carrying its own payload.
      for (String input : inputs) {
        if (input.length() == 0) {
          continue;
        }
        BytesRef suggestPayload =
            fieldType()
                .analyzingSuggestLookupProvider
                .buildPayload(new BytesRef(input), weight, payload);
        context.doc().add(getCompletionField(ctx, input, suggestPayload));
      }
    } else {
      // With an explicit output, build the payload once from the surface form and
      // share it across all non-empty inputs.
      BytesRef suggestPayload =
          fieldType()
              .analyzingSuggestLookupProvider
              .buildPayload(new BytesRef(surfaceForm), weight, payload);
      for (String input : inputs) {
        if (input.length() == 0) {
          continue;
        }
        context.doc().add(getCompletionField(ctx, input, suggestPayload));
      }
    }
    return null;
  }