public Filter getInnerFilter() throws IOException {
    if (filterParsed) {
      return innerFilter;
    } else {
      if (path == null) {
        throw new QueryParsingException(parseContext.index(), "[nested] requires 'path' field");
      }
      if (!filterFound) {
        throw new QueryParsingException(
            parseContext.index(), "[nested] requires either 'query' or 'filter' field");
      }

      setPathLevel();
      XContentParser old = parseContext.parser();
      try {
        XContentParser innerParser = XContentHelper.createParser(source);
        parseContext.parser(innerParser);
        innerFilter = parseContext.parseInnerFilter();
        filterParsed = true;
        return innerFilter;
      } finally {
        resetPathLevel();
        parseContext.parser(old);
      }
    }
  }
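The lazy re-parse above assumes the raw filter body was captured into source during the first pass. A minimal sketch of that capture step, using the same copyCurrentStructure idiom as the percolator example further down (the outer parser variable is an assumption):

// Hedged sketch: copy the pending filter object off the outer parser into a
// BytesReference so getInnerFilter() can re-parse it later.
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser); // consumes the nested filter object
BytesReference source = builder.bytes();
builder.close();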
  public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException {
    String json =
        "{\n"
            + "    \"VDcvDuFjE\" : [ \"7umzzv8eychg\", \"dmdgmt5z13uw\", "
            + "    \"ezu09wxw6v4c\", \"kc7s3515p6k6\", \"jgeuvjwrmfzn\", \"kcpcfj7ruyf8\" ],\n"
            + "    \"unit\" : \"m\",\n"
            + "    \"distance_type\" : \"sloppy_arc\",\n"
            + "    \"mode\" : \"MAX\",\n"
            + "    \"nested_filter\" : {\n"
            + "      \"ids\" : {\n"
            + "        \"type\" : [ ],\n"
            + "        \"values\" : [ ],\n"
            + "        \"boost\" : 5.711116\n"
            + "      }\n"
            + "    },\n"
            + "    \"validation_method\" : \"STRICT\"\n"
            + "  }";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();

    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

    GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(context, json);
    assertEquals(
        "[-19.700583312660456, -2.8225036337971687, "
            + "31.537466906011105, -74.63590376079082, "
            + "43.71844606474042, -5.548660643398762, "
            + "-37.20467280596495, 38.71751043945551, "
            + "-69.44606635719538, 84.25200328230858, "
            + "-39.03717711567879, 44.74099852144718]",
        Arrays.toString(result.points()));
  }
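Each geohash in the array decodes to one point, and the expected string flattens the six points into alternating latitude/longitude values. A hedged decoding sketch, assuming the GeoPoint.fromGeohash factory is available in this codebase:

// Hedged sketch: the first geohash should decode to the first lat/lon pair
// in the expected string above.
GeoPoint first = GeoPoint.fromGeohash("7umzzv8eychg");
double lat = first.lat(); // expected ~ -19.700583312660456
double lon = first.lon(); // expected ~ -2.8225036337971687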
 private static GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception {
   XContentParser parser = XContentHelper.createParser(sortBuilder.bytes());
   QueryParseContext parseContext =
       new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT);
   parser.nextToken();
   return GeoDistanceSortBuilder.fromXContent(parseContext, null);
 }
 static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
   QueryParseContext context =
       new QueryParseContext(new Index("test"), SearchContext.current().queryParserService());
   XContentParser parser = XContentHelper.createParser(queryBuilder.buildAsBytes());
   context.reset(parser);
   return context.parseInnerQuery();
 }
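A hypothetical call site for this helper, assuming a test running inside an active SearchContext (the builder and matcher choices are illustrative):

// Hedged usage sketch: build a query, serialize it, and parse it back to a Lucene query.
Query parsed = parseQuery(QueryBuilders.termQuery("user", "kimchy"));
assertThat(parsed, instanceOf(TermQuery.class));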
Example #5
  public void testRegisteredQueries() throws IOException {
    SearchModule module = new SearchModule(Settings.EMPTY, false, emptyList());
    List<String> allSupportedQueries = new ArrayList<>();
    Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES);
    Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES);
    String[] supportedQueries = allSupportedQueries.toArray(new String[allSupportedQueries.size()]);
    assertThat(module.getQueryParserRegistry().getNames(), containsInAnyOrder(supportedQueries));

    IndicesQueriesRegistry indicesQueriesRegistry = module.getQueryParserRegistry();
    XContentParser dummyParser = XContentHelper.createParser(new BytesArray("{}"));
    for (String queryName : supportedQueries) {
      indicesQueriesRegistry.lookup(
          queryName, ParseFieldMatcher.EMPTY, dummyParser.getTokenLocation());
    }

    for (String queryName : NON_DEPRECATED_QUERIES) {
      QueryParser<?> queryParser =
          indicesQueriesRegistry.lookup(
              queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation());
      assertThat(queryParser, notNullValue());
    }
    for (String queryName : DEPRECATED_QUERIES) {
      try {
        indicesQueriesRegistry.lookup(
            queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation());
        fail("query is deprecated, getQueryParser should have failed in strict mode");
      } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), containsString("Deprecated field [" + queryName + "] used"));
      }
    }
  }
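Outside the exhaustive loops, a single lookup has the same shape; a sketch for one built-in name (assuming "match" is among the non-deprecated queries):

// Hedged sketch: resolve one registered parser by name; unknown names throw.
QueryParser<?> matchParser =
    indicesQueriesRegistry.lookup("match", ParseFieldMatcher.STRICT, dummyParser.getTokenLocation());
assertThat(matchParser, notNullValue());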
Example #6
 private MetaData readMetaData(byte[] data) throws IOException {
   XContentParser parser = null;
   try {
     parser = XContentHelper.createParser(data, 0, data.length);
     return MetaData.Builder.fromXContent(parser);
   } finally {
     if (parser != null) {
       parser.close();
     }
   }
 }
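Because XContentParser is Closeable (the aliases(...) example below already relies on this), the null-guarded finally block can be collapsed into try-with-resources; an equivalent sketch:

// Equivalent sketch of readMetaData using try-with-resources.
private MetaData readMetaData(byte[] data) throws IOException {
  try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) {
    return MetaData.Builder.fromXContent(parser);
  }
}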
Example #7
 /** Sets the aliases that will be associated with the index when it gets created */
 public CreateIndexRequest aliases(BytesReference source) {
   try (XContentParser parser = XContentHelper.createParser(source)) {
     // move to the first alias
     parser.nextToken();
     while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
       alias(Alias.fromXContent(parser));
     }
     return this;
   } catch (IOException e) {
     throw new ElasticsearchParseException("Failed to parse aliases", e);
   }
 }
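A hypothetical call site: the source must be a JSON object whose keys are alias definitions (the index name and routing value here are illustrative):

// Hedged usage sketch: register two aliases from raw JSON.
CreateIndexRequest request = new CreateIndexRequest("logs-2016-06");
request.aliases(new BytesArray("{ \"alias1\" : { }, \"alias2\" : { \"routing\" : \"shard-1\" } }"));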
 private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder)
     throws Exception {
   Settings settings =
       Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
   ParseContext.InternalParseContext ctx =
       new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0));
   SourceToParse source = SourceToParse.source(builder.bytes());
   ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
   assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
   ctx.parser().nextToken();
   return DocumentParser.parseObject(ctx, mapper.root());
 }
 @Nullable
 private IndexMetaData loadIndex(String index) {
   long highestVersion = -1;
   IndexMetaData indexMetaData = null;
   for (File indexLocation : nodeEnv.indexLocations(new Index(index))) {
     File stateDir = new File(indexLocation, "_state");
     if (!stateDir.exists() || !stateDir.isDirectory()) {
       continue;
     }
      // now, iterate over the current versions and find the latest one
     File[] stateFiles = stateDir.listFiles();
     if (stateFiles == null) {
       continue;
     }
     for (File stateFile : stateFiles) {
       if (!stateFile.getName().startsWith("state-")) {
         continue;
       }
       try {
         long version = Long.parseLong(stateFile.getName().substring("state-".length()));
         if (version > highestVersion) {
           byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
           if (data.length == 0) {
             logger.debug(
                 "[{}]: no data for [" + stateFile.getAbsolutePath() + "], ignoring...", index);
             continue;
           }
           XContentParser parser = null;
           try {
             parser = XContentHelper.createParser(data, 0, data.length);
             parser.nextToken(); // move to START_OBJECT
             indexMetaData = IndexMetaData.Builder.fromXContent(parser);
             highestVersion = version;
           } finally {
             if (parser != null) {
               parser.close();
             }
           }
         }
       } catch (Exception e) {
         logger.debug(
             "[{}]: failed to read [" + stateFile.getAbsolutePath() + "], ignoring...", e, index);
       }
     }
   }
   return indexMetaData;
 }
Example #10
  Query parsePercolatorDocument(String id, BytesReference source) {
    String type = null;
    BytesReference querySource = null;

    XContentParser parser = null;
    try {
      parser = XContentHelper.createParser(source);
      String currentFieldName = null;
      XContentParser.Token token = parser.nextToken(); // move to START_OBJECT
      if (token != XContentParser.Token.START_OBJECT) {
        throw new ElasticsearchException(
            "failed to parse query [" + id + "], not starting with OBJECT");
      }
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
          if ("query".equals(currentFieldName)) {
            if (type != null) {
              return parseQuery(type, null, parser);
            } else {
              XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
              builder.copyCurrentStructure(parser);
              querySource = builder.bytes();
              builder.close();
            }
          } else {
            parser.skipChildren();
          }
        } else if (token == XContentParser.Token.START_ARRAY) {
          parser.skipChildren();
        } else if (token.isValue()) {
          if ("type".equals(currentFieldName)) {
            type = parser.text();
          }
        }
      }
      return parseQuery(type, querySource, null);
    } catch (Exception e) {
      throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
    } finally {
      if (parser != null) {
        parser.close();
      }
    }
  }
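A minimal source the method above accepts is an object with a registered query and, optionally, a type; a hedged usage sketch:

// Hedged usage sketch: a percolator entry carrying an inline match_all query.
BytesReference source = new BytesArray("{ \"type\" : \"doc\", \"query\" : { \"match_all\" : { } } }");
Query query = parsePercolatorDocument("my-percolator-id", source);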
Example #11
  private MetaData loadGlobalState() {
    long highestVersion = -1;
    MetaData metaData = null;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("global-")) {
          continue;
        }
        try {
          long version = Long.parseLong(stateFile.getName().substring("global-".length()));
          if (version > highestVersion) {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
              logger.debug(
                  "[_global] no data for [" + stateFile.getAbsolutePath() + "], ignoring...");
              continue;
            }

            XContentParser parser = null;
            try {
              parser = XContentHelper.createParser(data, 0, data.length);
              metaData = MetaData.Builder.fromXContent(parser);
              highestVersion = version;
            } finally {
              if (parser != null) {
                parser.close();
              }
            }
          }
        } catch (Exception e) {
          logger.debug("failed to load global state from [{}]", e, stateFile.getAbsolutePath());
        }
      }
    }

    return metaData;
  }
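Both loaders key off a prefix-version filename convention ("state-" for per-index metadata above, "global-" here) and keep only the highest version seen; the extraction step in isolation:

// Hedged sketch of the version-extraction convention shared by both loaders.
String name = "global-42"; // stateFile.getName()
long version = Long.parseLong(name.substring("global-".length())); // 42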
Example #12
 public void testFromXContent() throws Exception {
   SliceBuilder sliceBuilder = randomSliceBuilder();
   XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
   if (randomBoolean()) {
     builder.prettyPrint();
   }
   builder.startObject();
   sliceBuilder.innerToXContent(builder);
   builder.endObject();
   XContentParser parser = XContentHelper.createParser(shuffleXContent(builder).bytes());
   QueryParseContext context =
       new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
   SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(context);
   assertNotSame(sliceBuilder, secondSliceBuilder);
   assertEquals(sliceBuilder, secondSliceBuilder);
   assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode());
 }
 /**
   * Test that creates a new smoothing model from a random test smoothing model and checks both
   * for equality
  */
 public void testFromXContent() throws IOException {
   SmoothingModel testModel = createTestModel();
   XContentBuilder contentBuilder =
       XContentFactory.contentBuilder(randomFrom(XContentType.values()));
   if (randomBoolean()) {
     contentBuilder.prettyPrint();
   }
   contentBuilder.startObject();
   testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
   contentBuilder.endObject();
   XContentParser parser = XContentHelper.createParser(shuffleXContent(contentBuilder).bytes());
   QueryParseContext context =
       new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT);
   parser.nextToken(); // go to start token, real parsing would do that in the outer element parser
   SmoothingModel parsedModel = fromXContent(context);
   assertNotSame(testModel, parsedModel);
   assertEquals(testModel, parsedModel);
   assertEquals(testModel.hashCode(), parsedModel.hashCode());
 }
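The SliceBuilder and SmoothingModel tests above, and the SuggestBuilder test further down, all follow one serialize/shuffle/re-parse skeleton; a generic sketch (SomeBuilder, original, and fromXContent stand in for the concrete type under test):

// Hedged skeleton of the xContent round-trip tests in this listing.
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
builder.startObject();
original.innerToXContent(builder, ToXContent.EMPTY_PARAMS); // exact signature varies per type
builder.endObject();
XContentParser parser = XContentHelper.createParser(shuffleXContent(builder).bytes());
QueryParseContext context =
    new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
SomeBuilder copy = SomeBuilder.fromXContent(context);
assertNotSame(original, copy); // a distinct instance...
assertEquals(original, copy); // ...that is value-equal
assertEquals(original.hashCode(), copy.hashCode());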
  public void testReverseOptionFailsBuildWhenInvalidGeoHashString() throws IOException {
    String json = "{\n" + "  \"reverse\" : \"false\"\n" + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();

    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

    try {
      GeoDistanceSortBuilder item = GeoDistanceSortBuilder.fromXContent(context, "");
      item.validation(GeoValidationMethod.STRICT);
      item.build(createMockShardContext());

      fail("adding reverse sorting option should fail with an exception");
    } catch (ElasticsearchParseException e) {
      assertEquals(
          "illegal latitude value [269.384765625] for [GeoDistanceSort] for field [reverse].",
          e.getMessage());
    }
  }
  /**
   * Creates a random suggestion builder, renders it to xContent, and parses it back into a new
   * instance that should be equal to the original
   */
  public void testFromXContent() throws IOException {
    Suggesters suggesters = new Suggesters(Collections.emptyMap());
    QueryParseContext context = new QueryParseContext(null);
    context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
    for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
      SuggestBuilder suggestBuilder = createTestModel();
      XContentBuilder xContentBuilder =
          XContentFactory.contentBuilder(randomFrom(XContentType.values()));
      if (randomBoolean()) {
        xContentBuilder.prettyPrint();
      }
      suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
      XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
      context.reset(parser);

      SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
      assertNotSame(suggestBuilder, secondSuggestBuilder);
      assertEquals(suggestBuilder, secondSuggestBuilder);
      assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode());
    }
  }
  public void testSortModeSumIsRejectedInJSON() throws IOException {
    String json =
        "{\n"
            + "  \"testname\" : [ {\n"
            + "    \"lat\" : -6.046997540714173,\n"
            + "    \"lon\" : -51.94128329747579\n"
            + "  } ],\n"
            + "  \"unit\" : \"m\",\n"
            + "  \"distance_type\" : \"sloppy_arc\",\n"
            + "  \"mode\" : \"SUM\"\n"
            + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();

    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

    IllegalArgumentException e =
        expectThrows(
            IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(context, ""));
    assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage());
  }
  public void testReverseOptionFailsWhenNonStringField() throws IOException {
    String json =
        "{\n"
            + "  \"testname\" : [ {\n"
            + "    \"lat\" : -6.046997540714173,\n"
            + "    \"lon\" : -51.94128329747579\n"
            + "  } ],\n"
            + "  \"reverse\" : true\n"
            + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();

    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

    try {
      GeoDistanceSortBuilder.fromXContent(context, "");
      fail("adding reverse sorting option should fail with an exception");
    } catch (ParsingException e) {
      assertEquals("Only geohashes of type string supported for field [reverse]", e.getMessage());
    }
  }
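This try/fail/catch block (like the geohash reverse test above) has a tighter expectThrows equivalent, already used by the neighboring tests; a sketch:

// Hedged sketch: the same assertion expressed with expectThrows.
ParsingException e =
    expectThrows(ParsingException.class, () -> GeoDistanceSortBuilder.fromXContent(context, ""));
assertEquals("Only geohashes of type string supported for field [reverse]", e.getMessage());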
  public void testIgnoreMalformedIsDeprecated() throws IOException {
    String json =
        "{\n"
            + "  \"testname\" : [ {\n"
            + "    \"lat\" : -6.046997540714173,\n"
            + "    \"lon\" : -51.94128329747579\n"
            + "  } ],\n"
            + "  \"unit\" : \"m\",\n"
            + "  \"distance_type\" : \"sloppy_arc\",\n"
            + "  \"mode\" : \"SUM\",\n"
            + "  \"ignore_malformed\" : true\n"
            + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();

    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

    IllegalArgumentException e =
        expectThrows(
            IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(context, ""));
    assertTrue(e.getMessage().startsWith("Deprecated field "));
  }
  private ParsedQuery parseQuery(ExplainRequest request, IndexService indexService) {
    try {
      XContentParser parser = XContentHelper.createParser(request.getSource());
      for (XContentParser.Token token = parser.nextToken();
          token != XContentParser.Token.END_OBJECT;
          token = parser.nextToken()) {
        if (token == XContentParser.Token.FIELD_NAME) {
          String fieldName = parser.currentName();
          if ("query".equals(fieldName)) {
            return indexService.queryParserService().parse(parser);
          } else if ("query_binary".equals(fieldName)) {
            byte[] querySource = parser.binaryValue();
            XContentParser qSourceParser =
                XContentFactory.xContent(querySource).createParser(querySource);
            return indexService.queryParserService().parse(qSourceParser);
          }
        }
      }
    } catch (Exception e) {
      throw new ElasticSearchException("Couldn't parse query from source.", e);
    }

    throw new ElasticSearchException("No query specified");
  }
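The query_binary branch expects the query bytes base64-encoded in a string value, which parser.binaryValue() decodes; a hypothetical way to build such a source with java.util.Base64 (Java 8+):

// Hedged sketch: wrap an inline query as a query_binary payload.
String inner = "{ \"term\" : { \"user\" : \"kimchy\" } }";
String source = "{ \"query_binary\" : \""
    + Base64.getEncoder().encodeToString(inner.getBytes(StandardCharsets.UTF_8))
    + "\" }";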
Example #20
  public void process(
      MetaData metaData,
      String aliasOrIndex,
      @Nullable MappingMetaData mappingMd,
      boolean allowIdGeneration)
      throws ElasticsearchException {
    // resolve the routing if needed
    routing(metaData.resolveIndexRouting(routing, aliasOrIndex));
    // resolve timestamp if provided externally
    if (timestamp != null) {
      timestamp =
          MappingMetaData.Timestamp.parseStringTimestamp(
              timestamp,
              mappingMd != null
                  ? mappingMd.timestamp().dateTimeFormatter()
                  : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
    }
    // extract values if needed
    if (mappingMd != null) {
      MappingMetaData.ParseContext parseContext =
          mappingMd.createParseContext(id, routing, timestamp);

      if (parseContext.shouldParse()) {
        XContentParser parser = null;
        try {
          parser = XContentHelper.createParser(source);
          mappingMd.parse(parser, parseContext);
          if (parseContext.shouldParseId()) {
            id = parseContext.id();
          }
          if (parseContext.shouldParseRouting()) {
            routing = parseContext.routing();
          }
          if (parseContext.shouldParseTimestamp()) {
            timestamp = parseContext.timestamp();
            timestamp =
                MappingMetaData.Timestamp.parseStringTimestamp(
                    timestamp, mappingMd.timestamp().dateTimeFormatter());
          }
        } catch (Exception e) {
          throw new ElasticsearchParseException(
              "failed to parse doc to extract routing/timestamp", e);
        } finally {
          if (parser != null) {
            parser.close();
          }
        }
      }

      // might as well check for routing here
      if (mappingMd.routing().required() && routing == null) {
        throw new RoutingMissingException(index, type, id);
      }

      if (parent != null && !mappingMd.hasParentField()) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    } else {
      if (parent != null) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    }

    // generate id if not already provided and id generation is allowed
    if (allowIdGeneration) {
      if (id == null) {
        id(Strings.randomBase64UUID());
        // since we generate the id, change it to CREATE
        opType(IndexRequest.OpType.CREATE);
      }
    }

    // generate timestamp if not provided, we always have one post this stage...
    if (timestamp == null) {
      timestamp = Long.toString(System.currentTimeMillis());
    }
  }
Example #21
  public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener)
      throws MapperParsingException {
    ParseContext context = cache.get();

    if (source.type() != null && !source.type().equals(this.type)) {
      throw new MapperParsingException(
          "Type mismatch, provide type ["
              + source.type()
              + "] but mapper is of type ["
              + this.type
              + "]");
    }
    source.type(this.type);

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
      context.reset(parser, new Document(), source, listener);
      // on a newly created document mapper instance, we always treat the mappers as newly added
      if (initMappersAdded) {
        context.setMappingsModified();
        initMappersAdded = false;
      }

      // will result in START_OBJECT
      int countDownTokens = 0;
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }
      boolean emptyDoc = false;
      token = parser.nextToken();
      if (token == XContentParser.Token.END_OBJECT) {
        // empty doc, we can handle it...
        emptyDoc = true;
      } else if (token != XContentParser.Token.FIELD_NAME) {
        throw new MapperParsingException(
            "Malformed content, after first object, either the type field or the actual properties should exist");
      }
      if (type.equals(parser.currentName())) {
        // first field is the same as the type, this might be because the type is provided, and the
        // object exists within it
        // or because there is a valid field that by chance is named as the type

        // Note, in this case, we only handle plain value types, an object type will be analyzed as
        // if it was the type itself
        // and other same level fields will be ignored
        token = parser.nextToken();
        countDownTokens++;
        // commented out, allow for same type with START_OBJECT, we do our best to handle it except
        // for the above corner case
        //                if (token != XContentParser.Token.START_OBJECT) {
        //                    throw new MapperException("Malformed content, a field with the same
        // name as the type must be an object with the properties/fields within it");
        //                }
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.preParse(context);
      }

      if (!emptyDoc) {
        rootObjectMapper.parse(context);
      }

      for (int i = 0; i < countDownTokens; i++) {
        parser.nextToken();
      }

      // fire up any new mappers if exists
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.postParse(context);
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.validate(context);
      }
    } catch (Throwable e) {
      // we have to fire up any new mappers even on a failure, because they
      // have been added internally to each compound mapper...
      // ... we have no option to "rollback" a change, which is very tricky in our copy on change
      // system...
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      // if it's already a mapper parsing exception, no need to wrap it...
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
      // only close the parser when it's not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = Sets.newHashSet();
      for (Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
          if (field.fieldType().indexed() && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.analyzer(),
                context.source(),
                context.mappingsModified())
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null, null);
    return doc;
  }
Example #22
  private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
    if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
      throw new IllegalArgumentException(
          "It is forbidden to index into the default mapping ["
              + MapperService.DEFAULT_MAPPING
              + "]");
    }

    ParseContext.InternalParseContext context = cache.get();

    final Mapping mapping = docMapper.mapping();
    if (source.type() != null && !source.type().equals(docMapper.type())) {
      throw new MapperParsingException(
          "Type mismatch, provide type ["
              + source.type()
              + "] but mapper is of type ["
              + docMapper.type()
              + "]");
    }
    source.type(docMapper.type());

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
      context.reset(parser, new ParseContext.Document(), source);

      // will result in START_OBJECT
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }

      boolean emptyDoc = false;
      if (mapping.root.isEnabled()) {
        token = parser.nextToken();
        if (token == XContentParser.Token.END_OBJECT) {
          // empty doc, we can handle it...
          emptyDoc = true;
        } else if (token != XContentParser.Token.FIELD_NAME) {
          throw new MapperParsingException(
              "Malformed content, after first object, either the type field or the actual properties should exist");
        }
      }

      for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
        metadataMapper.preParse(context);
      }

      if (mapping.root.isEnabled() == false) {
        // entire type is disabled
        parser.skipChildren();
      } else if (emptyDoc == false) {
        Mapper update = parseObject(context, mapping.root, true);
        if (update != null) {
          context.addDynamicMappingsUpdate(update);
        }
      }

      for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
        metadataMapper.postParse(context);
      }

      // try to parse the next token; this should be null if the object ended properly,
      // but will throw a JSON exception if the extra tokens are not valid JSON (this is handled
      // by the catch)
      if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
          && source.parser() == null
          && parser != null) {
        // only check for end of tokens if we created the parser here
        token = parser.nextToken();
        if (token != null) {
          throw new IllegalArgumentException(
              "Malformed content, found extra data after parsing: " + token);
        }
      }

    } catch (Throwable e) {
      // if it's already a mapper parsing exception, no need to wrap it...
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      // Throw a more meaningful message if the document is empty.
      if (source.source() != null && source.source().length() == 0) {
        throw new MapperParsingException("failed to parse, document is empty");
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
      // only close the parser when it's not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = new HashSet<>();
      for (ParseContext.Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
          if (field.fieldType().indexOptions() != IndexOptions.NONE
              && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

    Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
    Mapping update = null;
    if (rootDynamicUpdate != null) {
      update = mapping.mappingUpdate(rootDynamicUpdate);
    }

    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.version(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.source(),
                update)
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null);
    return doc;
  }
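The trailing-token check near the end rejects concatenated documents: after a well-formed object, nextToken() must return null. A hedged illustration:

// Hedged sketch: "{} {...}" is one object followed by extra data; the third
// nextToken() returns a non-null token, triggering the
// "Malformed content, found extra data after parsing" failure above.
XContentParser parser = XContentHelper.createParser(new BytesArray("{} {\"extra\":true}"));
parser.nextToken(); // START_OBJECT
parser.nextToken(); // END_OBJECT
XContentParser.Token trailing = parser.nextToken(); // non-null -> rejected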
Example #23
  private void pre019Upgrade() throws Exception {
    long index = -1;
    File metaDataFile = null;
    MetaData metaData = null;
    long version = -1;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        if (logger.isTraceEnabled()) {
          logger.trace("[upgrade]: processing [" + stateFile.getName() + "]");
        }
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
        if (fileIndex >= index) {
          // try and read the meta data
          try {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
              continue;
            }
            XContentParser parser = XContentHelper.createParser(data, 0, data.length);
            try {
              String currentFieldName = null;
              XContentParser.Token token = parser.nextToken();
              if (token != null) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                  if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                  } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("meta-data".equals(currentFieldName)) {
                      metaData = MetaData.Builder.fromXContent(parser);
                    }
                  } else if (token.isValue()) {
                    if ("version".equals(currentFieldName)) {
                      version = parser.longValue();
                    }
                  }
                }
              }
            } finally {
              parser.close();
            }
            index = fileIndex;
            metaDataFile = stateFile;
          } catch (IOException e) {
            logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e);
          }
        }
      }
    }
    if (metaData == null) {
      return;
    }

    logger.info(
        "found old metadata state, loading metadata from [{}] and converting to new metadata location and strucutre...",
        metaDataFile.getAbsolutePath());

    writeGlobalState(
        "upgrade", MetaData.builder().metaData(metaData).version(version).build(), null);
    for (IndexMetaData indexMetaData : metaData) {
      IndexMetaData.Builder indexMetaDataBuilder =
          IndexMetaData.newIndexMetaDataBuilder(indexMetaData).version(version);
      // set the created version to 0.18
      indexMetaDataBuilder.settings(
          ImmutableSettings.settingsBuilder()
              .put(indexMetaData.settings())
              .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_18_0));
      writeIndex("upgrade", indexMetaDataBuilder.build(), null);
    }

    // rename the old metadata state file to a backup file
    File backupFile = new File(metaDataFile.getParentFile(), "backup-" + metaDataFile.getName());
    if (!metaDataFile.renameTo(backupFile)) {
      throw new IOException(
          "failed to rename old state to backup state [" + metaDataFile.getAbsolutePath() + "]");
    }

    // delete all other old metadata state files
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        stateFile.delete();
      }
    }

    logger.info(
        "conversion to new metadata location and format done, backup create at [{}]",
        backupFile.getAbsolutePath());
  }