private ParsedDocument parseFetchedDoc(
      PercolateContext context,
      BytesReference fetchedDoc,
      IndexService documentIndexService,
      String type) {
    ParsedDocument doc = null;
    XContentParser parser = null;
    try {
      parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc);
      MapperService mapperService = documentIndexService.mapperService();
      DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(type);
      doc = docMapper.parse(source(parser).type(type).flyweight(true));

      if (context.highlight() != null) {
        doc.setSource(fetchedDoc);
      }
    } catch (Throwable e) {
      throw new ElasticsearchParseException("failed to parse request", e);
    } finally {
      if (parser != null) {
        parser.close();
      }
    }

    if (doc == null) {
      throw new ElasticsearchParseException("No doc to percolate in the request");
    }

    return doc;
  }
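
All of the snippets in this listing follow the same parser lifecycle: obtain an XContentParser from the raw bytes, read tokens or a map, and close the parser in a finally block. A minimal sketch of the same pattern compacted with try-with-resources, assuming XContentParser implements Closeable in the version at hand:

  Map<String, Object> parseToMap(BytesReference bytes) throws IOException {
    // Same create/parse/close lifecycle as above; this assumes XContentParser
    // is AutoCloseable, which may not hold in older Elasticsearch versions.
    try (XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes)) {
      return parser.map();
    }
  }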
    public static void toXContent(
        IndexTemplateMetaData indexTemplateMetaData,
        XContentBuilder builder,
        ToXContent.Params params)
        throws IOException {
      builder.startObject(indexTemplateMetaData.name(), XContentBuilder.FieldCaseConversion.NONE);

      builder.field("order", indexTemplateMetaData.order());
      builder.field("template", indexTemplateMetaData.template());

      builder.startObject("settings");
      for (Map.Entry<String, String> entry :
          indexTemplateMetaData.settings().getAsMap().entrySet()) {
        builder.field(entry.getKey(), entry.getValue());
      }
      builder.endObject();

      builder.startArray("mappings");
      for (Map.Entry<String, CompressedString> entry :
          indexTemplateMetaData.mappings().entrySet()) {
        byte[] data = entry.getValue().uncompressed();
        XContentParser parser = XContentFactory.xContent(data).createParser(data);
        Map<String, Object> mapping = parser.map();
        parser.close();
        builder.map(mapping);
      }
      builder.endArray();

      builder.endObject();
    }
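
A hedged usage sketch for the serializer above. Placing toXContent on IndexTemplateMetaData.Builder and choosing jsonBuilder() are assumptions for illustration, mirroring how AliasMetaData.Builder.toXContent is invoked later in this listing:

    // Hypothetical caller; templateMetaData is assumed to be an IndexTemplateMetaData.
    XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
    builder.startObject();
    IndexTemplateMetaData.Builder.toXContent(templateMetaData, builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();
    System.out.println(builder.string());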
  @Override
  public void login(final RestRequest request, final ActionListener<String[]> listener) {
    String username = request.param(usernameKey);
    String password = request.param(passwordKey);
    final BytesReference content = request.content();
    final XContentType xContentType = XContentFactory.xContentType(content);
    XContentParser parser = null;
    try {
      parser = XContentFactory.xContent(xContentType).createParser(content);
      final XContentParser.Token t = parser.nextToken();
      if (t != null) {
        final Map<String, Object> contentMap = parser.map();
        username = MapUtil.getAsString(contentMap, usernameKey, username);
        password = MapUtil.getAsString(contentMap, passwordKey, password);
      }
    } catch (final Exception e) {
      listener.onFailure(e);
      return;
    } finally {
      if (parser != null) {
        parser.close();
      }
    }

    if (username == null) {
      listener.onResponse(new String[0]);
      return;
    }

    processLogin(username, password, listener);
  }
    public static void toXContent(
        AliasMetaData aliasMetaData, XContentBuilder builder, ToXContent.Params params)
        throws IOException {
      builder.startObject(aliasMetaData.alias(), XContentBuilder.FieldCaseConversion.NONE);

      boolean binary = params.paramAsBoolean("binary", false);

      if (aliasMetaData.filter() != null) {
        if (binary) {
          builder.field("filter", aliasMetaData.filter.compressed());
        } else {
          byte[] data = aliasMetaData.filter().uncompressed();
          XContentParser parser = XContentFactory.xContent(data).createParser(data);
          Map<String, Object> filter = parser.mapOrdered();
          parser.close();
          builder.field("filter", filter);
        }
      }
      if (aliasMetaData.indexRouting() != null) {
        builder.field("index_routing", aliasMetaData.indexRouting());
      }
      if (aliasMetaData.searchRouting() != null) {
        builder.field("search_routing", aliasMetaData.searchRouting());
      }

      builder.endObject();
    }
Example #5
 @Override
 protected ShardSuggestResponse shardOperation(ShardSuggestRequest request)
     throws ElasticSearchException {
   IndexService indexService = indicesService.indexServiceSafe(request.index());
   IndexShard indexShard = indexService.shardSafe(request.shardId());
   final Engine.Searcher searcher = indexShard.searcher();
   XContentParser parser = null;
   try {
     BytesReference suggest = request.suggest();
     if (suggest != null && suggest.length() > 0) {
       parser = XContentFactory.xContent(suggest).createParser(suggest);
       if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
         throw new ElasticSearchIllegalArgumentException("suggest content missing");
       }
       final SuggestionSearchContext context =
           suggestPhase
               .parseElement()
               .parseInternal(
                   parser, indexService.mapperService(), request.index(), request.shardId());
       final Suggest result = suggestPhase.execute(context, searcher.reader());
       return new ShardSuggestResponse(request.index(), request.shardId(), result);
     }
     return new ShardSuggestResponse(request.index(), request.shardId(), new Suggest());
   } catch (Throwable ex) {
     throw new ElasticSearchException("failed to execute suggest", ex);
   } finally {
     searcher.release();
     if (parser != null) {
       parser.close();
     }
   }
 }
 public Builder putMapping(String type, String source) throws IOException {
   XContentParser parser = XContentFactory.xContent(source).createParser(source);
   try {
     putMapping(new MappingMetaData(type, parser.mapOrdered()));
   } finally {
     parser.close();
   }
   return this;
 }
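
A possible caller of the builder method above, assuming it lives on IndexMetaData.Builder and that a builder(String) factory exists; both names are assumptions, not taken from this listing:

  // Hypothetical usage; the mapping JSON follows the usual type/properties layout.
  IndexMetaData.Builder indexBuilder = IndexMetaData.builder("my_index");
  indexBuilder.putMapping(
      "my_type", "{\"my_type\":{\"properties\":{\"title\":{\"type\":\"string\"}}}}");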
    public static void toXContent(
        IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params)
        throws IOException {
      builder.startObject(
          indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);

      builder.field("version", indexMetaData.getVersion());
      builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));

      boolean binary = params.paramAsBoolean("binary", false);

      builder.startObject("settings");
      for (Map.Entry<String, String> entry : indexMetaData.getSettings().getAsMap().entrySet()) {
        builder.field(entry.getKey(), entry.getValue());
      }
      builder.endObject();

      builder.startArray("mappings");
      for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
        if (binary) {
          builder.value(cursor.value.source().compressed());
        } else {
          byte[] data = cursor.value.source().uncompressed();
          XContentParser parser = XContentFactory.xContent(data).createParser(data);
          Map<String, Object> mapping = parser.mapOrdered();
          parser.close();
          builder.map(mapping);
        }
      }
      builder.endArray();

      for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.getCustoms()) {
        builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE);
        cursor.value.toXContent(builder, params);
        builder.endObject();
      }

      builder.startObject("aliases");
      for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
        AliasMetaData.Builder.toXContent(cursor.value, builder, params);
      }
      builder.endObject();

      builder.startObject(KEY_ACTIVE_ALLOCATIONS);
      for (IntObjectCursor<Set<String>> cursor : indexMetaData.activeAllocationIds) {
        builder.startArray(String.valueOf(cursor.key));
        for (String allocationId : cursor.value) {
          builder.value(allocationId);
        }
        builder.endArray();
      }
      builder.endObject();

      builder.endObject();
    }
 private void parseSource(SearchContext context, BytesReference source)
     throws SearchParseException {
   // nothing to parse...
   if (source == null || source.length() == 0) {
     return;
   }
   XContentParser parser = null;
   try {
     parser = XContentFactory.xContent(source).createParser(source);
     XContentParser.Token token;
     token = parser.nextToken();
     if (token != XContentParser.Token.START_OBJECT) {
       throw new ElasticsearchParseException(
           "Expected START_OBJECT but got " + token.name() + " " + parser.currentName());
     }
     while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
       if (token == XContentParser.Token.FIELD_NAME) {
         String fieldName = parser.currentName();
         parser.nextToken();
         SearchParseElement element = elementParsers.get(fieldName);
         if (element == null) {
           throw new SearchParseException(
               context, "No parser for element [" + fieldName + "]", parser.getTokenLocation());
         }
         element.parse(parser, context);
       } else {
         if (token == null) {
           throw new ElasticsearchParseException(
               "End of query source reached but query is not complete.");
         } else {
           throw new ElasticsearchParseException(
               "Expected field name but got "
                   + token.name()
                   + " \""
                   + parser.currentName()
                   + "\"");
         }
       }
     }
   } catch (Throwable e) {
     String sSource = "_na_";
     try {
       sSource = XContentHelper.convertToJson(source, false);
     } catch (Throwable e1) {
       // ignore
     }
      throw new SearchParseException(
          context,
          "Failed to parse source [" + sSource + "]",
          parser != null ? parser.getTokenLocation() : null,
          e);
   } finally {
     if (parser != null) {
       parser.close();
     }
   }
 }
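
The method above is the canonical pull-parser dispatch loop: remember the last FIELD_NAME token, advance to its value, and hand the parser to a per-field element. A stripped-down sketch of just that loop, where handleField is a hypothetical stand-in for the elementParsers lookup:

    XContentParser.Token token = parser.nextToken();
    if (token != XContentParser.Token.START_OBJECT) {
      throw new ElasticsearchParseException("Expected START_OBJECT but got " + token);
    }
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        String fieldName = parser.currentName();
        parser.nextToken(); // position the parser on the field's value
        handleField(fieldName, parser); // hypothetical per-field handler
      } else {
        throw new ElasticsearchParseException("Expected a field name but got " + token);
      }
    }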
 private MetaData readMetaData(byte[] data) throws IOException {
   XContentParser parser = null;
   try {
     parser = XContentHelper.createParser(data, 0, data.length);
     return MetaData.Builder.fromXContent(parser);
   } finally {
     if (parser != null) {
       parser.close();
     }
   }
 }
 @Nullable
 private IndexMetaData loadIndex(String index) {
   long highestVersion = -1;
   IndexMetaData indexMetaData = null;
   for (File indexLocation : nodeEnv.indexLocations(new Index(index))) {
     File stateDir = new File(indexLocation, "_state");
     if (!stateDir.exists() || !stateDir.isDirectory()) {
       continue;
     }
     // now, iterate over the current versions, and find latest one
     File[] stateFiles = stateDir.listFiles();
     if (stateFiles == null) {
       continue;
     }
     for (File stateFile : stateFiles) {
       if (!stateFile.getName().startsWith("state-")) {
         continue;
       }
       try {
         long version = Long.parseLong(stateFile.getName().substring("state-".length()));
         if (version > highestVersion) {
           byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
           if (data.length == 0) {
              logger.debug(
                  "[{}]: no data for [{}], ignoring...", index, stateFile.getAbsolutePath());
             continue;
           }
           XContentParser parser = null;
           try {
             parser = XContentHelper.createParser(data, 0, data.length);
             parser.nextToken(); // move to START_OBJECT
             indexMetaData = IndexMetaData.Builder.fromXContent(parser);
             highestVersion = version;
           } finally {
             if (parser != null) {
               parser.close();
             }
           }
         }
       } catch (Exception e) {
          logger.debug(
              "[{}]: failed to read [{}], ignoring...", e, index, stateFile.getAbsolutePath());
       }
     }
   }
   return indexMetaData;
 }
Example #11
  Query parsePercolatorDocument(String id, BytesReference source) {
    String type = null;
    BytesReference querySource = null;

    XContentParser parser = null;
    try {
      parser = XContentHelper.createParser(source);
      String currentFieldName = null;
      XContentParser.Token token = parser.nextToken(); // move to START_OBJECT
      if (token != XContentParser.Token.START_OBJECT) {
        throw new ElasticsearchException(
            "failed to parse query [" + id + "], not starting with OBJECT");
      }
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
          if ("query".equals(currentFieldName)) {
            if (type != null) {
              return parseQuery(type, null, parser);
            } else {
              XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
              builder.copyCurrentStructure(parser);
              querySource = builder.bytes();
              builder.close();
            }
          } else {
            parser.skipChildren();
          }
        } else if (token == XContentParser.Token.START_ARRAY) {
          parser.skipChildren();
        } else if (token.isValue()) {
          if ("type".equals(currentFieldName)) {
            type = parser.text();
          }
        }
      }
      return parseQuery(type, querySource, null);
    } catch (Exception e) {
      throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
    } finally {
      if (parser != null) {
        parser.close();
      }
    }
  }
 public Builder filter(String filter) {
   if (!Strings.hasLength(filter)) {
     this.filter = null;
     return this;
   }
   try {
     XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
     try {
       filter(parser.mapOrdered());
     } finally {
       parser.close();
     }
     return this;
   } catch (IOException e) {
     throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
   }
 }
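
A hedged usage sketch for the filter setter above, assuming it belongs to AliasMetaData.Builder and that a newAliasMetaDataBuilder factory exists (the alias toXContent snippet earlier suggests the same class):

  // Hypothetical usage; the factory method name is an assumption.
  AliasMetaData alias =
      AliasMetaData.newAliasMetaDataBuilder("recent-logs")
          .filter("{\"term\":{\"user\":\"kimchy\"}}")
          .build();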
  private MetaData loadGlobalState() {
    long highestVersion = -1;
    MetaData metaData = null;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("global-")) {
          continue;
        }
        try {
          long version = Long.parseLong(stateFile.getName().substring("global-".length()));
          if (version > highestVersion) {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
              logger.debug(
                  "[_global] no data for [{}], ignoring...", stateFile.getAbsolutePath());
              continue;
            }

            XContentParser parser = null;
            try {
              parser = XContentHelper.createParser(data, 0, data.length);
              metaData = MetaData.Builder.fromXContent(parser);
              highestVersion = version;
            } finally {
              if (parser != null) {
                parser.close();
              }
            }
          }
        } catch (Exception e) {
          logger.debug("failed to load global state from [{}]", e, stateFile.getAbsolutePath());
        }
      }
    }

    return metaData;
  }
    public static void toXContent(
        IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params)
        throws IOException {
      builder.startObject(indexMetaData.index(), XContentBuilder.FieldCaseConversion.NONE);

      builder.field("version", indexMetaData.version());
      builder.field("state", indexMetaData.state().toString().toLowerCase(Locale.ENGLISH));

      boolean binary = params.paramAsBoolean("binary", false);

      builder.startObject("settings");
      for (Map.Entry<String, String> entry : indexMetaData.settings().getAsMap().entrySet()) {
        builder.field(entry.getKey(), entry.getValue());
      }
      builder.endObject();

      builder.startArray("mappings");
      for (Map.Entry<String, MappingMetaData> entry : indexMetaData.mappings().entrySet()) {
        if (binary) {
          builder.value(entry.getValue().source().compressed());
        } else {
          byte[] data = entry.getValue().source().uncompressed();
          XContentParser parser = XContentFactory.xContent(data).createParser(data);
          Map<String, Object> mapping = parser.mapOrdered();
          parser.close();
          builder.map(mapping);
        }
      }
      builder.endArray();

      for (Map.Entry<String, Custom> entry : indexMetaData.customs().entrySet()) {
        builder.startObject(entry.getKey(), XContentBuilder.FieldCaseConversion.NONE);
        lookupFactorySafe(entry.getKey()).toXContent(entry.getValue(), builder, params);
        builder.endObject();
      }

      builder.startObject("aliases");
      for (AliasMetaData alias : indexMetaData.aliases().values()) {
        AliasMetaData.Builder.toXContent(alias, builder, params);
      }
      builder.endObject();

      builder.endObject();
    }
  @Override
  @Nullable
  public Query parse(QueryParseContext parseContext) throws IOException {
    XContentParser parser = parseContext.parser();
    TemplateContext templateContext = parse(parser, QUERY, PARAMS);
    ExecutableScript executable =
        this.scriptService.executable(
            "mustache", templateContext.template(), templateContext.params());
    BytesReference querySource = (BytesReference) executable.run();

    XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
    try {
      final QueryParseContext context =
          new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
      context.reset(qSourceParser);
      Query result = context.parseInnerQuery();
      parser.nextToken();
      return result;
    } finally {
      qSourceParser.close();
    }
  }
  private void pre019Upgrade() throws Exception {
    long index = -1;
    File metaDataFile = null;
    MetaData metaData = null;
    long version = -1;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        if (logger.isTraceEnabled()) {
          logger.trace("[upgrade]: processing [" + stateFile.getName() + "]");
        }
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
        if (fileIndex >= index) {
          // try and read the meta data
          try {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
              continue;
            }
            XContentParser parser = XContentHelper.createParser(data, 0, data.length);
            try {
              String currentFieldName = null;
              XContentParser.Token token = parser.nextToken();
              if (token != null) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                  if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                  } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("meta-data".equals(currentFieldName)) {
                      metaData = MetaData.Builder.fromXContent(parser);
                    }
                  } else if (token.isValue()) {
                    if ("version".equals(currentFieldName)) {
                      version = parser.longValue();
                    }
                  }
                }
              }
            } finally {
              parser.close();
            }
            index = fileIndex;
            metaDataFile = stateFile;
          } catch (IOException e) {
            logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e);
          }
        }
      }
    }
    if (metaData == null) {
      return;
    }

    logger.info(
        "found old metadata state, loading metadata from [{}] and converting to new metadata location and strucutre...",
        metaDataFile.getAbsolutePath());

    writeGlobalState(
        "upgrade", MetaData.builder().metaData(metaData).version(version).build(), null);
    for (IndexMetaData indexMetaData : metaData) {
      IndexMetaData.Builder indexMetaDataBuilder =
          IndexMetaData.newIndexMetaDataBuilder(indexMetaData).version(version);
      // set the created version to 0.18
      indexMetaDataBuilder.settings(
          ImmutableSettings.settingsBuilder()
              .put(indexMetaData.settings())
              .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_18_0));
      writeIndex("upgrade", indexMetaDataBuilder.build(), null);
    }

    // rename the old metadata state file to a backup file
    File backupFile = new File(metaDataFile.getParentFile(), "backup-" + metaDataFile.getName());
    if (!metaDataFile.renameTo(backupFile)) {
      throw new IOException(
          "failed to rename old state to backup state [" + metaDataFile.getAbsolutePath() + "]");
    }

    // delete all other old metadata state files
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        stateFile.delete();
      }
    }

    logger.info(
        "conversion to new metadata location and format done, backup create at [{}]",
        backupFile.getAbsolutePath());
  }
  public void process(
      MetaData metaData,
      String aliasOrIndex,
      @Nullable MappingMetaData mappingMd,
      boolean allowIdGeneration)
      throws ElasticsearchException {
    // resolve the routing if needed
    routing(metaData.resolveIndexRouting(routing, aliasOrIndex));
    // resolve timestamp if provided externally
    if (timestamp != null) {
      timestamp =
          MappingMetaData.Timestamp.parseStringTimestamp(
              timestamp,
              mappingMd != null
                  ? mappingMd.timestamp().dateTimeFormatter()
                  : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
    }
    // extract values if needed
    if (mappingMd != null) {
      MappingMetaData.ParseContext parseContext =
          mappingMd.createParseContext(id, routing, timestamp);

      if (parseContext.shouldParse()) {
        XContentParser parser = null;
        try {
          parser = XContentHelper.createParser(source);
          mappingMd.parse(parser, parseContext);
          if (parseContext.shouldParseId()) {
            id = parseContext.id();
          }
          if (parseContext.shouldParseRouting()) {
            routing = parseContext.routing();
          }
          if (parseContext.shouldParseTimestamp()) {
            timestamp = parseContext.timestamp();
            timestamp =
                MappingMetaData.Timestamp.parseStringTimestamp(
                    timestamp, mappingMd.timestamp().dateTimeFormatter());
          }
        } catch (Exception e) {
          throw new ElasticsearchParseException(
              "failed to parse doc to extract routing/timestamp", e);
        } finally {
          if (parser != null) {
            parser.close();
          }
        }
      }

      // might as well check for routing here
      if (mappingMd.routing().required() && routing == null) {
        throw new RoutingMissingException(index, type, id);
      }

      if (parent != null && !mappingMd.hasParentField()) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    } else {
      if (parent != null) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    }

    // generate id if not already provided and id generation is allowed
    if (allowIdGeneration) {
      if (id == null) {
        id(Strings.randomBase64UUID());
        // since we generate the id, change it to CREATE
        opType(IndexRequest.OpType.CREATE);
      }
    }

    // generate timestamp if not provided; we always have one after this stage
    if (timestamp == null) {
      timestamp = Long.toString(System.currentTimeMillis());
    }
  }
  private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
    if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
      throw new IllegalArgumentException(
          "It is forbidden to index into the default mapping ["
              + MapperService.DEFAULT_MAPPING
              + "]");
    }

    ParseContext.InternalParseContext context = cache.get();

    final Mapping mapping = docMapper.mapping();
    if (source.type() != null && !source.type().equals(docMapper.type())) {
      throw new MapperParsingException(
          "Type mismatch, provide type ["
              + source.type()
              + "] but mapper is of type ["
              + docMapper.type()
              + "]");
    }
    source.type(docMapper.type());

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
      context.reset(parser, new ParseContext.Document(), source);

      // will result in START_OBJECT
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }

      boolean emptyDoc = false;
      if (mapping.root.isEnabled()) {
        token = parser.nextToken();
        if (token == XContentParser.Token.END_OBJECT) {
          // empty doc, we can handle it...
          emptyDoc = true;
        } else if (token != XContentParser.Token.FIELD_NAME) {
          throw new MapperParsingException(
              "Malformed content, after first object, either the type field or the actual properties should exist");
        }
      }

      for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
        metadataMapper.preParse(context);
      }

      if (mapping.root.isEnabled() == false) {
        // entire type is disabled
        parser.skipChildren();
      } else if (emptyDoc == false) {
        Mapper update = parseObject(context, mapping.root, true);
        if (update != null) {
          context.addDynamicMappingsUpdate(update);
        }
      }

      for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
        metadataMapper.postParse(context);
      }

      // try to parse the next token; this should be null if the object ended properly,
      // but parsing will throw a JSON exception if any extra tokens are not valid JSON
      // (that case is handled by the catch below)
      if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
          && source.parser() == null
          && parser != null) {
        // only check for end of tokens if we created the parser here
        token = parser.nextToken();
        if (token != null) {
          throw new IllegalArgumentException(
              "Malformed content, found extra data after parsing: " + token);
        }
      }

    } catch (Throwable e) {
      // if it's already a mapper parsing exception, no need to wrap it...
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      // Throw a more meaningful message if the document is empty.
      if (source.source() != null && source.source().length() == 0) {
        throw new MapperParsingException("failed to parse, document is empty");
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
      // only close the parser when it's not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = new HashSet<>();
      for (ParseContext.Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
          if (field.fieldType().indexOptions() != IndexOptions.NONE
              && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

    Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
    Mapping update = null;
    if (rootDynamicUpdate != null) {
      update = mapping.mappingUpdate(rootDynamicUpdate);
    }

    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.version(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.source(),
                update)
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null);
    return doc;
  }
Example #19
  public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener)
      throws MapperParsingException {
    ParseContext context = cache.get();

    if (source.type() != null && !source.type().equals(this.type)) {
      throw new MapperParsingException(
          "Type mismatch, provide type ["
              + source.type()
              + "] but mapper is of type ["
              + this.type
              + "]");
    }
    source.type(this.type);

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
      context.reset(parser, new Document(), source, listener);
      // on a newly created instance of document mapper, we always consider it as new mappers that
      // have been added
      if (initMappersAdded) {
        context.setMappingsModified();
        initMappersAdded = false;
      }

      // will result in START_OBJECT
      int countDownTokens = 0;
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }
      boolean emptyDoc = false;
      token = parser.nextToken();
      if (token == XContentParser.Token.END_OBJECT) {
        // empty doc, we can handle it...
        emptyDoc = true;
      } else if (token != XContentParser.Token.FIELD_NAME) {
        throw new MapperParsingException(
            "Malformed content, after first object, either the type field or the actual properties should exist");
      }
      if (type.equals(parser.currentName())) {
        // The first field has the same name as the type. Either the type was provided
        // and the document object is nested within it, or a valid field just happens to
        // be named like the type.
        // Note: in this case we only handle plain value types; an object type will be
        // analyzed as if it were the type itself, and other same-level fields ignored.
        token = parser.nextToken();
        countDownTokens++;
        // Commented out: we allow the same type with START_OBJECT and do our best to
        // handle it, except for the corner case above.
        //   if (token != XContentParser.Token.START_OBJECT) {
        //     throw new MapperException("Malformed content, a field with the same name
        //       as the type must be an object with the properties/fields within it");
        //   }
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.preParse(context);
      }

      if (!emptyDoc) {
        rootObjectMapper.parse(context);
      }

      for (int i = 0; i < countDownTokens; i++) {
        parser.nextToken();
      }

      // fire up any new mappers if exists
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.postParse(context);
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.validate(context);
      }
    } catch (Throwable e) {
      // we have to fire up any new mappers even on a failure, because they
      // have been added internally to each compound mapper...
      // ... we have no option to "rollback" a change, which is very tricky in our copy on change
      // system...
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      // if it's already a mapper parsing exception, no need to wrap it...
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
      // only close the parser when it's not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
    // reverse the order of docs for nested docs support, parent should be last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = Sets.newHashSet();
      for (Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
          if (field.fieldType().indexed() && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.analyzer(),
                context.source(),
                context.mappingsModified())
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null, null);
    return doc;
  }
      private void parse(BytesReference data) throws Exception {
        XContent xContent = XContentFactory.xContent(data);
        String source = XContentBuilder.builder(xContent).string();
        int from = 0;
        int length = data.length();
        byte marker = xContent.streamSeparator();
        int nextMarker = findNextMarker(marker, from, data, length);
        if (nextMarker == -1) {
          nextMarker = length;
        }
        // now parse the action
        XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));

        try {
          // move pointers
          from = nextMarker + 1;

          // Move to START_OBJECT
          XContentParser.Token token = parser.nextToken();
          if (token == null) {
            throw new Exception("Wrong object structure");
          }
          assert token == XContentParser.Token.START_OBJECT;
          // Move to FIELD_NAME, that's the action
          // token = parser.nextToken();
          // assert token == XContentParser.Token.FIELD_NAME;
          // String action = parser.currentName();

          String id = null;
          String routing = null;
          String parent = null;
          String timestamp = null;
          Long ttl = null;
          String opType = null;
          long version = 0;
          VersionType versionType = VersionType.INTERNAL;
          String percolate = null;

          // At this stage, the next token is either END_OBJECT (use the default index
          // and type, with an auto-generated id) or START_OBJECT, which carries another
          // set of parameters.

          String currentFieldName = null;
          while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
              currentFieldName = parser.currentName();
            } else if (token.isValue()) {
              if ("_index".equals(currentFieldName)) {
                index = parser.text();
              } else if ("_type".equals(currentFieldName)) {
                type = parser.text();
              } else if ("_queryString".equals(currentFieldName)) {
                queryString = parser.text();
              } else if ("_id".equals(currentFieldName)) {
                id = parser.text();
              } else if ("_routing".equals(currentFieldName)
                  || "routing".equals(currentFieldName)) {
                routing = parser.text();
              } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                parent = parser.text();
              } else if ("_timestamp".equals(currentFieldName)
                  || "timestamp".equals(currentFieldName)) {
                timestamp = parser.text();
              } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
                if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
                  ttl = TimeValue.parseTimeValue(parser.text(), null).millis();
                } else {
                  ttl = parser.longValue();
                }
              } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
                opType = parser.text();
              } else if ("_version".equals(currentFieldName)
                  || "version".equals(currentFieldName)) {
                version = parser.longValue();
              } else if ("_version_type".equals(currentFieldName)
                  || "_versionType".equals(currentFieldName)
                  || "version_type".equals(currentFieldName)
                  || "versionType".equals(currentFieldName)) {
                versionType = VersionType.fromString(parser.text());
              } else if ("percolate".equals(currentFieldName)
                  || "_percolate".equals(currentFieldName)) {
                percolate = parser.textOrNull();
              }
            }
          }
          if (nextMarker < length) {
            nextMarker = findNextMarker(marker, from, data, length);
            if (nextMarker == -1) {
              nextMarker = length;
            }
            content = getString(data.slice(from, nextMarker - from));
          }

        } finally {
          parser.close();
        }
      }
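
The parse method above calls a findNextMarker helper that is not part of this listing. A plausible sketch, assuming BytesReference exposes random access via get(int); the real implementation may differ:

      // Hypothetical helper: scan for the stream separator byte used by parse(...) above.
      private int findNextMarker(byte marker, int from, BytesReference data, int length) {
        for (int i = from; i < length; i++) {
          if (data.get(i) == marker) {
            return i;
          }
        }
        return -1; // no separator left; the caller treats the remainder as the last chunk
      }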
  private ParsedDocument parseRequest(
      IndexService documentIndexService, PercolateShardRequest request, PercolateContext context)
      throws ElasticsearchException {
    BytesReference source = request.source();
    if (source == null || source.length() == 0) {
      return null;
    }

    // TODO: combine all feature parse elements into one map
    Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
    Map<String, ? extends SearchParseElement> facetElements = facetPhase.parseElements();
    Map<String, ? extends SearchParseElement> aggregationElements =
        aggregationPhase.parseElements();

    ParsedDocument doc = null;
    XContentParser parser = null;

    // Some queries (e.g. function_score with decay functions) rely on a SearchContext
    // being set. We switch types because this context needs to be in the context of the
    // percolate queries in the shard, not the in-memory percolate doc.
    String[] previousTypes = context.types();
    context.types(new String[] {TYPE_NAME});
    SearchContext.setCurrent(context);
    try {
      parser = XContentFactory.xContent(source).createParser(source);
      String currentFieldName = null;
      XContentParser.Token token;
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
          // we need to check the "doc" here, so the next token will be START_OBJECT which is
          // the actual document starting
          if ("doc".equals(currentFieldName)) {
            if (doc != null) {
              throw new ElasticsearchParseException("Either specify doc or get, not both");
            }

            MapperService mapperService = documentIndexService.mapperService();
            DocumentMapper docMapper =
                mapperService.documentMapperWithAutoCreate(request.documentType());
            doc = docMapper.parse(source(parser).type(request.documentType()).flyweight(true));
            // document parsing exits the "doc" object, so we need to re-read the
            // current field name
            currentFieldName = parser.currentName();
          }
        } else if (token == XContentParser.Token.START_OBJECT) {
          SearchParseElement element = hlElements.get(currentFieldName);
          if (element == null) {
            element = facetElements.get(currentFieldName);
            if (element == null) {
              element = aggregationElements.get(currentFieldName);
            }
          }

          if ("query".equals(currentFieldName)) {
            if (context.percolateQuery() != null) {
              throw new ElasticsearchParseException("Either specify query or filter, not both");
            }
            context.percolateQuery(documentIndexService.queryParserService().parse(parser).query());
          } else if ("filter".equals(currentFieldName)) {
            if (context.percolateQuery() != null) {
              throw new ElasticsearchParseException("Either specify query or filter, not both");
            }
            Filter filter =
                documentIndexService.queryParserService().parseInnerFilter(parser).filter();
            context.percolateQuery(new XConstantScoreQuery(filter));
          } else if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          } else if (element != null) {
            element.parse(parser, context);
          }
        } else if (token == XContentParser.Token.START_ARRAY) {
          if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          }
        } else if (token == null) {
          break;
        } else if (token.isValue()) {
          if ("size".equals(currentFieldName)) {
            context.size(parser.intValue());
            if (context.size() < 0) {
              throw new ElasticsearchParseException(
                  "size is set to ["
                      + context.size()
                      + "] but must be greater than or equal to 0");
            }
          } else if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          } else if ("track_scores".equals(currentFieldName)
              || "trackScores".equals(currentFieldName)) {
            context.trackScores(parser.booleanValue());
          }
        }
      }

      // We need to get the actual source from the request body for highlighting, so parse the
      // request body again
      // and only get the doc source.
      if (context.highlight() != null) {
        parser.close();
        currentFieldName = null;
        parser = XContentFactory.xContent(source).createParser(source);
        token = parser.nextToken();
        assert token == XContentParser.Token.START_OBJECT;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
          if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
          } else if (token == XContentParser.Token.START_OBJECT) {
            if ("doc".equals(currentFieldName)) {
              BytesStreamOutput bStream = new BytesStreamOutput();
              XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE, bStream);
              builder.copyCurrentStructure(parser);
              builder.close();
              doc.setSource(bStream.bytes());
              break;
            } else {
              parser.skipChildren();
            }
          } else if (token == null) {
            break;
          }
        }
      }

    } catch (Throwable e) {
      throw new ElasticsearchParseException("failed to parse request", e);
    } finally {
      context.types(previousTypes);
      SearchContext.removeCurrent();
      if (parser != null) {
        parser.close();
      }
    }

    return doc;
  }
  @Override
  public void handleRequest(final RestRequest request, final RestChannel channel) {
    IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
    indicesAliasesRequest.listenerThreaded(false);
    indicesAliasesRequest.masterNodeTimeout(
        request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
    XContentParser parser = null;
    try {
      // {
      //     actions : [
      //         { add : { index : "test1", alias : "alias1", filter : {"user" : "kimchy"} } }
      //         { remove : { index : "test1", alias : "alias1" } }
      //     ]
      // }
      indicesAliasesRequest.timeout(
          request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
      parser = XContentFactory.xContent(request.content()).createParser(request.content());
      XContentParser.Token token = parser.nextToken();
      if (token == null) {
        throw new ElasticsearchIllegalArgumentException("No action is specified");
      }
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.START_ARRAY) {
          while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (token == XContentParser.Token.FIELD_NAME) {
              String action = parser.currentName();
              AliasAction.Type type;
              if ("add".equals(action)) {
                type = AliasAction.Type.ADD;
              } else if ("remove".equals(action)) {
                type = AliasAction.Type.REMOVE;
              } else {
                throw new ElasticsearchIllegalArgumentException(
                    "Alias action [" + action + "] not supported");
              }
              String index = null;
              String alias = null;
              Map<String, Object> filter = null;
              String routing = null;
              boolean routingSet = false;
              String indexRouting = null;
              boolean indexRoutingSet = false;
              String searchRouting = null;
              boolean searchRoutingSet = false;
              String currentFieldName = null;
              while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                  currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.VALUE_STRING) {
                  if ("index".equals(currentFieldName)) {
                    index = parser.text();
                  } else if ("alias".equals(currentFieldName)) {
                    alias = parser.text();
                  } else if ("routing".equals(currentFieldName)) {
                    routing = parser.textOrNull();
                    routingSet = true;
                  } else if ("indexRouting".equals(currentFieldName)
                      || "index-routing".equals(currentFieldName)
                      || "index_routing".equals(currentFieldName)) {
                    indexRouting = parser.textOrNull();
                    indexRoutingSet = true;
                  } else if ("searchRouting".equals(currentFieldName)
                      || "search-routing".equals(currentFieldName)
                      || "search_routing".equals(currentFieldName)) {
                    searchRouting = parser.textOrNull();
                    searchRoutingSet = true;
                  }
                } else if (token == XContentParser.Token.START_OBJECT) {
                  if ("filter".equals(currentFieldName)) {
                    filter = parser.mapOrdered();
                  }
                }
              }

              if (type == AliasAction.Type.ADD) {
                AliasAction aliasAction = newAddAliasAction(index, alias).filter(filter);
                if (routingSet) {
                  aliasAction.routing(routing);
                }
                if (indexRoutingSet) {
                  aliasAction.indexRouting(indexRouting);
                }
                if (searchRoutingSet) {
                  aliasAction.searchRouting(searchRouting);
                }
                indicesAliasesRequest.addAliasAction(aliasAction);
              } else if (type == AliasAction.Type.REMOVE) {
                indicesAliasesRequest.removeAlias(index, alias);
              }
            }
          }
        }
      }
    } catch (Exception e) {
      try {
        channel.sendResponse(new XContentThrowableRestResponse(request, e));
      } catch (IOException e1) {
        logger.warn("Failed to send response", e1);
      }
      return;
    } finally {
      if (parser != null) {
        parser.close();
      }
    }
    client
        .admin()
        .indices()
        .aliases(
            indicesAliasesRequest,
            new AcknowledgedRestResponseActionListener<IndicesAliasesResponse>(
                request, channel, logger));
  }