Code example #1
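An Elasticsearch index warmer: when new readers are opened it scans the mappings for indexed fields whose norms are marked for eager loading, touches those norms on every segment in one background task, and returns a TerminationHandle that the caller can block on until warming finishes.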
    @Override
    public TerminationHandle warmNewReaders(
        final IndexShard indexShard,
        IndexMetaData indexMetaData,
        final WarmerContext context,
        ThreadPool threadPool) {
      final Loading defaultLoading =
          Loading.parse(indexMetaData.settings().get(NORMS_LOADING_KEY), Loading.LAZY);
      final MapperService mapperService = indexShard.mapperService();
      final ObjectSet<String> warmUp = new ObjectHashSet<>();
      // Collect the names of every indexed field whose norms are configured for eager loading.
      for (DocumentMapper docMapper : mapperService.docMappers(false)) {
        for (FieldMapper fieldMapper : docMapper.mappers()) {
          final String indexName = fieldMapper.fieldType().names().indexName();
          Loading normsLoading = fieldMapper.fieldType().normsLoading();
          if (normsLoading == null) {
            normsLoading = defaultLoading;
          }
          if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE
              && !fieldMapper.fieldType().omitNorms()
              && normsLoading == Loading.EAGER) {
            warmUp.add(indexName);
          }
        }
      }

      final CountDownLatch latch = new CountDownLatch(1);
      // Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single
      // task
      threadPool
          .executor(executor())
          .execute(
              new Runnable() {
                @Override
                public void run() {
                  try {
                    for (ObjectCursor<String> stringObjectCursor : warmUp) {
                      final String indexName = stringObjectCursor.value;
                      final long start = System.nanoTime();
                      for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
                        final NumericDocValues values = ctx.reader().getNormValues(indexName);
                        if (values != null) {
                          // Reading one value is enough to force this segment's norms into memory.
                          values.get(0);
                        }
                      }
                      if (indexShard.warmerService().logger().isTraceEnabled()) {
                        indexShard
                            .warmerService()
                            .logger()
                            .trace(
                                "warmed norms for [{}], took [{}]",
                                indexName,
                                TimeValue.timeValueNanos(System.nanoTime() - start));
                      }
                    }
                  } catch (Throwable t) {
                    indexShard.warmerService().logger().warn("failed to warm-up norms", t);
                  } finally {
                    latch.countDown();
                  }
                }
              });

      return new TerminationHandle() {
        @Override
        public void awaitTermination() throws InterruptedException {
          latch.await();
        }
      };
    }
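The pattern worth noting is the latch-backed TerminationHandle: the warmer submits a single background task and hands the caller something to block on until warming completes. Below is a minimal, self-contained sketch of that pattern; the TerminationHandle interface and the warm/WarmerHandleDemo names are simplified stand-ins for illustration, not Elasticsearch's actual classes.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class WarmerHandleDemo {

  // Same shape as the handle returned above: the caller blocks until warming is done.
  interface TerminationHandle {
    void awaitTermination() throws InterruptedException;
  }

  // Submits one background task and returns a latch-backed handle, mirroring the snippet.
  static TerminationHandle warm(ExecutorService executor, Runnable warmTask) {
    final CountDownLatch latch = new CountDownLatch(1);
    executor.execute(() -> {
      try {
        warmTask.run();
      } finally {
        latch.countDown(); // always release the waiter, even if warming failed
      }
    });
    return latch::await;
  }

  public static void main(String[] args) throws InterruptedException {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    List<TerminationHandle> handles = new ArrayList<>();
    handles.add(warm(executor, () -> System.out.println("warming norms...")));
    // A caller that needs warm readers waits on every handle before proceeding.
    for (TerminationHandle handle : handles) {
      handle.awaitTermination();
    }
    executor.shutdown();
  }
}

Because the latch is counted down in a finally block, a failed warm-up never leaves the caller blocked.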
Code example #2
File: TypeParsers.java  Project: lance0821/elassandra
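Elassandra's version of Elasticsearch's generic field-mapping parser. It walks a field's mapping properties (store, index options, term vectors, analyzers, and the CQL-specific cql_collection, cql_struct, and cql_partial_update hints), applies each recognized entry to the builder and removes it from the map so that unhandled keys remain for later validation, then resolves the index and search analyzers.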
  public static void parseField(
      FieldMapper.Builder builder,
      String name,
      Map<String, Object> fieldNode,
      Mapper.TypeParser.ParserContext parserContext) {
    NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
    NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
    Version indexVersionCreated = parserContext.indexVersionCreated();
    for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator();
        iterator.hasNext(); ) {
      Map.Entry<String, Object> entry = iterator.next();
      final String propName = Strings.toUnderscoreCase(entry.getKey());
      final Object propNode = entry.getValue();
      if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        builder.indexName(propNode.toString());
        iterator.remove();
      } else if (propName.equals("store")) {
        builder.store(parseStore(name, propNode.toString()));
        iterator.remove();
      } else if (propName.equals("index")) {
        parseIndex(name, propNode.toString(), builder);
        iterator.remove();
      } else if (propName.equals("tokenized")) {
        builder.tokenized(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals(DOC_VALUES)) {
        builder.docValues(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("term_vector")) {
        parseTermVector(name, propNode.toString(), builder);
        iterator.remove();
      } else if (propName.equals("boost")) {
        builder.boost(nodeFloatValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vectors")) {
        builder.storeTermVectors(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_offsets")) {
        builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_positions")) {
        builder.storeTermVectorPositions(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("store_term_vector_payloads")) {
        builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals(CQL_COLLECTION)) {
        switch (StringUtils.lowerCase(propNode.toString())) {
          case "list":
            builder.cqlCollection(CqlCollection.LIST);
            break;
          case "set":
            builder.cqlCollection(CqlCollection.SET);
            break;
          case "singleton":
            builder.cqlCollection(CqlCollection.SINGLETON);
            break;
        }
        iterator.remove();
      } else if (propName.equals(CQL_STRUCT)) {
        switch (StringUtils.lowerCase(propNode.toString())) {
          case "map":
            builder.cqlStruct(CqlStruct.MAP);
            break;
          case "udt":
            builder.cqlStruct(CqlStruct.UDT);
            break;
          case "tuple":
            builder.cqlStruct(CqlStruct.TUPLE);
            break;
        }
        iterator.remove();
      } else if (propName.equals(CQL_PARTIAL_UPDATE)) {
        builder.cqlPartialUpdate(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("omit_norms")) {
        builder.omitNorms(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("norms")) {
        // propNode is expected to be a nested object such as { "enabled": true, "loading": "eager" }.
        final Map<String, Object> properties = nodeMapValue(propNode, "norms");
        for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator();
            propsIterator.hasNext(); ) {
          Entry<String, Object> entry2 = propsIterator.next();
          final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
          final Object propNode2 = entry2.getValue();
          if (propName2.equals("enabled")) {
            builder.omitNorms(!nodeBooleanValue(propNode2));
            propsIterator.remove();
          } else if (propName2.equals(Loading.KEY)) {
            builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
            propsIterator.remove();
          }
        }
        DocumentMapperParser.checkNoRemainingFields(
            propName, properties, parserContext.indexVersionCreated());
        iterator.remove();
      } else if (propName.equals("omit_term_freq_and_positions")) {
        final IndexOptions op =
            nodeBooleanValue(propNode)
                ? IndexOptions.DOCS
                : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
        if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
          throw new ElasticsearchParseException(
              "'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs']  instead");
        }
        // deprecated option for BW compat
        builder.indexOptions(op);
        iterator.remove();
      } else if (propName.equals("index_options")) {
        builder.indexOptions(nodeIndexOptionValue(propNode));
        iterator.remove();
      } else if (propName.equals("analyzer")
          || // for backcompat, reading old indexes, remove for v3.0
          propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {

        NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
        if (analyzer == null) {
          throw new MapperParsingException(
              "analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
        }
        indexAnalyzer = analyzer;
        iterator.remove();
      } else if (propName.equals("search_analyzer")) {
        NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
        if (analyzer == null) {
          throw new MapperParsingException(
              "analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
        }
        searchAnalyzer = analyzer;
        iterator.remove();
      } else if (propName.equals("include_in_all")) {
        builder.includeInAll(nodeBooleanValue(propNode));
        iterator.remove();
      } else if (propName.equals("postings_format")
          && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        // ignore for old indexes
        iterator.remove();
      } else if (propName.equals("doc_values_format")
          && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
        // ignore for old indexes
        iterator.remove();
      } else if (propName.equals("similarity")) {
        builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
        iterator.remove();
      } else if (propName.equals("fielddata")) {
        final Settings settings =
            Settings.builder()
                .put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata")))
                .build();
        builder.fieldDataSettings(settings);
        iterator.remove();
      } else if (propName.equals("copy_to")) {
        if (parserContext.isWithinMultiField()) {
          // Versions where copy_to inside a multi field is disallowed get a hard error;
          // 2.1.0 itself and anything up to 2.0.1 fall through to the warning below.
          if (indexVersionCreated.after(Version.V_2_1_0)
              || (indexVersionCreated.after(Version.V_2_0_1)
                  && indexVersionCreated.before(Version.V_2_1_0))) {
            throw new MapperParsingException(
                "copy_to in multi fields is not allowed. Found the copy_to in field ["
                    + name
                    + "] which is within a multi field.");
          } else {
            ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]")
                .warn(
                    "Found a copy_to in field ["
                        + name
                        + "] which is within a multi field. This feature has been removed and the copy_to will be ignored.");
            // We still parse this entry; otherwise the message would appear only once, the
            // copy_to would be removed, and after that it would appear again. Better to have
            // it always.
          }
        }
        parseCopyFields(propNode, builder);
        iterator.remove();
      }
    }

    if (indexAnalyzer == null) {
      if (searchAnalyzer != null) {
        // If the index was created before 2.0, we are upgrading the mappings, so use the
        // default index analyzer instead of throwing an exception, letting the user upgrade.
        if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
          indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer();
        } else {
          throw new MapperParsingException(
              "analyzer on field [" + name + "] must be set when search_analyzer is set");
        }
      }
    } else if (searchAnalyzer == null) {
      searchAnalyzer = indexAnalyzer;
    }
    builder.indexAnalyzer(indexAnalyzer);
    builder.searchAnalyzer(searchAnalyzer);
  }
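The loop's contract is a consume-and-remove pattern: each recognized property is removed with iterator.remove(), so whatever remains in the map can later be rejected as unknown (the snippet applies exactly that check to the nested norms object via DocumentMapperParser.checkNoRemainingFields). Here is a minimal sketch of the pattern under that assumption; FieldNodeParsingDemo and its simplified parseField are illustrative, not the real Elasticsearch types.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class FieldNodeParsingDemo {

  // Mirrors the consume-and-remove loop above: every property the parser understands is
  // removed from the map, so anything left afterwards can be rejected as unknown.
  static void parseField(String name, Map<String, Object> fieldNode) {
    for (Iterator<Map.Entry<String, Object>> it = fieldNode.entrySet().iterator(); it.hasNext(); ) {
      Map.Entry<String, Object> entry = it.next();
      switch (entry.getKey()) {
        case "store":
          System.out.println(name + ": store = " + entry.getValue());
          it.remove();
          break;
        case "boost":
          System.out.println(name + ": boost = " + entry.getValue());
          it.remove();
          break;
        default:
          // leave unknown entries in place for the final check
      }
    }
    if (!fieldNode.isEmpty()) {
      throw new IllegalArgumentException(
          "unknown properties for field [" + name + "]: " + fieldNode.keySet());
    }
  }

  public static void main(String[] args) {
    Map<String, Object> node = new HashMap<>();
    node.put("store", true);
    node.put("boost", 2.0f);
    parseField("title", node); // consumes both entries; an extra key would throw
  }
}

Removing entries through the iterator keeps the traversal safe while the map is being mutated, which is why the real parser threads an explicit Iterator instead of using a for-each loop.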