Example #1
  /**
   * Extracts all the required fields from the RestRequest 'h' parameter. In order to support
   * wildcards like 'bulk.*', this potentially needs to parse all the configured headers and their
   * aliases, and it must ensure that each header is added to the returned set only once, even if
   * 'h=bulk.*.bulk.*' is specified or some headers are matched twice via their aliases.
   */
  private static Set<String> expandHeadersFromRequest(Table table, RestRequest request) {
    Set<String> headers = new LinkedHashSet<>(table.getHeaders().size());

    // check headers and aliases
    for (String header : Strings.splitStringByCommaToArray(request.param("h"))) {
      if (Regex.isSimpleMatchPattern(header)) {
        for (Table.Cell tableHeaderCell : table.getHeaders()) {
          String configuredHeader = tableHeaderCell.value.toString();
          if (Regex.simpleMatch(header, configuredHeader)) {
            headers.add(configuredHeader);
          } else if (tableHeaderCell.attr.containsKey("alias")) {
            String[] aliases = Strings.splitStringByCommaToArray(tableHeaderCell.attr.get("alias"));
            for (String alias : aliases) {
              if (Regex.simpleMatch(header, alias)) {
                headers.add(configuredHeader);
                break;
              }
            }
          }
        }
      } else {
        headers.add(header);
      }
    }

    return headers;
  }
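The expansion above relies on Elasticsearch's simple glob matching, where '*' is the only wildcard, and on LinkedHashSet to keep the column order stable while dropping duplicates. The following self-contained sketch only illustrates that matching behaviour; it is not the real Regex implementation.

// Self-contained sketch of the glob semantics used above; '*' is the only wildcard.
// Illustration only, not the Elasticsearch Regex implementation.
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.regex.Pattern;

class SimpleGlobSketch {
  static boolean isPattern(String p) {
    return p.indexOf('*') != -1; // mirrors the idea behind Regex.isSimpleMatchPattern
  }

  static boolean matches(String pattern, String value) {
    // Quote everything, then re-open the quote around each '*' so it becomes '.*'
    return value.matches(Pattern.quote(pattern).replace("*", "\\E.*\\Q"));
  }

  public static void main(String[] args) {
    String[] configured = {"bulk.total", "bulk.active", "search.active"};
    Set<String> headers = new LinkedHashSet<>();
    for (String requested : new String[] {"bulk.*", "bulk.*"}) { // duplicate patterns are harmless
      if (!isPattern(requested)) {
        headers.add(requested); // a plain name is taken as-is, like the 'else' branch above
        continue;
      }
      for (String header : configured) {
        if (matches(requested, header)) {
          headers.add(header);
        }
      }
    }
    System.out.println(headers); // [bulk.total, bulk.active]
  }
}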
  /**
   * Returns a function that filters a document map based on the given include and exclude rules.
   *
   * @see #filter(Map, String[], String[]) for details
   */
  public static Function<Map<String, ?>, Map<String, Object>> filter(
      String[] includes, String[] excludes) {
    CharacterRunAutomaton matchAllAutomaton = new CharacterRunAutomaton(Automata.makeAnyString());

    CharacterRunAutomaton include;
    if (includes == null || includes.length == 0) {
      include = matchAllAutomaton;
    } else {
      Automaton includeA = Regex.simpleMatchToAutomaton(includes);
      includeA = makeMatchDotsInFieldNames(includeA);
      include = new CharacterRunAutomaton(includeA);
    }

    Automaton excludeA;
    if (excludes == null || excludes.length == 0) {
      excludeA = Automata.makeEmpty();
    } else {
      excludeA = Regex.simpleMatchToAutomaton(excludes);
      excludeA = makeMatchDotsInFieldNames(excludeA);
    }
    CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA);

    // NOTE: We cannot use Operations.minus because of the special case that
    // we want all sub properties to match as soon as an object matches

    return (map) -> filter(map, include, 0, exclude, 0, matchAllAutomaton);
  }
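For orientation, here is a hedged usage sketch of the factory above, written as a fragment in the same style (java.util.Map and java.util.function.Function imports assumed); the document keys are invented.

  // Hypothetical caller of the filter(...) factory above (fragment; Map/Function imports assumed).
  private static Map<String, Object> keepUserWithoutPassword(Map<String, Object> doc) {
    // Include everything under "user" but strip the nested "user.password" field.
    Function<Map<String, ?>, Map<String, Object>> userOnly =
        filter(new String[] {"user.*"}, new String[] {"user.password"});
    return userOnly.apply(doc); // e.g. {user={name=...}} with user.password removed
  }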
  private Map<String, FieldMappingMetaData> findFieldMappingsByType(
      DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) {
    MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>();
    final DocumentFieldMappers allFieldMappers = documentMapper.mappers();
    for (String field : request.fields()) {
      if (Regex.isMatchAllPattern(field)) {
        for (FieldMapper fieldMapper : allFieldMappers) {
          addFieldMapper(
              fieldMapper.fieldType().name(),
              fieldMapper,
              fieldMappings,
              request.includeDefaults());
        }
      } else if (Regex.isSimpleMatchPattern(field)) {
        // match the pattern against the full name of every mapper; entries are keyed by that
        // name, so each matching mapper is only stored once
        for (FieldMapper fieldMapper : allFieldMappers) {
          if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) {
            addFieldMapper(
                fieldMapper.fieldType().name(),
                fieldMapper,
                fieldMappings,
                request.includeDefaults());
          }
        }

      } else {
        // not a pattern
        FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field);
        if (fieldMapper != null) {
          addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults());
        } else if (request.probablySingleFieldRequest()) {
          fieldMappings.put(field, FieldMappingMetaData.NULL);
        }
      }
    }
    return fieldMappings.immutableMap();
  }
 @Override
 public void match(LoggingEvent event) {
   if (event.getLevel() == level && event.getLoggerName().equals(logger)) {
     if (Regex.isSimpleMatchPattern(message)) {
       if (Regex.simpleMatch(message, event.getMessage().toString())) {
         saw = true;
       }
     } else {
       if (event.getMessage().toString().contains(message)) {
         saw = true;
       }
     }
   }
 }
Example #5
  /*
   * Finds all mappings for the given types and concrete indices. Types are expanded to
   * include all types that match the glob patterns in the types array. An empty or null
   * types array, or {"_all"}, is expanded to all types available for the given indices.
   */
  public ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> findMappings(
      String[] concreteIndices, final String[] types) {
    assert types != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
      return ImmutableOpenMap.of();
    }

    ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder =
        ImmutableOpenMap.builder();
    Iterable<String> intersection =
        HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String index : intersection) {
      IndexMetaData indexMetaData = indices.get(index);
      ImmutableOpenMap.Builder<String, MappingMetaData> filteredMappings;
      if (isAllTypes(types)) {
        indexMapBuilder.put(
            index, indexMetaData.getMappings()); // No types specified means get it all

      } else {
        filteredMappings = ImmutableOpenMap.builder();
        for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
          if (Regex.simpleMatch(types, cursor.key)) {
            filteredMappings.put(cursor.key, cursor.value);
          }
        }
        if (!filteredMappings.isEmpty()) {
          indexMapBuilder.put(index, filteredMappings.build());
        }
      }
    }
    return indexMapBuilder.build();
  }
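The isAllTypes(...) check above encodes the convention described in the comment. A minimal illustrative version (not the actual Elasticsearch helper) would look like this:

  // Illustrative version of the "all types" check used above: null, empty, or {"_all"}
  // means "no type filtering".
  static boolean isAllTypesSketch(String[] types) {
    return types == null
        || types.length == 0
        || (types.length == 1 && "_all".equals(types[0]));
  }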
  @Override
  protected GetFieldMappingsResponse shardOperation(
      final GetFieldMappingsIndexRequest request, ShardId shardId) {
    assert shardId != null;
    IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
    Collection<String> typeIntersection;
    if (request.types().length == 0) {
      typeIntersection = indexService.mapperService().types();

    } else {
      typeIntersection =
          indexService
              .mapperService()
              .types()
              .stream()
              .filter(type -> Regex.simpleMatch(request.types(), type))
              .collect(Collectors.toCollection(ArrayList::new));
      if (typeIntersection.isEmpty()) {
        throw new TypeMissingException(shardId.getIndex(), request.types());
      }
    }

    MapBuilder<String, Map<String, FieldMappingMetaData>> typeMappings = new MapBuilder<>();
    for (String type : typeIntersection) {
      DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
      Map<String, FieldMappingMetaData> fieldMapping =
          findFieldMappingsByType(documentMapper, request);
      if (!fieldMapping.isEmpty()) {
        typeMappings.put(type, fieldMapping);
      }
    }

    return new GetFieldMappingsResponse(
        singletonMap(shardId.getIndexName(), typeMappings.immutableMap()));
  }
Example #7
 /**
  * Returns all the fields that match the given pattern. If the pattern is prefixed with a type
  * then the fields will be returned with a type prefix.
  */
 public Collection<String> simpleMatchToIndexNames(String pattern) {
   if (Regex.isSimpleMatchPattern(pattern) == false) {
     // no wildcards
     return Collections.singletonList(pattern);
   }
   return fieldTypes.simpleMatchToFullName(pattern);
 }
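A small caller sketch for the method above; the commented results are only indicative, since they depend on the actual mappings.

  // Hypothetical caller of simpleMatchToIndexNames(...) above; results are indicative only.
  void resolveFieldsSketch() {
    Collection<String> literal = simpleMatchToIndexNames("title");  // ["title"], no mapping lookup
    Collection<String> globbed = simpleMatchToIndexNames("user.*"); // every full name under "user"
  }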
Example #8
 public static boolean hasDynamicSetting(String key) {
   for (String dynamicSetting : dynamicSettings) {
     if (Regex.simpleMatch(dynamicSetting, key)) {
       return true;
     }
   }
   return false;
 }
  private List<Tuple<Text, Integer>> getExpectedFacetEntries(
      Set<String> fieldValues,
      Map<String, Integer> controlFacetsField,
      int size,
      TermsFacet.ComparatorType sort,
      List<String> excludes,
      String regex,
      boolean allTerms) {
    Pattern pattern = null;
    if (regex != null) {
      pattern = Regex.compile(regex, null);
    }

    List<Tuple<Text, Integer>> entries = new ArrayList<Tuple<Text, Integer>>();
    for (Map.Entry<String, Integer> e : controlFacetsField.entrySet()) {
      if (excludes.contains(e.getKey())) {
        continue;
      }
      if (pattern != null && !pattern.matcher(e.getKey()).matches()) {
        continue;
      }

      entries.add(new Tuple<Text, Integer>(new StringText(e.getKey()), e.getValue()));
    }

    if (allTerms) {
      for (String fieldValue : fieldValues) {
        if (!controlFacetsField.containsKey(fieldValue)) {
          if (excludes.contains(fieldValue)) {
            continue;
          }
          if (pattern != null && !pattern.matcher(fieldValue).matches()) {
            continue;
          }

          entries.add(new Tuple<Text, Integer>(new StringText(fieldValue), 0));
        }
      }
    }

    switch (sort) {
      case COUNT:
        Collections.sort(entries, count);
        break;
      case REVERSE_COUNT:
        Collections.sort(entries, count_reverse);
        break;
      case TERM:
        Collections.sort(entries, term);
        break;
      case REVERSE_TERM:
        Collections.sort(entries, term_reverse);
        break;
    }
    return size >= entries.size() ? entries : entries.subList(0, size);
  }
 protected boolean matchesIndices(String currentIndex, String... indices) {
   final String[] concreteIndices =
       clusterService.state().metaData().concreteIndicesIgnoreMissing(indices);
   for (String index : concreteIndices) {
     if (Regex.simpleMatch(index, currentIndex)) {
       return true;
     }
   }
   return false;
 }
Example #11
 private void addNewIndex(
     ClusterState tribeState,
     ClusterBlocks.Builder blocks,
     MetaData.Builder metaData,
     RoutingTable.Builder routingTable,
     IndexMetaData tribeIndex) {
   Settings tribeSettings =
       Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build();
   metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
   routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex()));
   if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) {
     blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK);
   }
   if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) {
     blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK);
   }
   if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) {
     blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK);
   }
 }
  @Inject
  public PatternTokenizerFactory(
      IndexSettings indexSettings, @Assisted String name, @Assisted Settings settings) {
    super(indexSettings, name, settings);

    String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/);
    if (sPattern == null) {
      throw new IllegalArgumentException(
          "pattern is missing for [" + name + "] tokenizer of type 'pattern'");
    }

    this.pattern = Regex.compile(sPattern, settings.get("flags"));
    this.group = settings.getAsInt("group", -1);
  }
Example #13
 public Set<String> simpleMatchToIndexNames(String pattern) {
   int dotIndex = pattern.indexOf('.');
   if (dotIndex != -1) {
     String possibleType = pattern.substring(0, dotIndex);
     DocumentMapper possibleDocMapper = mappers.get(possibleType);
     if (possibleDocMapper != null) {
       Set<String> typedFields = Sets.newHashSet();
       for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
         typedFields.add(possibleType + "." + indexName);
       }
       return typedFields;
     }
   }
   Set<String> fields = Sets.newHashSet();
   for (Map.Entry<String, FieldMappers> entry : fullNameFieldMappers.entrySet()) {
     if (Regex.simpleMatch(pattern, entry.getKey())) {
       for (FieldMapper mapper : entry.getValue()) {
         fields.add(mapper.names().indexName());
       }
     }
   }
   for (Map.Entry<String, FieldMappers> entry : indexNameFieldMappers.entrySet()) {
     if (Regex.simpleMatch(pattern, entry.getKey())) {
       for (FieldMapper mapper : entry.getValue()) {
         fields.add(mapper.names().indexName());
       }
     }
   }
   for (Map.Entry<String, FieldMappers> entry : nameFieldMappers.entrySet()) {
     if (Regex.simpleMatch(pattern, entry.getKey())) {
       for (FieldMapper mapper : entry.getValue()) {
         fields.add(mapper.names().indexName());
       }
     }
   }
   return fields;
 }
 @Override
 public void assertAllShardsOnNodes(String index, String pattern) {
   ClusterState clusterState =
       client().admin().cluster().prepareState().execute().actionGet().getState();
   for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
     for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
       for (ShardRouting shardRouting : indexShardRoutingTable) {
         if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) {
           String name = clusterState.nodes().get(shardRouting.currentNodeId()).name();
           assertThat(
               "Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true));
         }
       }
     }
   }
 }
 /**
  * Build the {@link CharacterRunAutomaton} that represents the reindex-from-remote whitelist and
  * make sure that it doesn't whitelist the world.
  */
 static CharacterRunAutomaton buildRemoteWhitelist(List<String> whitelist) {
   if (whitelist.isEmpty()) {
     return new CharacterRunAutomaton(Automata.makeEmpty());
   }
   Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY));
   automaton =
       MinimizationOperations.minimize(automaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
   if (Operations.isTotal(automaton)) {
     throw new IllegalArgumentException(
         "Refusing to start because whitelist "
             + whitelist
             + " accepts all addresses. "
             + "This would allow users to reindex-from-remote any URL they like effectively having Elasticsearch make HTTP GETs "
             + "for them.");
   }
   return new CharacterRunAutomaton(automaton);
 }
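A usage sketch for the whitelist automaton above; Lucene's CharacterRunAutomaton.run(String) performs the per-address check, and the host patterns and addresses below are invented for illustration.

  // Hypothetical caller; host patterns and addresses are invented.
  void remoteWhitelistSketch() {
    CharacterRunAutomaton whitelist =
        buildRemoteWhitelist(Arrays.asList("localhost:*", "*.example.com:9200"));
    boolean allowed = whitelist.run("es1.example.com:9200"); // true, matches the second pattern
    boolean denied = whitelist.run("evil.example.org:9200"); // false, matches neither pattern
    // buildRemoteWhitelist(Arrays.asList("*")) would throw, since "*" accepts every address.
  }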
  @Override
  protected Query doToQuery(QueryShardContext context) throws IOException {
    // field names in builder can have wildcards etc, need to resolve them here
    Map<String, Float> resolvedFieldsAndWeights = new TreeMap<>();
    // Use the default field if no fields specified
    if (fieldsAndWeights.isEmpty()) {
      resolvedFieldsAndWeights.put(
          resolveIndexName(context.defaultField(), context), AbstractQueryBuilder.DEFAULT_BOOST);
    } else {
      for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) {
        if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) {
          for (String fieldName :
              context.mapperService().simpleMatchToIndexNames(fieldEntry.getKey())) {
            resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue());
          }
        } else {
          resolvedFieldsAndWeights.put(
              resolveIndexName(fieldEntry.getKey(), context), fieldEntry.getValue());
        }
      }
    }

    // Use the mapper service's default search analyzer if none is specified
    Analyzer luceneAnalyzer;
    if (analyzer == null) {
      luceneAnalyzer = context.mapperService().searchAnalyzer();
    } else {
      luceneAnalyzer = context.analysisService().analyzer(analyzer);
      if (luceneAnalyzer == null) {
        throw new QueryShardException(
            context,
            "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer + "] not found");
      }
    }

    SimpleQueryParser sqp =
        new SimpleQueryParser(luceneAnalyzer, resolvedFieldsAndWeights, flags, settings);
    sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());

    Query query = sqp.parse(queryText);
    if (minimumShouldMatch != null && query instanceof BooleanQuery) {
      query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
    }
    return query;
  }
Example #17
 public boolean match(Task task) {
   if (getActions() != null
       && getActions().length > 0
       && Regex.simpleMatch(getActions(), task.getAction()) == false) {
     return false;
   }
   if (getTaskId().isSet()) {
     if (getTaskId().getId() != task.getId()) {
       return false;
     }
   }
   if (parentTaskId.isSet()) {
     if (parentTaskId.equals(task.getParentTaskId()) == false) {
       return false;
     }
   }
   return true;
 }
Example #18
 /**
  * Returns all the fields that match the given pattern. If the pattern is prefixed with a type
  * then the fields will be returned with a type prefix.
  */
 public Set<String> simpleMatchToIndexNames(String pattern) {
   if (!Regex.isSimpleMatchPattern(pattern)) {
     return ImmutableSet.of(pattern);
   }
   int dotIndex = pattern.indexOf('.');
   if (dotIndex != -1) {
     String possibleType = pattern.substring(0, dotIndex);
     DocumentMapper possibleDocMapper = mappers.get(possibleType);
     if (possibleDocMapper != null) {
       Set<String> typedFields = Sets.newHashSet();
       for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
         typedFields.add(possibleType + "." + indexName);
       }
       return typedFields;
     }
   }
   return fieldMappers.simpleMatchToIndexNames(pattern);
 }
 /**
  * Returns the stats, including type specific stats. If types is null or empty, no per-type
  * stats are returned. Otherwise only the provided types are returned, or all types when
  * <tt>_all</tt> is passed.
  */
 IndexingStats stats(boolean isThrottled, long currentThrottleInMillis, String... types) {
   IndexingStats.Stats total = totalStats.stats(isThrottled, currentThrottleInMillis);
   Map<String, IndexingStats.Stats> typesSt = null;
   if (types != null && types.length > 0) {
     typesSt = new HashMap<>(typesStats.size());
     if (types.length == 1 && types[0].equals("_all")) {
       for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
         typesSt.put(entry.getKey(), entry.getValue().stats(isThrottled, currentThrottleInMillis));
       }
     } else {
       for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
         if (Regex.simpleMatch(types, entry.getKey())) {
           typesSt.put(
               entry.getKey(), entry.getValue().stats(isThrottled, currentThrottleInMillis));
         }
       }
     }
   }
   return new IndexingStats(total, typesSt);
 }
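A brief caller sketch showing the three type-selection modes described in the comment above (type names are invented).

  // Hypothetical callers of stats(...) above; type names are invented.
  void indexingStatsSketch() {
    IndexingStats totalsOnly = stats(false, 0);                  // per-type map stays null
    IndexingStats everything = stats(false, 0, "_all");          // one entry per tracked type
    IndexingStats filtered = stats(false, 0, "log*", "event*");  // only types matching a glob
  }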
Example #20
 /**
  * Returns all the fields that match the given pattern, with an optional narrowing based on a list
  * of types.
  */
 public Set<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
   if (types == null || types.length == 0) {
     return simpleMatchToIndexNames(pattern);
   }
   if (types.length == 1 && types[0].equals("_all")) {
     return simpleMatchToIndexNames(pattern);
   }
   if (!Regex.isSimpleMatchPattern(pattern)) {
     return ImmutableSet.of(pattern);
   }
   Set<String> fields = Sets.newHashSet();
   for (String type : types) {
     DocumentMapper possibleDocMapper = mappers.get(type);
     if (possibleDocMapper != null) {
       for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
         fields.add(indexName);
       }
     }
   }
   return fields;
 }
Example #21
  /**
   * Finds the specific index aliases that match with the specified aliases directly or partially
   * via wildcards, and that point to the specified concrete indices or match partially with the
   * indices via wildcards.
   *
   * @param aliases The names of the index aliases to find
   * @param concreteIndices The concrete indices the index aliases must point to in order to be
   *     returned.
   * @return the found index aliases grouped by index
   */
  public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(
      final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
      return ImmutableOpenMap.of();
    }

    boolean matchAllAliases = matchAllAliases(aliases);
    ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
    Iterable<String> intersection =
        HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String index : intersection) {
      IndexMetaData indexMetaData = indices.get(index);
      List<AliasMetaData> filteredValues = new ArrayList<>();
      for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
        AliasMetaData value = cursor.value;
        if (matchAllAliases || Regex.simpleMatch(aliases, value.alias())) {
          filteredValues.add(value);
        }
      }

      if (!filteredValues.isEmpty()) {
        // Make the list order deterministic
        CollectionUtil.timSort(
            filteredValues,
            new Comparator<AliasMetaData>() {
              @Override
              public int compare(AliasMetaData o1, AliasMetaData o2) {
                return o1.alias().compareTo(o2.alias());
              }
            });
        mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
      }
    }
    return mapBuilder.build();
  }
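A caller sketch for findAliases(...), assuming it is the MetaData instance method it appears to be; the alias pattern and index names are invented.

  // Hypothetical caller; "logs_*" is a wildcard alias pattern, index names are invented.
  void findAliasesSketch(MetaData metaData) {
    ImmutableOpenMap<String, List<AliasMetaData>> aliasesByIndex =
        metaData.findAliases(new String[] {"logs_*"}, new String[] {"logs-2017", "metrics-2017"});
    // Each value is an alphabetically sorted, unmodifiable list of matching aliases;
    // indices without a matching alias do not appear in the map at all.
  }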
Example #22
  /**
   * Checks if at least one of the specified aliases exists in the specified concrete indices.
   * Wildcards are supported in the alias names for partial matches.
   *
   * @param aliases The names of the index aliases to find
   * @param concreteIndices The concrete indices the index aliases must point to in order to be
   *     returned.
   * @return whether at least one of the specified aliases exists in one of the specified concrete
   *     indices.
   */
  public boolean hasAliases(final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
      return false;
    }

    Iterable<String> intersection =
        HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String index : intersection) {
      IndexMetaData indexMetaData = indices.get(index);
      List<AliasMetaData> filteredValues = new ArrayList<>();
      for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
        AliasMetaData value = cursor.value;
        if (Regex.simpleMatch(aliases, value.alias())) {
          filteredValues.add(value);
        }
      }
      if (!filteredValues.isEmpty()) {
        return true;
      }
    }
    return false;
  }
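A caller sketch for hasAliases(...) with invented names. Since only a boolean is needed, the per-index list above acts purely as a match flag; returning true on the first matching alias would avoid building it.

  // Hypothetical caller; returns true if any alias matching "archive_*" points at either index.
  boolean anyArchiveAliasSketch(MetaData metaData) {
    return metaData.hasAliases(new String[] {"archive_*"}, new String[] {"logs-1", "logs-2"});
  }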
  private Table buildTable(
      RestRequest req,
      ClusterStateResponse state,
      NodesInfoResponse nodesInfo,
      NodesStatsResponse nodesStats) {
    final String[] threadPools = req.paramAsStringArray("thread_pool_patterns", new String[] {"*"});
    final DiscoveryNodes nodes = state.getState().nodes();
    final Table table = getTableWithHeader(req);

    // collect all thread pool names that we see across the nodes
    final Set<String> candidates = new HashSet<>();
    for (final NodeStats nodeStats : nodesStats.getNodes()) {
      for (final ThreadPoolStats.Stats threadPoolStats : nodeStats.getThreadPool()) {
        candidates.add(threadPoolStats.getName());
      }
    }

    // collect all thread pool names that match the specified thread pool patterns
    final Set<String> included = new HashSet<>();
    for (final String candidate : candidates) {
      if (Regex.simpleMatch(threadPools, candidate)) {
        included.add(candidate);
      }
    }

    for (final DiscoveryNode node : nodes) {
      final NodeInfo info = nodesInfo.getNodesMap().get(node.getId());
      final NodeStats stats = nodesStats.getNodesMap().get(node.getId());

      final Map<String, ThreadPoolStats.Stats> poolThreadStats;
      final Map<String, ThreadPool.Info> poolThreadInfo;

      if (stats == null) {
        poolThreadStats = Collections.emptyMap();
        poolThreadInfo = Collections.emptyMap();
      } else {
        // we use a sorted map to ensure that thread pools are sorted by name
        poolThreadStats = new TreeMap<>();
        poolThreadInfo = new HashMap<>();

        ThreadPoolStats threadPoolStats = stats.getThreadPool();
        for (ThreadPoolStats.Stats threadPoolStat : threadPoolStats) {
          poolThreadStats.put(threadPoolStat.getName(), threadPoolStat);
        }
        if (info != null) {
          for (ThreadPool.Info threadPoolInfo : info.getThreadPool()) {
            poolThreadInfo.put(threadPoolInfo.getName(), threadPoolInfo);
          }
        }
      }
      for (Map.Entry<String, ThreadPoolStats.Stats> entry : poolThreadStats.entrySet()) {

        if (!included.contains(entry.getKey())) continue;

        table.startRow();

        table.addCell(node.getName());
        table.addCell(node.getId());
        table.addCell(node.getEphemeralId());
        table.addCell(info == null ? null : info.getProcess().getId());
        table.addCell(node.getHostName());
        table.addCell(node.getHostAddress());
        table.addCell(node.getAddress().address().getPort());
        final ThreadPoolStats.Stats poolStats = entry.getValue();
        final ThreadPool.Info poolInfo = poolThreadInfo.get(entry.getKey());

        Long maxQueueSize = null;
        String keepAlive = null;
        Integer minThreads = null;
        Integer maxThreads = null;

        if (poolInfo != null) {
          if (poolInfo.getQueueSize() != null) {
            maxQueueSize = poolInfo.getQueueSize().singles();
          }
          if (poolInfo.getKeepAlive() != null) {
            keepAlive = poolInfo.getKeepAlive().toString();
          }
          if (poolInfo.getMin() >= 0) {
            minThreads = poolInfo.getMin();
          }
          if (poolInfo.getMax() >= 0) {
            maxThreads = poolInfo.getMax();
          }
        }

        table.addCell(entry.getKey());
        table.addCell(poolInfo == null ? null : poolInfo.getThreadPoolType().getType());
        table.addCell(poolStats == null ? null : poolStats.getActive());
        table.addCell(poolStats == null ? null : poolStats.getThreads());
        table.addCell(poolStats == null ? null : poolStats.getQueue());
        table.addCell(maxQueueSize);
        table.addCell(poolStats == null ? null : poolStats.getRejected());
        table.addCell(poolStats == null ? null : poolStats.getLargest());
        table.addCell(poolStats == null ? null : poolStats.getCompleted());
        table.addCell(minThreads);
        table.addCell(maxThreads);
        table.addCell(keepAlive);

        table.endRow();
      }
    }

    return table;
  }
  @Override
  public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    if (fieldName == null && fieldsNames == null && script == null) {
      throw new SearchSourceBuilderException(
          "field/fields/script must be set on terms facet for facet [" + name + "]");
    }
    builder.startObject(name);

    builder.startObject(TermsFacet.TYPE);
    if (fieldsNames != null) {
      if (fieldsNames.length == 1) {
        builder.field("field", fieldsNames[0]);
      } else {
        builder.field("fields", fieldsNames);
      }
    } else if (fieldName != null) {
      builder.field("field", fieldName);
    }
    builder.field("size", size);

    // no point in sending shard size if it's not greater than size
    if (shardSize > size) {
      builder.field("shard_size", shardSize);
    }

    if (exclude != null) {
      builder.startArray("exclude");
      for (Object ex : exclude) {
        builder.value(ex);
      }
      builder.endArray();
    }
    if (regex != null) {
      builder.field("regex", regex);
      if (regexFlags != 0) {
        builder.field("regex_flags", Regex.flagsToString(regexFlags));
      }
    }
    if (comparatorType != null) {
      builder.field("order", comparatorType.name().toLowerCase(Locale.ROOT));
    }
    if (allTerms != null) {
      builder.field("all_terms", allTerms);
    }

    if (script != null) {
      builder.field("script", script);
      if (lang != null) {
        builder.field("lang", lang);
      }
      if (this.params != null) {
        builder.field("params", this.params);
      }
    }

    if (executionHint != null) {
      builder.field("execution_hint", executionHint);
    }

    builder.endObject();

    addFilterFacetAndGlobal(builder, params);

    builder.endObject();
    return builder;
  }
  @Override
  public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context)
      throws IOException {
    String field = null;
    int size = 10;
    int shardSize = -1;

    String[] fieldsNames = null;
    ImmutableSet<BytesRef> excluded = ImmutableSet.of();
    String regex = null;
    String regexFlags = null;
    TermsFacet.ComparatorType comparatorType = TermsFacet.ComparatorType.COUNT;
    String scriptLang = null;
    String script = null;
    ScriptService.ScriptType scriptType = null;
    Map<String, Object> params = null;
    boolean allTerms = false;
    String executionHint = null;

    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
      } else if (token == XContentParser.Token.START_OBJECT) {
        if ("params".equals(currentFieldName)) {
          params = parser.map();
        } else {
          throw new ElasticsearchParseException(
              "unknown parameter ["
                  + currentFieldName
                  + "] while parsing terms facet ["
                  + facetName
                  + "]");
        }
      } else if (token == XContentParser.Token.START_ARRAY) {
        if ("exclude".equals(currentFieldName)) {
          ImmutableSet.Builder<BytesRef> builder = ImmutableSet.builder();
          while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            builder.add(parser.bytes());
          }
          excluded = builder.build();
        } else if ("fields".equals(currentFieldName)) {
          List<String> fields = Lists.newArrayListWithCapacity(4);
          while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            fields.add(parser.text());
          }
          fieldsNames = fields.toArray(new String[fields.size()]);
        } else {
          throw new ElasticsearchParseException(
              "unknown parameter ["
                  + currentFieldName
                  + "] while parsing terms facet ["
                  + facetName
                  + "]");
        }
      } else if (token.isValue()) {
        if ("field".equals(currentFieldName)) {
          field = parser.text();
        } else if (ScriptService.SCRIPT_INLINE.match(currentFieldName)) {
          script = parser.text();
          scriptType = ScriptService.ScriptType.INLINE;
        } else if (ScriptService.SCRIPT_ID.match(currentFieldName)) {
          script = parser.text();
          scriptType = ScriptService.ScriptType.INDEXED;
        } else if (ScriptService.SCRIPT_FILE.match(currentFieldName)) {
          script = parser.text();
          scriptType = ScriptService.ScriptType.FILE;
        } else if (ScriptService.SCRIPT_LANG.match(currentFieldName)) {
          scriptLang = parser.text();
        } else if ("size".equals(currentFieldName)) {
          size = parser.intValue();
        } else if ("shard_size".equals(currentFieldName) || "shardSize".equals(currentFieldName)) {
          shardSize = parser.intValue();
        } else if ("all_terms".equals(currentFieldName) || "allTerms".equals(currentFieldName)) {
          allTerms = parser.booleanValue();
        } else if ("regex".equals(currentFieldName)) {
          regex = parser.text();
        } else if ("regex_flags".equals(currentFieldName)
            || "regexFlags".equals(currentFieldName)) {
          regexFlags = parser.text();
        } else if ("order".equals(currentFieldName) || "comparator".equals(currentFieldName)) {
          comparatorType = TermsFacet.ComparatorType.fromString(parser.text());
        } else if ("execution_hint".equals(currentFieldName)
            || "executionHint".equals(currentFieldName)) {
          executionHint = parser.textOrNull();
        } else {
          throw new ElasticsearchParseException(
              "unknown parameter ["
                  + currentFieldName
                  + "] while parsing terms facet ["
                  + facetName
                  + "]");
        }
      }
    }

    if (fieldsNames != null && fieldsNames.length == 1) {
      field = fieldsNames[0];
      fieldsNames = null;
    }

    Pattern pattern = null;
    if (regex != null) {
      pattern = Regex.compile(regex, regexFlags);
    }

    SearchScript searchScript = null;
    if (script != null) {
      searchScript =
          context.scriptService().search(context.lookup(), scriptLang, script, scriptType, params);
    }

    // shard_size cannot be smaller than size, as we need to fetch at least <size> entries from
    // every shard in order to return <size>
    if (shardSize < size) {
      shardSize = size;
    }

    if (fieldsNames != null) {

      // in case of multi fields, we only collect the fields that are mapped and facet on them.
      ArrayList<FieldMapper> mappers = new ArrayList<>(fieldsNames.length);
      for (int i = 0; i < fieldsNames.length; i++) {
        FieldMapper mapper = context.smartNameFieldMapper(fieldsNames[i]);
        if (mapper != null) {
          mappers.add(mapper);
        }
      }
      if (mappers.isEmpty()) {
        // none of the fields is mapped
        return new UnmappedFieldExecutor(size, comparatorType);
      }
      return new FieldsTermsStringFacetExecutor(
          mappers.toArray(new FieldMapper[mappers.size()]),
          size,
          shardSize,
          comparatorType,
          allTerms,
          context,
          excluded,
          pattern,
          searchScript);
    }
    if (field == null && script != null) {
      return new ScriptTermsStringFieldFacetExecutor(
          size,
          shardSize,
          comparatorType,
          context,
          excluded,
          pattern,
          scriptLang,
          script,
          scriptType,
          params,
          context.cacheRecycler());
    }

    if (field == null) {
      throw new ElasticsearchParseException(
          "terms facet [" + facetName + "] must have a field, fields or script parameter");
    }

    FieldMapper fieldMapper = context.smartNameFieldMapper(field);
    if (fieldMapper == null) {
      return new UnmappedFieldExecutor(size, comparatorType);
    }

    IndexFieldData indexFieldData = context.fieldData().getForField(fieldMapper);
    if (indexFieldData instanceof IndexNumericFieldData) {
      IndexNumericFieldData indexNumericFieldData = (IndexNumericFieldData) indexFieldData;
      if (indexNumericFieldData.getNumericType().isFloatingPoint()) {
        return new TermsDoubleFacetExecutor(
            indexNumericFieldData,
            size,
            shardSize,
            comparatorType,
            allTerms,
            context,
            excluded,
            searchScript,
            context.cacheRecycler());
      } else {
        return new TermsLongFacetExecutor(
            indexNumericFieldData,
            size,
            shardSize,
            comparatorType,
            allTerms,
            context,
            excluded,
            searchScript,
            context.cacheRecycler());
      }
    } else {
      if (script != null || "map".equals(executionHint)) {
        return new TermsStringFacetExecutor(
            indexFieldData,
            size,
            shardSize,
            comparatorType,
            allTerms,
            context,
            excluded,
            pattern,
            searchScript);
      } else if (indexFieldData instanceof IndexOrdinalsFieldData) {
        return new TermsStringOrdinalsFacetExecutor(
            (IndexOrdinalsFieldData) indexFieldData,
            size,
            shardSize,
            comparatorType,
            allTerms,
            context,
            excluded,
            pattern,
            ordinalsCacheAbove);
      } else {
        return new TermsStringFacetExecutor(
            indexFieldData,
            size,
            shardSize,
            comparatorType,
            allTerms,
            context,
            excluded,
            pattern,
            searchScript);
      }
    }
  }
Example #26
  @Override
  public void execute(SearchContext context) {
    final FieldsVisitor fieldsVisitor;
    Set<String> fieldNames = null;
    List<String> fieldNamePatterns = null;
    StoredFieldsContext storedFieldsContext = context.storedFieldsContext();

    if (storedFieldsContext == null) {
      // no fields specified, default to return source if no explicit indication
      if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
        context.fetchSourceContext(new FetchSourceContext(true));
      }
      fieldsVisitor = new FieldsVisitor(context.sourceRequested());
    } else if (storedFieldsContext.fetchFields() == false) {
      // disable stored fields entirely
      fieldsVisitor = null;
    } else {
      for (String fieldName : context.storedFieldsContext().fieldNames()) {
        if (fieldName.equals(SourceFieldMapper.NAME)) {
          if (context.hasFetchSourceContext()) {
            context.fetchSourceContext().fetchSource(true);
          } else {
            context.fetchSourceContext(new FetchSourceContext(true));
          }
          continue;
        }
        if (Regex.isSimpleMatchPattern(fieldName)) {
          if (fieldNamePatterns == null) {
            fieldNamePatterns = new ArrayList<>();
          }
          fieldNamePatterns.add(fieldName);
        } else {
          MappedFieldType fieldType = context.smartNameFieldType(fieldName);
          if (fieldType == null) {
            // Only fail if we know it is an object field, missing paths / fields shouldn't fail.
            if (context.getObjectMapper(fieldName) != null) {
              throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
            }
          }
          if (fieldNames == null) {
            fieldNames = new HashSet<>();
          }
          fieldNames.add(fieldName);
        }
      }
      boolean loadSource = context.sourceRequested();
      if (fieldNames == null && fieldNamePatterns == null) {
        // empty list specified, default to disable _source if no explicit indication
        fieldsVisitor = new FieldsVisitor(loadSource);
      } else {
        fieldsVisitor =
            new CustomFieldsVisitor(
                fieldNames == null ? Collections.emptySet() : fieldNames,
                fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns,
                loadSource);
      }
    }

    InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
    FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
    for (int index = 0; index < context.docIdsToLoadSize(); index++) {
      int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index];
      int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
      LeafReaderContext subReaderContext =
          context.searcher().getIndexReader().leaves().get(readerIndex);
      int subDocId = docId - subReaderContext.docBase;

      final InternalSearchHit searchHit;
      try {
        int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId);
        if (rootDocId != -1) {
          searchHit =
              createNestedSearchHit(
                  context,
                  docId,
                  subDocId,
                  rootDocId,
                  fieldNames,
                  fieldNamePatterns,
                  subReaderContext);
        } else {
          searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext);
        }
      } catch (IOException e) {
        throw ExceptionsHelper.convertToElastic(e);
      }

      hits[index] = searchHit;
      hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher());
      for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
        fetchSubPhase.hitExecute(context, hitContext);
      }
    }

    for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
      fetchSubPhase.hitsExecute(context, hits);
    }

    context
        .fetchResult()
        .hits(
            new InternalSearchHits(
                hits,
                context.queryResult().topDocs().totalHits,
                context.queryResult().topDocs().getMaxScore()));
  }
 public String[] resolveNodesIds(String... nodesIds) {
   if (isAllNodes(nodesIds)) {
     int index = 0;
     nodesIds = new String[nodes.size()];
     for (DiscoveryNode node : this) {
       nodesIds[index++] = node.id();
     }
     return nodesIds;
   } else {
     Set<String> resolvedNodesIds = new HashSet<String>(nodesIds.length);
     for (String nodeId : nodesIds) {
       if (nodeId.equals("_local")) {
         String localNodeId = localNodeId();
         if (localNodeId != null) {
           resolvedNodesIds.add(localNodeId);
         }
       } else if (nodeId.equals("_master")) {
         String masterNodeId = masterNodeId();
         if (masterNodeId != null) {
           resolvedNodesIds.add(masterNodeId);
         }
       } else if (nodeExists(nodeId)) {
         resolvedNodesIds.add(nodeId);
       } else {
         // not a node id, try and search by name
         for (DiscoveryNode node : this) {
           if (Regex.simpleMatch(nodeId, node.name())) {
             resolvedNodesIds.add(node.id());
           }
         }
         for (DiscoveryNode node : this) {
           if (node.address().match(nodeId)) {
             resolvedNodesIds.add(node.id());
           }
         }
         int index = nodeId.indexOf(':');
         if (index != -1) {
           String matchAttrName = nodeId.substring(0, index);
           String matchAttrValue = nodeId.substring(index + 1);
           if ("data".equals(matchAttrName)) {
             if (Booleans.parseBoolean(matchAttrValue, true)) {
               resolvedNodesIds.addAll(dataNodes.keySet());
             } else {
               resolvedNodesIds.removeAll(dataNodes.keySet());
             }
           } else if ("master".equals(matchAttrName)) {
             if (Booleans.parseBoolean(matchAttrValue, true)) {
               resolvedNodesIds.addAll(masterNodes.keySet());
             } else {
               resolvedNodesIds.removeAll(masterNodes.keySet());
             }
           } else {
             for (DiscoveryNode node : this) {
               for (Map.Entry<String, String> entry : node.attributes().entrySet()) {
                 String attrName = entry.getKey();
                 String attrValue = entry.getValue();
                 if (Regex.simpleMatch(matchAttrName, attrName)
                     && Regex.simpleMatch(matchAttrValue, attrValue)) {
                   resolvedNodesIds.add(node.id());
                 }
               }
             }
           }
         }
       }
     }
     return resolvedNodesIds.toArray(new String[resolvedNodesIds.size()]);
   }
 }
  private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(
      DocumentMapper documentMapper, GetFieldMappingsIndexRequest request)
      throws ElasticsearchException {
    MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>();
    final List<FieldMapper> allFieldMappers = documentMapper.mappers().mappers();
    for (String field : request.fields()) {
      if (Regex.isMatchAllPattern(field)) {
        for (FieldMapper fieldMapper : allFieldMappers) {
          addFieldMapper(
              fieldMapper.names().fullName(),
              fieldMapper,
              fieldMappings,
              request.includeDefaults());
        }
      } else if (Regex.isSimpleMatchPattern(field)) {
        // go through the field mappers 3 times, to make sure we give preference to the resolve
        // order: full name, index name, name.
        // also make sure we only store each mapper once.
        boolean[] resolved = new boolean[allFieldMappers.size()];
        for (int i = 0; i < allFieldMappers.size(); i++) {
          FieldMapper fieldMapper = allFieldMappers.get(i);
          if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
            addFieldMapper(
                fieldMapper.names().fullName(),
                fieldMapper,
                fieldMappings,
                request.includeDefaults());
            resolved[i] = true;
          }
        }
        for (int i = 0; i < allFieldMappers.size(); i++) {
          if (resolved[i]) {
            continue;
          }
          FieldMapper fieldMapper = allFieldMappers.get(i);
          if (Regex.simpleMatch(field, fieldMapper.names().indexName())) {
            addFieldMapper(
                fieldMapper.names().indexName(),
                fieldMapper,
                fieldMappings,
                request.includeDefaults());
            resolved[i] = true;
          }
        }
        for (int i = 0; i < allFieldMappers.size(); i++) {
          if (resolved[i]) {
            continue;
          }
          FieldMapper fieldMapper = allFieldMappers.get(i);
          if (Regex.simpleMatch(field, fieldMapper.names().name())) {
            addFieldMapper(
                fieldMapper.names().name(), fieldMapper, fieldMappings, request.includeDefaults());
            resolved[i] = true;
          }
        }

      } else {
        // not a pattern
        FieldMapper fieldMapper = documentMapper.mappers().smartNameFieldMapper(field);
        if (fieldMapper != null) {
          addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults());
        } else if (request.probablySingleFieldRequest()) {
          fieldMappings.put(field, FieldMappingMetaData.NULL);
        }
      }
    }
    return fieldMappings.immutableMap();
  }
Example #29
 /**
  * Filters the list of available indices down to the selected indices.
  *
  * @param availableIndices list of available indices
  * @param selectedIndices list of selected indices; supports '*' wildcards and '+'/'-' prefixes
  * @param indicesOptions options controlling how missing indices and empty wildcard matches are
  *     handled
  * @return the filtered list of indices
  */
 public static List<String> filterIndices(
     List<String> availableIndices, String[] selectedIndices, IndicesOptions indicesOptions) {
   if (selectedIndices == null || selectedIndices.length == 0) {
     return availableIndices;
   }
   Set<String> result = null;
   for (int i = 0; i < selectedIndices.length; i++) {
     String indexOrPattern = selectedIndices[i];
     boolean add = true;
     if (!indexOrPattern.isEmpty()) {
       if (availableIndices.contains(indexOrPattern)) {
         if (result != null) {
           result.add(indexOrPattern);
         }
         continue;
       }
       if (indexOrPattern.charAt(0) == '+') {
         add = true;
         indexOrPattern = indexOrPattern.substring(1);
          // if it's the first, start from an empty set
         if (i == 0) {
           result = new HashSet<>();
         }
       } else if (indexOrPattern.charAt(0) == '-') {
          // if it's the first, fill it with all the indices...
         if (i == 0) {
           result = new HashSet<>(availableIndices);
         }
         add = false;
         indexOrPattern = indexOrPattern.substring(1);
       }
     }
     if (indexOrPattern.isEmpty() || !Regex.isSimpleMatchPattern(indexOrPattern)) {
       if (!availableIndices.contains(indexOrPattern)) {
         if (!indicesOptions.ignoreUnavailable()) {
           throw new IndexMissingException(new Index(indexOrPattern));
         } else {
           if (result == null) {
             // add all the previous ones...
             result = new HashSet<>();
             result.addAll(availableIndices.subList(0, i));
           }
         }
       } else {
         if (result != null) {
           if (add) {
             result.add(indexOrPattern);
           } else {
             result.remove(indexOrPattern);
           }
         }
       }
       continue;
     }
     if (result == null) {
       // add all the previous ones...
       result = new HashSet<>();
       result.addAll(availableIndices.subList(0, i));
     }
     boolean found = false;
     for (String index : availableIndices) {
       if (Regex.simpleMatch(indexOrPattern, index)) {
         found = true;
         if (add) {
           result.add(index);
         } else {
           result.remove(index);
         }
       }
     }
     if (!found && !indicesOptions.allowNoIndices()) {
       throw new IndexMissingException(new Index(indexOrPattern));
     }
   }
   if (result == null) {
     return ImmutableList.copyOf(selectedIndices);
   }
   return ImmutableList.copyOf(result);
 }
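A caller sketch for filterIndices(...) with invented index names; the IndicesOptions value is passed in rather than constructed, since its factory methods vary across versions.

  // Hypothetical caller; shows additive/subtractive patterns against an invented index list.
  static List<String> filterIndicesSketch(IndicesOptions options) {
    List<String> available = Arrays.asList("logs-1", "logs-2", "metrics-1");
    // Everything matching "logs-*", minus "logs-2": the result is ["logs-1"].
    return filterIndices(available, new String[] {"logs-*", "-logs-2"}, options);
  }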
 @Override
 public boolean apply(
     CreateIndexClusterStateUpdateRequest request, IndexTemplateMetaData template) {
   return Regex.simpleMatch(template.template(), request.index());
 }