@Override
 public InternalAggregation doReduce(
     List<InternalAggregation> aggregations, ReduceContext reduceContext) {
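   // Collect the partial aggregation object from each shard; if a reduce script is configured
   // it is run over the collected objects (exposed as "_aggs"), otherwise the list itself
   // becomes the reduced value.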
   List<Object> aggregationObjects = new ArrayList<>();
   for (InternalAggregation aggregation : aggregations) {
     InternalScriptedMetric mapReduceAggregation = (InternalScriptedMetric) aggregation;
     aggregationObjects.add(mapReduceAggregation.aggregation());
   }
   InternalScriptedMetric firstAggregation = ((InternalScriptedMetric) aggregations.get(0));
   Object aggregation;
   if (firstAggregation.reduceScript != null) {
     Map<String, Object> vars = new HashMap<>();
     vars.put("_aggs", aggregationObjects);
     if (firstAggregation.reduceScript.getParams() != null) {
       vars.putAll(firstAggregation.reduceScript.getParams());
     }
     CompiledScript compiledScript =
         reduceContext
             .scriptService()
             .compile(
                 firstAggregation.reduceScript,
                 ScriptContext.Standard.AGGS,
                 Collections.emptyMap());
     ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars);
     aggregation = script.run();
   } else {
     aggregation = aggregationObjects;
   }
   return new InternalScriptedMetric(
       firstAggregation.getName(),
       aggregation,
       firstAggregation.reduceScript,
       pipelineAggregators(),
       getMetaData());
 }
Example #2
 public void initialize(InternalAggregation.ReduceContext context) {
   script =
       context
           .scriptService()
           .executable(scriptLang, scriptString, scriptType, ScriptContext.Standard.AGGS, params);
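   // Expose the frequency/size holders to the script as variables; their values are set in
   // getScore(..) before each execution.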
   script.setNextVar("_subset_freq", subsetDfHolder);
   script.setNextVar("_subset_size", subsetSizeHolder);
   script.setNextVar("_superset_freq", supersetDfHolder);
   script.setNextVar("_superset_size", supersetSizeHolder);
 }
  @Test
  public void testChangingVarsCrossExecution2() {
    Map<String, Object> vars = new HashMap<String, Object>();
    Object compiledScript = se.compile("value");

    ExecutableScript script = se.executable(compiledScript, vars);
    script.setNextVar("value", 1);
    Object o = script.run();
    assertThat(((Number) o).intValue(), equalTo(1));

    script.setNextVar("value", 2);
    o = script.run();
    assertThat(((Number) o).intValue(), equalTo(2));
  }
Example #4
 private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
   try {
     if (scriptService != null) {
       ExecutableScript executableScript =
           scriptService.executable(script, ScriptContext.Standard.UPDATE, Collections.emptyMap());
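       // Expose the update context to the script as the "ctx" variable before running it.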
       executableScript.setNextVar("ctx", ctx);
       executableScript.run();
       // we need to unwrap the ctx...
       ctx = (Map<String, Object>) executableScript.unwrap(ctx);
     }
   } catch (Exception e) {
     throw new IllegalArgumentException("failed to execute script", e);
   }
   return ctx;
 }
Example #5
 @Override
 @SuppressWarnings("unchecked")
 public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
   try {
     // We use the ctx variable and the _source name to be consistent with the update api.
     ExecutableScript executable =
         scriptService.executable(script, ScriptContext.Standard.MAPPING);
     Map<String, Object> ctx = new HashMap<>(1);
     ctx.put("_source", sourceAsMap);
     executable.setNextVar("ctx", ctx);
     executable.run();
     ctx = (Map<String, Object>) executable.unwrap(ctx);
     return (Map<String, Object>) ctx.get("_source");
   } catch (Exception e) {
     throw new IllegalArgumentException("failed to execute script", e);
   }
 }
Example #6
 public ScriptHeuristic(
     ExecutableScript searchScript,
     String scriptLang,
     String scriptString,
     ScriptService.ScriptType scriptType,
     Map<String, Object> params) {
   subsetSizeHolder = new LongAccessor();
   supersetSizeHolder = new LongAccessor();
   subsetDfHolder = new LongAccessor();
   supersetDfHolder = new LongAccessor();
   this.script = searchScript;
   if (script != null) {
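     // Bind the shared holders so the heuristic script reads the current counts on each run.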
     script.setNextVar("_subset_freq", subsetDfHolder);
     script.setNextVar("_subset_size", subsetSizeHolder);
     script.setNextVar("_superset_freq", supersetDfHolder);
     script.setNextVar("_superset_size", supersetSizeHolder);
   }
   this.scriptLang = scriptLang;
   this.scriptString = scriptString;
   this.scriptType = scriptType;
   this.params = params;
 }
  @Test
  public void testChangingVarsCrossExecution2() {
    Map<String, Object> vars = new HashMap<String, Object>();
    Map<String, Object> ctx = new HashMap<String, Object>();
    Object compiledScript = se.compile("value");

    ExecutableScript script =
        se.executable(
            new CompiledScript(
                ScriptService.ScriptType.INLINE,
                "testChangingVarsCrossExecution2",
                "python",
                compiledScript),
            vars);
    script.setNextVar("value", 1);
    Object o = script.run();
    assertThat(((Number) o).intValue(), equalTo(1));

    script.setNextVar("value", 2);
    o = script.run();
    assertThat(((Number) o).intValue(), equalTo(2));
  }
  @Test
  public void testJavaScriptInnerArrayCreation() {
    Map<String, Object> ctx = new HashMap<String, Object>();
    Map<String, Object> doc = new HashMap<String, Object>();
    ctx.put("doc", doc);

    Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']");
    ExecutableScript script =
        se.executable(
            new CompiledScript(
                ScriptService.ScriptType.INLINE,
                "testJavaScriptInnerArrayCreation",
                "js",
                compiled),
            new HashMap<String, Object>());
    script.setNextVar("ctx", ctx);
    script.run();

    Map<String, Object> unwrap = (Map<String, Object>) script.unwrap(ctx);

    assertThat(((Map) unwrap.get("doc")).get("field1"), instanceOf(List.class));
  }
  @Override
  @Nullable
  public Query parse(QueryParseContext parseContext) throws IOException {
    XContentParser parser = parseContext.parser();
    TemplateContext templateContext = parse(parser, QUERY, PARAMS);
    ExecutableScript executable =
        this.scriptService.executable(
            "mustache", templateContext.template(), templateContext.params());
    BytesReference querySource = (BytesReference) executable.run();

    XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
    try {
      final QueryParseContext context =
          new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
      context.reset(qSourceParser);
      Query result = context.parseInnerQuery();
      parser.nextToken();
      return result;
    } finally {
      qSourceParser.close();
    }
  }
  @Override
  public Aggregator createInternal(
      Aggregator parent,
      boolean collectsFromSingleBucket,
      List<PipelineAggregator> pipelineAggregators,
      Map<String, Object> metaData)
      throws IOException {
    if (collectsFromSingleBucket == false) {
      return asMultiBucketAggregator(this, context, parent);
    }
    Map<String, Object> params = this.params;
    if (params != null) {
      params = deepCopyParams(params, context.searchContext());
    } else {
      params = new HashMap<>();
      params.put("_agg", new HashMap<String, Object>());
    }

    final ExecutableScript initScript = this.initScript.apply(params);
    final SearchScript mapScript = this.mapScript.apply(params);
    final ExecutableScript combineScript = this.combineScript.apply(params);

    final Script reduceScript = deepCopyScript(this.reduceScript, context.searchContext());
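    // The init script, if present, runs once per aggregator before any documents are collected.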
    if (initScript != null) {
      initScript.run();
    }
    return new ScriptedMetricAggregator(
        name,
        mapScript,
        combineScript,
        reduceScript,
        params,
        context,
        parent,
        pipelineAggregators,
        metaData);
  }
Example #11
 /**
  * Calculates score with a script
  *
  * @param subsetFreq The frequency of the term in the selected sample
  * @param subsetSize The size of the selected sample (typically number of docs)
  * @param supersetFreq The frequency of the term in the superset from which the sample was taken
  * @param supersetSize The size of the superset from which the sample was taken (typically number
  *     of docs)
  * @return a "significance" score
  */
 @Override
 public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
   if (script == null) {
     // In tests, when calling assertSearchResponse(..) the response is streamed one additional
     // time with an arbitrary version, see assertVersionSerializable(..).
     // Now, for versions before 1.5.0 the score is computed after streaming the response, but
     // for scripts the script does not exist yet.
     // assertSearchResponse() might therefore fail although there is no problem.
     // This should be replaced by an exception in 2.0.
     ESLoggerFactory.getLogger("script heuristic")
         .warn("cannot compute score - script has not been initialized yet.");
     return 0;
   }
   subsetSizeHolder.value = subsetSize;
   supersetSizeHolder.value = supersetSize;
   subsetDfHolder.value = subsetFreq;
   supersetDfHolder.value = supersetFreq;
   return ((Number) script.run()).doubleValue();
 }
  @SuppressWarnings("unchecked")
  public static synchronized MongoDBRiverDefinition parseSettings(
      String riverName,
      String riverIndexName,
      RiverSettings settings,
      ScriptService scriptService) {

    logger.trace("Parse river settings for {}", riverName);
    Preconditions.checkNotNull(riverName, "No riverName specified");
    Preconditions.checkNotNull(riverIndexName, "No riverIndexName specified");
    Preconditions.checkNotNull(settings, "No settings specified");

    Builder builder = new Builder();
    builder.riverName(riverName);
    builder.riverIndexName(riverIndexName);

    List<ServerAddress> mongoServers = new ArrayList<ServerAddress>();
    String mongoHost;
    int mongoPort;

    if (settings.settings().containsKey(MongoDBRiver.TYPE)) {
      Map<String, Object> mongoSettings =
          (Map<String, Object>) settings.settings().get(MongoDBRiver.TYPE);
      if (mongoSettings.containsKey(SERVERS_FIELD)) {
        Object mongoServersSettings = mongoSettings.get(SERVERS_FIELD);
        logger.trace("mongoServersSettings: " + mongoServersSettings);
        boolean array = XContentMapValues.isArray(mongoServersSettings);

        if (array) {
          ArrayList<Map<String, Object>> feeds =
              (ArrayList<Map<String, Object>>) mongoServersSettings;
          for (Map<String, Object> feed : feeds) {
            mongoHost = XContentMapValues.nodeStringValue(feed.get(HOST_FIELD), null);
            mongoPort = XContentMapValues.nodeIntegerValue(feed.get(PORT_FIELD), DEFAULT_DB_PORT);
            logger.trace("Server: " + mongoHost + " - " + mongoPort);
            try {
              mongoServers.add(new ServerAddress(mongoHost, mongoPort));
            } catch (UnknownHostException uhEx) {
              logger.warn("Cannot add mongo server {}:{}", uhEx, mongoHost, mongoPort);
            }
          }
        }
      } else {
        mongoHost =
            XContentMapValues.nodeStringValue(mongoSettings.get(HOST_FIELD), DEFAULT_DB_HOST);
        mongoPort =
            XContentMapValues.nodeIntegerValue(mongoSettings.get(PORT_FIELD), DEFAULT_DB_PORT);
        try {
          mongoServers.add(new ServerAddress(mongoHost, mongoPort));
        } catch (UnknownHostException uhEx) {
          logger.warn("Cannot add mongo server {}:{}", uhEx, mongoHost, mongoPort);
        }
      }
      builder.mongoServers(mongoServers);

      MongoClientOptions.Builder mongoClientOptionsBuilder =
          MongoClientOptions.builder().socketKeepAlive(true);

      // MongoDB options
      if (mongoSettings.containsKey(OPTIONS_FIELD)) {
        Map<String, Object> mongoOptionsSettings =
            (Map<String, Object>) mongoSettings.get(OPTIONS_FIELD);
        logger.trace("mongoOptionsSettings: " + mongoOptionsSettings);
        builder.mongoSecondaryReadPreference(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(SECONDARY_READ_PREFERENCE_FIELD), false));
        builder.connectTimeout(
            XContentMapValues.nodeIntegerValue(
                mongoOptionsSettings.get(CONNECT_TIMEOUT), DEFAULT_CONNECT_TIMEOUT));
        builder.socketTimeout(
            XContentMapValues.nodeIntegerValue(
                mongoOptionsSettings.get(SOCKET_TIMEOUT), DEFAULT_SOCKET_TIMEOUT));
        builder.dropCollection(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(DROP_COLLECTION_FIELD), false));
        String isMongos =
            XContentMapValues.nodeStringValue(mongoOptionsSettings.get(IS_MONGOS_FIELD), null);
        if (isMongos != null) {
          builder.isMongos(Boolean.valueOf(isMongos));
        }
        builder.mongoUseSSL(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(SSL_CONNECTION_FIELD), false));
        builder.mongoSSLVerifyCertificate(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(SSL_VERIFY_CERT_FIELD), true));
        builder.advancedTransformation(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(ADVANCED_TRANSFORMATION_FIELD), false));
        builder.skipInitialImport(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(SKIP_INITIAL_IMPORT_FIELD), false));
        builder.connectionsPerHost(
            XContentMapValues.nodeIntegerValue(
                mongoOptionsSettings.get(CONNECTIONS_PER_HOST), DEFAULT_CONNECTIONS_PER_HOST));
        builder.threadsAllowedToBlockForConnectionMultiplier(
            XContentMapValues.nodeIntegerValue(
                mongoOptionsSettings.get(THREADS_ALLOWED_TO_BLOCK_FOR_CONNECTION_MULTIPLIER),
                DEFAULT_THREADS_ALLOWED_TO_BLOCK_FOR_CONNECTION_MULTIPLIER));

        mongoClientOptionsBuilder
            .connectTimeout(builder.connectTimeout)
            .socketTimeout(builder.socketTimeout)
            .connectionsPerHost(builder.connectionsPerHost)
            .threadsAllowedToBlockForConnectionMultiplier(
                builder.threadsAllowedToBlockForConnectionMultiplier);

        if (builder.mongoSecondaryReadPreference) {
          mongoClientOptionsBuilder.readPreference(ReadPreference.secondaryPreferred());
        }

        if (builder.mongoUseSSL) {
          mongoClientOptionsBuilder.socketFactory(getSSLSocketFactory());
        }

        if (mongoOptionsSettings.containsKey(PARENT_TYPES_FIELD)) {
          Set<String> parentTypes = new HashSet<String>();
          Object parentTypesSettings = mongoOptionsSettings.get(PARENT_TYPES_FIELD);
          logger.trace("parentTypesSettings: " + parentTypesSettings);
          boolean array = XContentMapValues.isArray(parentTypesSettings);

          if (array) {
            ArrayList<String> fields = (ArrayList<String>) parentTypesSettings;
            for (String field : fields) {
              logger.trace("Field: " + field);
              parentTypes.add(field);
            }
          }

          builder.parentTypes(parentTypes);
        }

        if (mongoOptionsSettings.containsKey(STORE_STATISTICS_FIELD)) {
          Object storeStatistics = mongoOptionsSettings.get(STORE_STATISTICS_FIELD);
          boolean object = XContentMapValues.isObject(storeStatistics);
          if (object) {
            Map<String, Object> storeStatisticsSettings = (Map<String, Object>) storeStatistics;
            builder.storeStatistics(true);
            builder.statisticsIndexName(
                XContentMapValues.nodeStringValue(
                    storeStatisticsSettings.get(INDEX_OBJECT), riverName + "-stats"));
            builder.statisticsTypeName(
                XContentMapValues.nodeStringValue(
                    storeStatisticsSettings.get(TYPE_FIELD), "stats"));
          } else {
            builder.storeStatistics(XContentMapValues.nodeBooleanValue(storeStatistics, false));
            if (builder.storeStatistics) {
              builder.statisticsIndexName(riverName + "-stats");
              builder.statisticsTypeName("stats");
            }
          }
        }
        // builder.storeStatistics(XContentMapValues.nodeBooleanValue(mongoOptionsSettings.get(STORE_STATISTICS_FIELD),
        // false));
        builder.importAllCollections(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(IMPORT_ALL_COLLECTIONS_FIELD), false));
        builder.disableIndexRefresh(
            XContentMapValues.nodeBooleanValue(
                mongoOptionsSettings.get(DISABLE_INDEX_REFRESH_FIELD), false));
        builder.includeCollection(
            XContentMapValues.nodeStringValue(
                mongoOptionsSettings.get(INCLUDE_COLLECTION_FIELD), ""));

        if (mongoOptionsSettings.containsKey(INCLUDE_FIELDS_FIELD)) {
          Set<String> includeFields = new HashSet<String>();
          Object includeFieldsSettings = mongoOptionsSettings.get(INCLUDE_FIELDS_FIELD);
          logger.trace("includeFieldsSettings: " + includeFieldsSettings);
          boolean array = XContentMapValues.isArray(includeFieldsSettings);

          if (array) {
            ArrayList<String> fields = (ArrayList<String>) includeFieldsSettings;
            for (String field : fields) {
              logger.trace("Field: " + field);
              includeFields.add(field);
            }
          }

          if (!includeFields.contains(MongoDBRiver.MONGODB_ID_FIELD)) {
            includeFields.add(MongoDBRiver.MONGODB_ID_FIELD);
          }
          builder.includeFields(includeFields);
        } else if (mongoOptionsSettings.containsKey(EXCLUDE_FIELDS_FIELD)) {
          Set<String> excludeFields = new HashSet<String>();
          Object excludeFieldsSettings = mongoOptionsSettings.get(EXCLUDE_FIELDS_FIELD);
          logger.trace("excludeFieldsSettings: " + excludeFieldsSettings);
          boolean array = XContentMapValues.isArray(excludeFieldsSettings);

          if (array) {
            ArrayList<String> fields = (ArrayList<String>) excludeFieldsSettings;
            for (String field : fields) {
              logger.trace("Field: " + field);
              excludeFields.add(field);
            }
          }

          builder.excludeFields(excludeFields);
        }

        if (mongoOptionsSettings.containsKey(INITIAL_TIMESTAMP_FIELD)) {
          BSONTimestamp timeStamp = null;
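          // An optional script can compute the initial oplog timestamp; its result is parsed as
          // epoch milliseconds and converted to a BSONTimestamp below.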
          try {
            Map<String, Object> initalTimestampSettings =
                (Map<String, Object>) mongoOptionsSettings.get(INITIAL_TIMESTAMP_FIELD);
            String scriptType = "js";
            if (initalTimestampSettings.containsKey(INITIAL_TIMESTAMP_SCRIPT_TYPE_FIELD)) {
              scriptType =
                  initalTimestampSettings.get(INITIAL_TIMESTAMP_SCRIPT_TYPE_FIELD).toString();
            }
            if (initalTimestampSettings.containsKey(INITIAL_TIMESTAMP_SCRIPT_FIELD)) {

              ExecutableScript scriptExecutable =
                  scriptService.executable(
                      scriptType,
                      initalTimestampSettings.get(INITIAL_TIMESTAMP_SCRIPT_FIELD).toString(),
                      ScriptService.ScriptType.INLINE,
                      Maps.newHashMap());
              Object ctx = scriptExecutable.run();
              logger.trace("initialTimestamp script returned: {}", ctx);
              if (ctx != null) {
                long timestamp = Long.parseLong(ctx.toString());
                timeStamp = new BSONTimestamp((int) (new Date(timestamp).getTime() / 1000), 1);
              }
            }
          } catch (Throwable t) {
            logger.error("Could not set initial timestamp", t);
          } finally {
            builder.initialTimestamp(timeStamp);
          }
        }
      }
      builder.mongoClientOptions(mongoClientOptionsBuilder.build());

      // Credentials
      if (mongoSettings.containsKey(CREDENTIALS_FIELD)) {
        String dbCredential;
        String mau = "";
        String map = "";
        String maad = "";
        String mlu = "";
        String mlp = "";
        String mlad = "";
        // String mdu = "";
        // String mdp = "";
        Object mongoCredentialsSettings = mongoSettings.get(CREDENTIALS_FIELD);
        boolean array = XContentMapValues.isArray(mongoCredentialsSettings);

        if (array) {
          ArrayList<Map<String, Object>> credentials =
              (ArrayList<Map<String, Object>>) mongoCredentialsSettings;
          for (Map<String, Object> credential : credentials) {
            dbCredential = XContentMapValues.nodeStringValue(credential.get(DB_FIELD), null);
            if (ADMIN_DB_FIELD.equals(dbCredential)) {
              mau = XContentMapValues.nodeStringValue(credential.get(USER_FIELD), null);
              map = XContentMapValues.nodeStringValue(credential.get(PASSWORD_FIELD), null);
              maad = XContentMapValues.nodeStringValue(credential.get(AUTH_FIELD), null);
            } else if (LOCAL_DB_FIELD.equals(dbCredential)) {
              mlu = XContentMapValues.nodeStringValue(credential.get(USER_FIELD), null);
              mlp = XContentMapValues.nodeStringValue(credential.get(PASSWORD_FIELD), null);
              mlad = XContentMapValues.nodeStringValue(credential.get(AUTH_FIELD), null);
              // } else {
              // mdu = XContentMapValues.nodeStringValue(
              // credential.get(USER_FIELD), null);
              // mdp = XContentMapValues.nodeStringValue(
              // credential.get(PASSWORD_FIELD), null);
            }
          }
        }
        builder.mongoAdminUser(mau);
        builder.mongoAdminPassword(map);
        builder.mongoAdminAuthDatabase(maad);
        builder.mongoLocalUser(mlu);
        builder.mongoLocalPassword(mlp);
        builder.mongoLocalAuthDatabase(mlad);
        // mongoDbUser = mdu;
        // mongoDbPassword = mdp;
      }

      builder.mongoDb(XContentMapValues.nodeStringValue(mongoSettings.get(DB_FIELD), riverName));
      builder.mongoCollection(
          XContentMapValues.nodeStringValue(mongoSettings.get(COLLECTION_FIELD), riverName));
      builder.mongoGridFS(
          XContentMapValues.nodeBooleanValue(mongoSettings.get(GRIDFS_FIELD), false));
      if (mongoSettings.containsKey(FILTER_FIELD)) {
        String filter = XContentMapValues.nodeStringValue(mongoSettings.get(FILTER_FIELD), "");
        filter = removePrefix("o.", filter);
        builder.mongoCollectionFilter(convertToBasicDBObject(filter));
        // DBObject bsonObject = (DBObject) JSON.parse(filter);
        // builder.mongoOplogFilter(convertToBasicDBObject(addPrefix("o.",
        // filter)));
        builder.mongoOplogFilter(convertToBasicDBObject(removePrefix("o.", filter)));
        // } else {
        // builder.mongoOplogFilter("");
      }

      if (mongoSettings.containsKey(SCRIPT_FIELD)) {
        String scriptType = "js";
        builder.script(mongoSettings.get(SCRIPT_FIELD).toString());
        if (mongoSettings.containsKey("scriptType")) {
          scriptType = mongoSettings.get("scriptType").toString();
        } else if (mongoSettings.containsKey(SCRIPT_TYPE_FIELD)) {
          scriptType = mongoSettings.get(SCRIPT_TYPE_FIELD).toString();
        }
        builder.scriptType(scriptType);
      }
    } else {
      mongoHost = DEFAULT_DB_HOST;
      mongoPort = DEFAULT_DB_PORT;
      try {
        mongoServers.add(new ServerAddress(mongoHost, mongoPort));
        builder.mongoServers(mongoServers);
      } catch (UnknownHostException e) {
        e.printStackTrace();
      }
      builder.mongoDb(riverName);
      builder.mongoCollection(riverName);
    }

    if (settings.settings().containsKey(INDEX_OBJECT)) {
      Map<String, Object> indexSettings =
          (Map<String, Object>) settings.settings().get(INDEX_OBJECT);
      builder.indexName(
          XContentMapValues.nodeStringValue(indexSettings.get(NAME_FIELD), builder.mongoDb));
      builder.typeName(
          XContentMapValues.nodeStringValue(indexSettings.get(TYPE_FIELD), builder.mongoDb));

      Bulk.Builder bulkBuilder = new Bulk.Builder();
      if (indexSettings.containsKey(BULK_FIELD)) {
        Map<String, Object> bulkSettings = (Map<String, Object>) indexSettings.get(BULK_FIELD);
        int bulkActions =
            XContentMapValues.nodeIntegerValue(
                bulkSettings.get(ACTIONS_FIELD), DEFAULT_BULK_ACTIONS);
        bulkBuilder.bulkActions(bulkActions);
        String size =
            XContentMapValues.nodeStringValue(
                bulkSettings.get(SIZE_FIELD), DEFAULT_BULK_SIZE.toString());
        bulkBuilder.bulkSize(ByteSizeValue.parseBytesSizeValue(size));
        bulkBuilder.concurrentRequests(
            XContentMapValues.nodeIntegerValue(
                bulkSettings.get(CONCURRENT_REQUESTS_FIELD),
                EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY)));
        bulkBuilder.flushInterval(
            XContentMapValues.nodeTimeValue(
                bulkSettings.get(FLUSH_INTERVAL_FIELD), DEFAULT_FLUSH_INTERVAL));
        builder.throttleSize(
            XContentMapValues.nodeIntegerValue(
                indexSettings.get(THROTTLE_SIZE_FIELD), bulkActions * 5));
      } else {
        int bulkActions =
            XContentMapValues.nodeIntegerValue(
                indexSettings.get(BULK_SIZE_FIELD), DEFAULT_BULK_ACTIONS);
        bulkBuilder.bulkActions(bulkActions);
        bulkBuilder.bulkSize(DEFAULT_BULK_SIZE);
        bulkBuilder.flushInterval(
            XContentMapValues.nodeTimeValue(
                indexSettings.get(BULK_TIMEOUT_FIELD), DEFAULT_FLUSH_INTERVAL));
        bulkBuilder.concurrentRequests(
            XContentMapValues.nodeIntegerValue(
                indexSettings.get(CONCURRENT_BULK_REQUESTS_FIELD),
                EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY)));
        builder.throttleSize(
            XContentMapValues.nodeIntegerValue(
                indexSettings.get(THROTTLE_SIZE_FIELD), bulkActions * 5));
      }
      builder.bulk(bulkBuilder.build());
    } else {
      builder.indexName(builder.mongoDb);
      builder.typeName(builder.mongoDb);
      builder.bulk(new Bulk.Builder().build());
    }
    return builder.build();
  }
Example #13
  /*
   * More Ideas:
   *   - add ability to find whitespace problems -> we can build a poor man's decompounder with our index based on an automaton?
   *   - add ability to build different error models maybe based on a confusion matrix?
   *   - try to combine a token with its subsequent token to find / detect word splits (optional)
   *      - for this to work we need some way to define the position length of a candidate
   *   - phonetic filters could be interesting here too for candidate selection
   */
  @Override
  public Suggestion<? extends Entry<? extends Option>> innerExecute(
      String name,
      PhraseSuggestionContext suggestion,
      IndexSearcher searcher,
      CharsRefBuilder spare)
      throws IOException {
    double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood();
    final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize());
    final IndexReader indexReader = searcher.getIndexReader();
    List<PhraseSuggestionContext.DirectCandidateGenerator> generators = suggestion.generators();
    final int numGenerators = generators.size();
    final List<CandidateGenerator> gens = new ArrayList<>(generators.size());
    for (int i = 0; i < numGenerators; i++) {
      PhraseSuggestionContext.DirectCandidateGenerator generator = generators.get(i);
      DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(generator);
      Terms terms = MultiFields.getTerms(indexReader, generator.field());
      if (terms != null) {
        gens.add(
            new DirectCandidateGenerator(
                directSpellChecker,
                generator.field(),
                generator.suggestMode(),
                indexReader,
                realWordErrorLikelihood,
                generator.size(),
                generator.preFilter(),
                generator.postFilter(),
                terms));
      }
    }
    final String suggestField = suggestion.getField();
    final Terms suggestTerms = MultiFields.getTerms(indexReader, suggestField);
    if (gens.size() > 0 && suggestTerms != null) {
      final NoisyChannelSpellChecker checker =
          new NoisyChannelSpellChecker(
              realWordErrorLikelihood, suggestion.getRequireUnigram(), suggestion.getTokenLimit());
      final BytesRef separator = suggestion.separator();
      TokenStream stream =
          checker.tokenStream(
              suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField());

      WordScorer wordScorer =
          suggestion
              .model()
              .newScorer(
                  indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator);
      Result checkerResult =
          checker.getCorrections(
              stream,
              new MultiCandidateGeneratorWrapper(
                  suggestion.getShardSize(), gens.toArray(new CandidateGenerator[gens.size()])),
              suggestion.maxErrors(),
              suggestion.getShardSize(),
              wordScorer,
              suggestion.confidence(),
              suggestion.gramSize());

      PhraseSuggestion.Entry resultEntry =
          buildResultEntry(suggestion, spare, checkerResult.cutoffScore);
      response.addTerm(resultEntry);

      final BytesRefBuilder byteSpare = new BytesRefBuilder();
      final EarlyTerminatingCollector collector = Lucene.createExistsCollector();
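      // Prefer the collate query script over the collate filter script; it is executed below to
      // check each correction against the index.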
      final CompiledScript collateScript;
      if (suggestion.getCollateQueryScript() != null) {
        collateScript = suggestion.getCollateQueryScript();
      } else if (suggestion.getCollateFilterScript() != null) {
        collateScript = suggestion.getCollateFilterScript();
      } else {
        collateScript = null;
      }
      final boolean collatePrune = (collateScript != null) && suggestion.collatePrune();
      for (int i = 0; i < checkerResult.corrections.length; i++) {
        Correction correction = checkerResult.corrections[i];
        spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null));
        boolean collateMatch = true;
        if (collateScript != null) {
          // Checks whether the template query collateScript yields any documents
          // from the index for a correction; collateMatch is updated accordingly
          final Map<String, Object> vars = suggestion.getCollateScriptParams();
          vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
          final ExecutableScript executable = scriptService.executable(collateScript, vars);
          final BytesReference querySource = (BytesReference) executable.run();
          final ParsedQuery parsedQuery;
          if (suggestion.getCollateFilterScript() != null) {
            parsedQuery =
                suggestion
                    .getQueryParserService()
                    .parse(
                        QueryBuilders.constantScoreQuery(QueryBuilders.wrapperQuery(querySource)));
          } else {
            parsedQuery = suggestion.getQueryParserService().parse(querySource);
          }
          collateMatch = Lucene.exists(searcher, parsedQuery.query(), collector);
        }
        if (!collateMatch && !collatePrune) {
          continue;
        }
        Text phrase = new StringText(spare.toString());
        Text highlighted = null;
        if (suggestion.getPreTag() != null) {
          spare.copyUTF8Bytes(
              correction.join(
                  SEPARATOR, byteSpare, suggestion.getPreTag(), suggestion.getPostTag()));
          highlighted = new StringText(spare.toString());
        }
        if (collatePrune) {
          resultEntry.addOption(
              new Suggestion.Entry.Option(
                  phrase, highlighted, (float) (correction.score), collateMatch));
        } else {
          resultEntry.addOption(
              new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
        }
      }
    } else {
      response.addTerm(buildResultEntry(suggestion, spare, Double.MIN_VALUE));
    }
    return response;
  }
Example #14
  private void parseTemplate(ShardSearchRequest request) {

    BytesReference processedQuery;
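    // A pre-parsed template on the request is executed directly; otherwise the raw template
    // source is parsed, and possibly re-parsed to resolve a nested template id or file.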
    if (request.template() != null) {
      ExecutableScript executable =
          this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH);
      processedQuery = (BytesReference) executable.run();
    } else {
      if (!hasLength(request.templateSource())) {
        return;
      }
      XContentParser parser = null;
      Template template = null;

      try {
        parser =
            XContentFactory.xContent(request.templateSource())
                .createParser(request.templateSource());
        template = TemplateQueryParser.parse(parser, "params", "template");

        if (template.getType() == ScriptService.ScriptType.INLINE) {
          // Try to double parse for nested template id/file
          parser = null;
          try {
            ExecutableScript executable =
                this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
            processedQuery = (BytesReference) executable.run();
            parser = XContentFactory.xContent(processedQuery).createParser(processedQuery);
          } catch (ElasticsearchParseException epe) {
            // This was a non-nested template and the parse failure was due to that, so it is safe
            // to assume this refers to a file for backwards compatibility and keep going
            template =
                new Template(
                    template.getScript(),
                    ScriptService.ScriptType.FILE,
                    MustacheScriptEngineService.NAME,
                    null,
                    template.getParams());
            ExecutableScript executable =
                this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
            processedQuery = (BytesReference) executable.run();
          }
          if (parser != null) {
            try {
              Template innerTemplate = TemplateQueryParser.parse(parser);
              if (hasLength(innerTemplate.getScript())
                  && !innerTemplate.getType().equals(ScriptService.ScriptType.INLINE)) {
                // An inner template referring to a filename or id
                template =
                    new Template(
                        innerTemplate.getScript(),
                        innerTemplate.getType(),
                        MustacheScriptEngineService.NAME,
                        null,
                        template.getParams());
                ExecutableScript executable =
                    this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
                processedQuery = (BytesReference) executable.run();
              }
            } catch (ScriptParseException e) {
              // No inner template found, use original template from above
            }
          }
        } else {
          ExecutableScript executable =
              this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
          processedQuery = (BytesReference) executable.run();
        }
      } catch (IOException e) {
        throw new ElasticsearchParseException("Failed to parse template", e);
      } finally {
        Releasables.closeWhileHandlingException(parser);
      }

      if (!hasLength(template.getScript())) {
        throw new ElasticsearchParseException("Template must have [template] field configured");
      }
    }
    request.source(processedQuery);
  }
  protected void shardOperation(
      final UpdateRequest request,
      final ActionListener<UpdateResponse> listener,
      final int retryCount)
      throws ElasticSearchException {
    IndexService indexService = indicesService.indexServiceSafe(request.index());
    IndexShard indexShard = indexService.shardSafe(request.shardId());

    long getDate = System.currentTimeMillis();
    final GetResult getResult =
        indexShard
            .getService()
            .get(
                request.type(),
                request.id(),
                new String[] {
                  SourceFieldMapper.NAME,
                  RoutingFieldMapper.NAME,
                  ParentFieldMapper.NAME,
                  TTLFieldMapper.NAME
                },
                true);

    // no doc, what to do, what to do...
    if (!getResult.isExists()) {
      if (request.upsertRequest() == null) {
        listener.onFailure(
            new DocumentMissingException(
                new ShardId(request.index(), request.shardId()), request.type(), request.id()));
        return;
      }
      final IndexRequest indexRequest = request.upsertRequest();
      indexRequest
          .index(request.index())
          .type(request.type())
          .id(request.id())
          // it has to be a "create!"
          .create(true)
          .routing(request.routing())
          .percolate(request.percolate())
          .refresh(request.refresh())
          .replicationType(request.replicationType())
          .consistencyLevel(request.consistencyLevel());
      indexRequest.operationThreaded(false);
      // we fetch it from the index request so we don't generate the bytes twice; it's already done
      // in the index request
      final BytesReference updateSourceBytes = indexRequest.source();
      indexAction.execute(
          indexRequest,
          new ActionListener<IndexResponse>() {
            @Override
            public void onResponse(IndexResponse response) {
              UpdateResponse update =
                  new UpdateResponse(
                      response.getIndex(),
                      response.getType(),
                      response.getId(),
                      response.getVersion());
              update.setMatches(response.getMatches());
              if (request.fields() != null && request.fields().length > 0) {
                Tuple<XContentType, Map<String, Object>> sourceAndContent =
                    XContentHelper.convertToMap(updateSourceBytes, true);
                update.setGetResult(
                    extractGetResult(
                        request,
                        response.getVersion(),
                        sourceAndContent.v2(),
                        sourceAndContent.v1(),
                        updateSourceBytes));
              } else {
                update.setGetResult(null);
              }
              listener.onResponse(update);
            }

            @Override
            public void onFailure(Throwable e) {
              e = ExceptionsHelper.unwrapCause(e);
              if (e instanceof VersionConflictEngineException
                  || e instanceof DocumentAlreadyExistsException) {
                if (retryCount < request.retryOnConflict()) {
                  threadPool
                      .executor(executor())
                      .execute(
                          new Runnable() {
                            @Override
                            public void run() {
                              shardOperation(request, listener, retryCount + 1);
                            }
                          });
                  return;
                }
              }
              listener.onFailure(e);
            }
          });
      return;
    }

    if (getResult.internalSourceRef() == null) {
      // no source, we can't do anything, throw a failure...
      listener.onFailure(
          new DocumentSourceMissingException(
              new ShardId(request.index(), request.shardId()), request.type(), request.id()));
      return;
    }

    Tuple<XContentType, Map<String, Object>> sourceAndContent =
        XContentHelper.convertToMap(getResult.internalSourceRef(), true);
    String operation = null;
    String timestamp = null;
    Long ttl = null;
    Object fetchedTTL = null;
    final Map<String, Object> updatedSourceAsMap;
    final XContentType updateSourceContentType = sourceAndContent.v1();
    String routing =
        getResult.getFields().containsKey(RoutingFieldMapper.NAME)
            ? getResult.field(RoutingFieldMapper.NAME).getValue().toString()
            : null;
    String parent =
        getResult.getFields().containsKey(ParentFieldMapper.NAME)
            ? getResult.field(ParentFieldMapper.NAME).getValue().toString()
            : null;

    if (request.script() == null && request.doc() != null) {
      IndexRequest indexRequest = request.doc();
      updatedSourceAsMap = sourceAndContent.v2();
      if (indexRequest.ttl() > 0) {
        ttl = indexRequest.ttl();
      }
      timestamp = indexRequest.timestamp();
      if (indexRequest.routing() != null) {
        routing = indexRequest.routing();
      }
      if (indexRequest.parent() != null) {
        parent = indexRequest.parent();
      }
      XContentHelper.update(updatedSourceAsMap, indexRequest.sourceAsMap());
    } else {
      Map<String, Object> ctx = new HashMap<String, Object>(2);
      ctx.put("_source", sourceAndContent.v2());

      try {
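        // Run the update script with the document exposed as "ctx"; the script may modify _source
        // and set op, _timestamp and _ttl, which are read back below.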
        ExecutableScript script =
            scriptService.executable(request.scriptLang, request.script, request.scriptParams);
        script.setNextVar("ctx", ctx);
        script.run();
        // we need to unwrap the ctx...
        ctx = (Map<String, Object>) script.unwrap(ctx);
      } catch (Exception e) {
        throw new ElasticSearchIllegalArgumentException("failed to execute script", e);
      }

      operation = (String) ctx.get("op");
      timestamp = (String) ctx.get("_timestamp");
      fetchedTTL = ctx.get("_ttl");
      if (fetchedTTL != null) {
        if (fetchedTTL instanceof Number) {
          ttl = ((Number) fetchedTTL).longValue();
        } else {
          ttl = TimeValue.parseTimeValue((String) fetchedTTL, null).millis();
        }
      }

      updatedSourceAsMap = (Map<String, Object>) ctx.get("_source");
    }

    // apply script to update the source
    // No TTL has been given in the update script, so we keep the previous TTL value if there is one
    if (ttl == null) {
      ttl =
          getResult.getFields().containsKey(TTLFieldMapper.NAME)
              ? (Long) getResult.field(TTLFieldMapper.NAME).getValue()
              : null;
      if (ttl != null) {
        ttl =
            ttl
                - (System.currentTimeMillis()
                    - getDate); // It is an approximation of the exact TTL value; could be improved
      }
    }

    // TODO: external version type, does it make sense here? does not seem like it...

    if (operation == null || "index".equals(operation)) {
      final IndexRequest indexRequest =
          Requests.indexRequest(request.index())
              .type(request.type())
              .id(request.id())
              .routing(routing)
              .parent(parent)
              .source(updatedSourceAsMap, updateSourceContentType)
              .version(getResult.getVersion())
              .replicationType(request.replicationType())
              .consistencyLevel(request.consistencyLevel())
              .timestamp(timestamp)
              .ttl(ttl)
              .percolate(request.percolate())
              .refresh(request.refresh());
      indexRequest.operationThreaded(false);
      // we fetch it from the index request so we don't generate the bytes twice; it's already done
      // in the index request
      final BytesReference updateSourceBytes = indexRequest.source();
      indexAction.execute(
          indexRequest,
          new ActionListener<IndexResponse>() {
            @Override
            public void onResponse(IndexResponse response) {
              UpdateResponse update =
                  new UpdateResponse(
                      response.getIndex(),
                      response.getType(),
                      response.getId(),
                      response.getVersion());
              update.setMatches(response.getMatches());
              update.setGetResult(
                  extractGetResult(
                      request,
                      response.getVersion(),
                      updatedSourceAsMap,
                      updateSourceContentType,
                      updateSourceBytes));
              listener.onResponse(update);
            }

            @Override
            public void onFailure(Throwable e) {
              e = ExceptionsHelper.unwrapCause(e);
              if (e instanceof VersionConflictEngineException) {
                if (retryCount < request.retryOnConflict()) {
                  threadPool
                      .executor(executor())
                      .execute(
                          new Runnable() {
                            @Override
                            public void run() {
                              shardOperation(request, listener, retryCount + 1);
                            }
                          });
                  return;
                }
              }
              listener.onFailure(e);
            }
          });
    } else if ("delete".equals(operation)) {
      DeleteRequest deleteRequest =
          Requests.deleteRequest(request.index())
              .type(request.type())
              .id(request.id())
              .routing(routing)
              .parent(parent)
              .version(getResult.getVersion())
              .replicationType(request.replicationType())
              .consistencyLevel(request.consistencyLevel());
      deleteRequest.operationThreaded(false);
      deleteAction.execute(
          deleteRequest,
          new ActionListener<DeleteResponse>() {
            @Override
            public void onResponse(DeleteResponse response) {
              UpdateResponse update =
                  new UpdateResponse(
                      response.getIndex(),
                      response.getType(),
                      response.getId(),
                      response.getVersion());
              update.setGetResult(
                  extractGetResult(
                      request,
                      response.getVersion(),
                      updatedSourceAsMap,
                      updateSourceContentType,
                      null));
              listener.onResponse(update);
            }

            @Override
            public void onFailure(Throwable e) {
              e = ExceptionsHelper.unwrapCause(e);
              if (e instanceof VersionConflictEngineException) {
                if (retryCount < request.retryOnConflict()) {
                  threadPool
                      .executor(executor())
                      .execute(
                          new Runnable() {
                            @Override
                            public void run() {
                              shardOperation(request, listener, retryCount + 1);
                            }
                          });
                  return;
                }
              }
              listener.onFailure(e);
            }
          });
    } else if ("none".equals(operation)) {
      UpdateResponse update =
          new UpdateResponse(
              getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion());
      update.setGetResult(
          extractGetResult(
              request, getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, null));
      listener.onResponse(update);
    } else {
      logger.warn(
          "Used update operation [{}] for script [{}], doing nothing...",
          operation,
          request.script);
      listener.onResponse(
          new UpdateResponse(
              getResult.getIndex(),
              getResult.getType(),
              getResult.getId(),
              getResult.getVersion()));
    }
  }
  @Override
  public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
    InternalMultiBucketAggregation<
            InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>
        originalAgg =
            (InternalMultiBucketAggregation<
                    InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>)
                aggregation;
    List<? extends Bucket> buckets = originalAgg.getBuckets();

    CompiledScript compiledScript =
        reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext);
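    // Compile the script once, then evaluate it per bucket with the resolved buckets_path values
    // bound as variables; buckets for which the script returns null are kept unchanged.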
    List newBuckets = new ArrayList<>();
    for (Bucket bucket : buckets) {
      Map<String, Object> vars = new HashMap<>();
      if (script.getParams() != null) {
        vars.putAll(script.getParams());
      }
      boolean skipBucket = false;
      for (Map.Entry<String, String> entry : bucketsPathsMap.entrySet()) {
        String varName = entry.getKey();
        String bucketsPath = entry.getValue();
        Double value = resolveBucketValue(originalAgg, bucket, bucketsPath, gapPolicy);
        if (GapPolicy.SKIP == gapPolicy && (value == null || Double.isNaN(value))) {
          skipBucket = true;
          break;
        }
        vars.put(varName, value);
      }
      if (skipBucket) {
        newBuckets.add(bucket);
      } else {
        ExecutableScript executableScript =
            reduceContext.scriptService().executable(compiledScript, vars);
        Object returned = executableScript.run();
        if (returned == null) {
          newBuckets.add(bucket);
        } else {
          if (!(returned instanceof Number)) {
            throw new AggregationExecutionException(
                "series_arithmetic script for reducer [" + name() + "] must return a Number");
          }
          List<InternalAggregation> aggs =
              new ArrayList<>(eagerTransform(bucket.getAggregations().asList(), FUNCTION));
          aggs.add(
              new InternalSimpleValue(
                  name(),
                  ((Number) returned).doubleValue(),
                  formatter,
                  new ArrayList<PipelineAggregator>(),
                  metaData()));
          InternalMultiBucketAggregation.InternalBucket newBucket =
              originalAgg.createBucket(
                  new InternalAggregations(aggs),
                  (InternalMultiBucketAggregation.InternalBucket) bucket);
          newBuckets.add(newBucket);
        }
      }
    }
    return originalAgg.create(newBuckets);
  }