예제 #1
0
  /**
   * Creates the tribe service: builds one internal {@link TribeClientNode} per
   * {@code tribe.<name>.*} settings group and configures cluster-level blocks from the
   * {@code tribe.blocks.*} settings.
   *
   * @param settings this node's settings, including all {@code tribe.*} groups
   * @param clusterService cluster service of the tribe node itself
   * @param discoveryService used only to look up the no-master block that gets removed
   */
  @Inject
  public TribeService(
      Settings settings, ClusterService clusterService, DiscoveryService discoveryService) {
    super(settings);
    this.clusterService = clusterService;
    // Copy the groups so the two pseudo-groups below can be removed before iterating.
    Map<String, Settings> nodesSettings = new HashMap<>(settings.getGroups("tribe", true));
    nodesSettings.remove("blocks"); // remove prefix settings that don't indicate a client
    nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client
    for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
      Settings.Builder sb = Settings.builder().put(entry.getValue());
      // Client node is named "<this node's name>/<tribe key>".
      sb.put("name", settings.get("name") + "/" + entry.getKey());
      sb.put(
          Environment.PATH_HOME_SETTING.getKey(),
          Environment.PATH_HOME_SETTING.get(settings)); // pass through ES home dir
      sb.put(TRIBE_NAME, entry.getKey());
      // HTTP stays disabled on tribe clients unless explicitly enabled in the group's settings.
      if (sb.get("http.enabled") == null) {
        sb.put("http.enabled", false);
      }
      sb.put(Node.NODE_CLIENT_SETTING.getKey(), true);
      nodes.add(new TribeClientNode(sb.build()));
    }

    String[] blockIndicesWrite = Strings.EMPTY_ARRAY;
    String[] blockIndicesRead = Strings.EMPTY_ARRAY;
    String[] blockIndicesMetadata = Strings.EMPTY_ARRAY;
    if (!nodes.isEmpty()) {
      // remove the initial election / recovery blocks since we are not going to have a
      // master elected in this single tribe  node local "cluster"
      clusterService.removeInitialStateBlock(discoveryService.getNoMasterBlock());
      clusterService.removeInitialStateBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK);
      if (settings.getAsBoolean("tribe.blocks.write", false)) {
        clusterService.addInitialStateBlock(TRIBE_WRITE_BLOCK);
      }
      blockIndicesWrite = settings.getAsArray("tribe.blocks.write.indices", Strings.EMPTY_ARRAY);
      if (settings.getAsBoolean("tribe.blocks.metadata", false)) {
        clusterService.addInitialStateBlock(TRIBE_METADATA_BLOCK);
      }
      blockIndicesMetadata =
          settings.getAsArray("tribe.blocks.metadata.indices", Strings.EMPTY_ARRAY);
      blockIndicesRead = settings.getAsArray("tribe.blocks.read.indices", Strings.EMPTY_ARRAY);
      // Subscribe to each tribe's cluster-state updates so they get merged into this node's state.
      for (Node node : nodes) {
        node.injector().getInstance(ClusterService.class).add(new TribeClusterStateListener(node));
      }
    }
    this.blockIndicesMetadata = blockIndicesMetadata;
    this.blockIndicesRead = blockIndicesRead;
    this.blockIndicesWrite = blockIndicesWrite;

    // NOTE(review): presumably ON_CONFLICT_ANY means "any tribe wins" on index-name conflicts —
    // confirm against the on_conflict handling elsewhere in this class.
    this.onConflict = settings.get("tribe.on_conflict", ON_CONFLICT_ANY);
  }
  /**
   * Builds an {@link AnalysisService} from the bundled kuromoji test configuration, copying the
   * user-dictionary fixtures into a temporary config directory first.
   *
   * @return an analysis service configured from {@code kuromoji_analysis.json}
   * @throws IOException if the fixture files cannot be copied
   */
  public AnalysisService createAnalysisService() throws IOException {
    Path home = createTempDir();
    Path config = home.resolve("config");
    Files.createDirectory(config);
    // Close the classpath streams once copied; the previous version leaked both of them.
    try (InputStream emptyDict = getClass().getResourceAsStream("empty_user_dict.txt");
        InputStream dict = getClass().getResourceAsStream("user_dict.txt")) {
      Files.copy(emptyDict, config.resolve("empty_user_dict.txt"));
      Files.copy(dict, config.resolve("user_dict.txt"));
    }

    String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json";
    // NOTE(review): the stream passed to loadFromStream is not closed here either — confirm
    // whether Settings.Builder#loadFromStream closes it.
    Settings settings =
        Settings.settingsBuilder()
            .put(Environment.PATH_HOME_SETTING.getKey(), home)
            .loadFromStream(json, getClass().getResourceAsStream(json))
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
    final SettingsModule settingsModule = new SettingsModule(settings);
    settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
    Index index = new Index("test", "_na_");

    AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
    new AnalysisKuromojiPlugin().onModule(analysisModule);
    Injector parentInjector =
        new ModulesBuilder()
            .add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule)
            .createInjector();

    return parentInjector
        .getInstance(AnalysisRegistry.class)
        .build(IndexSettingsModule.newIndexSettings(index, settings));
  }
  /**
   * Node settings for the EC2 discovery third-party test: enables the plugin, injects random
   * read/write failure rates, and requires an external {@code tests.config} settings file.
   */
  @Override
  protected Settings nodeSettings(int nodeOrdinal) {
    Settings.Builder builder =
        Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .extendArray("plugin.types", Ec2DiscoveryPlugin.class.getName())
            .put("cloud.aws.test.random", randomInt())
            .put("cloud.aws.test.write_failures", 0.1)
            .put("cloud.aws.test.read_failures", 0.1);

    // if explicit, just load it and don't load from env
    final String configPath = System.getProperty("tests.config");
    if (!Strings.hasText(configPath)) {
      throw new IllegalStateException(
          "to run integration tests, you need to set -Dtest.thirdparty=true and -Dtests.config=/path/to/elasticsearch.yml");
    }
    try {
      builder.loadFromPath(PathUtils.get(configPath));
    } catch (SettingsException exception) {
      throw new IllegalStateException(
          "your test configuration file is incorrect: " + configPath, exception);
    }
    return builder.build();
  }
예제 #4
0
  /** Starts the two single-node tribe clusters ("tribe1", "tribe2") used by the tests. */
  @BeforeClass
  public static void createTribes() throws NodeValidationException {
    // Configuration shared by both tribe nodes: mock transport, no HTTP, local discovery,
    // and room for two nodes in the same storage location.
    Settings baseSettings =
        Settings.builder()
            .put(NetworkModule.HTTP_ENABLED.getKey(), false)
            .put("transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
            .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local")
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2)
            .build();

    Settings tribe1Settings =
        Settings.builder()
            .put(baseSettings)
            .put("cluster.name", "tribe1")
            .put("node.name", "tribe1_node")
            .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong())
            .build();
    tribe1 =
        new TribeClientNode(tribe1Settings, Collections.singleton(MockTcpTransportPlugin.class))
            .start();

    Settings tribe2Settings =
        Settings.builder()
            .put(baseSettings)
            .put("cluster.name", "tribe2")
            .put("node.name", "tribe2_node")
            .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong())
            .build();
    tribe2 =
        new TribeClientNode(tribe2Settings, Collections.singleton(MockTcpTransportPlugin.class))
            .start();
  }
  /**
   * Starts a network {@link TransportClient} against the given addresses and verifies
   * connectivity with a cluster-health call. If the cluster is unreachable the test is
   * assumption-skipped (not failed).
   *
   * @param tempDir home directory for the client
   * @param transportAddresses addresses of the running cluster
   * @return a connected client; callers are responsible for closing it
   */
  private static Client startClient(Path tempDir, TransportAddress... transportAddresses) {
    Settings clientSettings =
        Settings.settingsBuilder()
            .put("name", "qa_smoke_client_" + counter.getAndIncrement())
            .put(
                InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING,
                true) // prevents any settings to be replaced by system properties.
            .put("client.transport.ignore_cluster_name", true)
            .put(Environment.PATH_HOME_SETTING.getKey(), tempDir)
            .put("node.mode", "network")
            .build(); // we require network here!

    TransportClient.Builder transportClientBuilder =
        TransportClient.builder().settings(clientSettings);
    TransportClient client =
        transportClientBuilder.build().addTransportAddresses(transportAddresses);

    logger.info("--> Elasticsearch Java TransportClient started");

    Exception clientException = null;
    try {
      ClusterHealthResponse health = client.admin().cluster().prepareHealth().get();
      logger.info(
          "--> connected to [{}] cluster which is running [{}] node(s).",
          health.getClusterName(),
          health.getNumberOfNodes());
    } catch (Exception e) {
      clientException = e;
      // Close eagerly: assumeNoException below aborts this method, and the client's transport
      // threads would otherwise leak (previous version never closed it on this path).
      client.close();
    }

    assumeNoException(
        "Sounds like your cluster is not running at " + clusterAddresses, clientException);

    return client;
  }
 /** The default index/search/search-quote analyzers fall back to the standard analyzer. */
 public void testDefaultAnalyzers() throws IOException {
   Version version = VersionUtils.randomVersion(random());
   Settings settings =
       Settings.builder()
           .put(IndexMetaData.SETTING_VERSION_CREATED, version)
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .build();
   IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
   // Registry with no custom providers registered at all.
   AnalysisRegistry registry =
       new AnalysisRegistry(
           new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap());
   IndexAnalyzers indexAnalyzers = registry.build(idxSettings);
   assertThat(
       indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
   assertThat(
       indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
   assertThat(
       indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(),
       instanceOf(StandardAnalyzer.class));
 }
  /**
   * Verifies that a sniffing transport client receives the cluster state and ends up connected
   * to exactly the one local node within five seconds.
   */
  public void testWithSniffing() throws Exception {
    try (TransportClient client =
        TransportClient.builder()
            .settings(
                Settings.builder()
                    .put("client.transport.sniff", true)
                    .put("cluster.name", "cluster1")
                    .put("node.name", "transport_client_" + this.getTestName() + "_1")
                    .put("client.transport.nodes_sampler_interval", "1s")
                    .put(HEADER_SETTINGS)
                    .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                    .build())
            .addPlugin(InternalTransportService.TestPlugin.class)
            .build()) {
      client.addTransportAddress(address);

      // The test plugin's transport service exposes a latch that is released once a cluster
      // state arrives via sniffing.
      InternalTransportService service =
          (InternalTransportService) client.injector.getInstance(TransportService.class);

      if (!service.clusterStateLatch.await(5, TimeUnit.SECONDS)) {
        fail("takes way too long to get the cluster state");
      }

      assertThat(client.connectedNodes().size(), is(1));
      assertThat(client.connectedNodes().get(0).getAddress(), is((TransportAddress) address));
    }
  }
  /** An analyzer with filters/char_filters but neither a type nor a tokenizer is rejected. */
  public void testNoTypeOrTokenizerErrorMessage() throws IOException {
    Version version = VersionUtils.randomVersion(random());
    Settings settings =
        Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, version)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .putArray(
                "index.analysis.analyzer.test_analyzer.filter", "lowercase", "stop", "shingle")
            .putArray("index.analysis.analyzer.test_analyzer.char_filter", "html_strip")
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

    AnalysisRegistry registry =
        new AnalysisRegistry(
            new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap());
    IllegalArgumentException e =
        expectThrows(IllegalArgumentException.class, () -> registry.build(idxSettings));
    assertThat(
        e.getMessage(),
        equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
  }
 /** Pre-built analyzers are cached: independent registries hand out the same instances. */
 public void testBuiltInAnalyzersAreCached() throws IOException {
   Settings settings =
       Settings.builder()
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .build();
   Settings indexSettings =
       Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
   IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
   // Two completely separate registries must still share the pre-built analyzer instances.
   IndexAnalyzers indexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings);
   IndexAnalyzers otherIndexAnalyzers = emptyAnalysisRegistry(settings).build(idxSettings);
   final int numIters = randomIntBetween(5, 20);
   for (int i = 0; i < numIters; i++) {
     PreBuiltAnalyzers preBuiltAnalyzers =
         RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
     assertSame(
         indexAnalyzers.get(preBuiltAnalyzers.name()),
         otherIndexAnalyzers.get(preBuiltAnalyzers.name()));
   }
 }

 /** Creates an {@link AnalysisRegistry} with no custom providers registered. */
 private static AnalysisRegistry emptyAnalysisRegistry(Settings settings) {
   return new AnalysisRegistry(
       new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap());
 }
예제 #10
0
 /**
  * Adds access to all configurable paths.
  *
  * <p>Distribution directories (bin, lib, modules, plugins, config, scripts) get read-only
  * access; mutable directories (tmp, logs, shared data, data paths, repositories) get
  * read-write-delete; the PID file gets delete only.
  *
  * @param policy permission collection to add file permissions to
  * @param environment environment supplying the resolved paths
  */
 static void addFilePermissions(Permissions policy, Environment environment) {
   // read-only dirs
   addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink");
   addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink");
   addPath(
       policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink");
   addPath(
       policy, Environment.PATH_HOME_SETTING.getKey(), environment.pluginsFile(), "read,readlink");
   addPath(
       policy, Environment.PATH_CONF_SETTING.getKey(), environment.configFile(), "read,readlink");
   addPath(
       policy,
       Environment.PATH_SCRIPTS_SETTING.getKey(),
       environment.scriptsFile(),
       "read,readlink");
   // read-write dirs
   addPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete");
   addPath(
       policy,
       Environment.PATH_LOGS_SETTING.getKey(),
       environment.logsFile(),
       "read,readlink,write,delete");
   // shared data dir is optional
   if (environment.sharedDataFile() != null) {
     addPath(
         policy,
         Environment.PATH_SHARED_DATA_SETTING.getKey(),
         environment.sharedDataFile(),
         "read,readlink,write,delete");
   }
   for (Path path : environment.dataFiles()) {
     addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
   }
   // TODO: this should be removed in ES 6.0! We will no longer support data paths with the cluster
   // as a folder
   assert Version.CURRENT.major < 6 : "cluster name is no longer used in data path";
   // legacy cluster-name-suffixed data paths may not exist, hence addPathIfExists
   for (Path path : environment.dataWithClusterFiles()) {
     addPathIfExists(
         policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
   }
   for (Path path : environment.repoFiles()) {
     addPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete");
   }
   if (environment.pidFile() != null) {
     // we just need permission to remove the file if its elsewhere.
     policy.add(new FilePermission(environment.pidFile().toString(), "delete"));
   }
 }
 /** The client type setting is always reported as "transport", regardless of user settings. */
 public void testThatTransportClientSettingCannotBeChanged() {
   Settings baseSettings =
       settingsBuilder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
   try (TransportClient client = TransportClient.builder().settings(baseSettings).build()) {
     Settings effectiveSettings = client.injector.getInstance(Settings.class);
     assertThat(Client.CLIENT_TYPE_SETTING_S.get(effectiveSettings), is("transport"));
   }
 }
  /**
   * A camelCase filter name ("wordDelimiter") configured with {@code split_on_numerics=false}
   * must resolve to the configured filter, while the snake_case built-in ("word_delimiter")
   * keeps its default splitting behavior.
   */
  public void testConfigureCamelCaseTokenFilter() throws IOException {
    Settings settings =
        Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
    Settings indexSettings =
        Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put("index.analysis.filter.wordDelimiter.type", "word_delimiter")
            .put("index.analysis.filter.wordDelimiter.split_on_numerics", false)
            .put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
            .putArray(
                "index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")
            .put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace")
            .putArray(
                "index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter")
            .build();

    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);

    IndexAnalyzers indexAnalyzers =
        new AnalysisModule(new Environment(settings), emptyList())
            .getAnalysisRegistry()
            .build(idxSettings);

    // configured filter: numerics are NOT split off, so "j2se"/"j2ee" stay whole
    try (NamedAnalyzer analyzer = indexAnalyzers.get("custom_analyzer")) {
      assertNotNull(analyzer);
      List<String> token = collectTerms(analyzer, "J2SE j2ee");
      assertEquals(token.toString(), 2, token.size());
      assertEquals("j2se", token.get(0));
      assertEquals("j2ee", token.get(1));
    }

    // built-in filter: default behavior splits on letter/digit boundaries
    try (NamedAnalyzer analyzer = indexAnalyzers.get("custom_analyzer_1")) {
      assertNotNull(analyzer);
      List<String> token = collectTerms(analyzer, "J2SE j2ee");
      assertEquals(token.toString(), 6, token.size());
      assertEquals("j", token.get(0));
      assertEquals("2", token.get(1));
      assertEquals("se", token.get(2));
      assertEquals("j", token.get(3));
      assertEquals("2", token.get(4));
      assertEquals("ee", token.get(5));
    }
  }

  /** Runs {@code text} through {@code analyzer} and returns the emitted terms in order. */
  private static List<String> collectTerms(NamedAnalyzer analyzer, String text)
      throws IOException {
    TokenStream tokenStream = analyzer.tokenStream("foo", text);
    tokenStream.reset();
    CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
    List<String> terms = new ArrayList<>();
    while (tokenStream.incrementToken()) {
      terms.add(charTermAttribute.toString());
    }
    return terms;
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   // Build a registry with no custom providers; only a valid home path is required.
   Settings settings =
       Settings.builder()
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .build();
   Environment environment = new Environment(settings);
   registry =
       new AnalysisRegistry(
           environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap());
 }
 /** The asciifolding filter maps accented characters to their ASCII equivalents. */
 public void testDefault() throws IOException {
   Settings settings =
       settingsBuilder()
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
           .build();
   AnalysisService analysisService =
       AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
   TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding");
   // "ü" is folded to plain "u"
   Tokenizer tokenizer = new WhitespaceTokenizer();
   tokenizer.setReader(new StringReader("Ansprüche"));
   assertTokenStreamContents(tokenFilter.create(tokenizer), new String[] {"Anspruche"});
 }
예제 #15
0
 /** With {@code remove_trailing=false}, the stop factory must produce a SuggestStopFilter. */
 public void testThatSuggestStopFilterWorks() throws Exception {
   Settings settings =
       Settings.builder()
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .put("index.analysis.filter.my_stop.type", "stop")
           .put("index.analysis.filter.my_stop.remove_trailing", false)
           .build();
   ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
   TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_stop");
   assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
   Tokenizer tokenizer = new WhitespaceTokenizer();
   tokenizer.setReader(new StringReader("foo an"));
   TokenStream stream = tokenFilter.create(tokenizer);
   assertThat(stream, instanceOf(SuggestStopFilter.class));
 }
예제 #16
0
 /** With or without an explicit filter version, the stop factory must yield a StopFilter. */
 public void testCorrectPositionIncrementSetting() throws IOException {
   Builder builder = Settings.builder().put("index.analysis.filter.my_stop.type", "stop");
   // randomly leave the version unspecified to cover both configuration paths
   if (random().nextBoolean()) {
     builder.put("index.analysis.filter.my_stop.version", Version.LATEST);
   }
   builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
   ESTestCase.TestAnalysis analysis =
       AnalysisTestsHelper.createTestAnalysisFromSettings(builder.build());
   TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_stop");
   assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
   Tokenizer tokenizer = new WhitespaceTokenizer();
   tokenizer.setReader(new StringReader("foo bar"));
   TokenStream stream = tokenFilter.create(tokenizer);
   assertThat(stream, instanceOf(StopFilter.class));
 }
  /**
   * Checks that node version information is refreshed once the transport client actually
   * connects: connected nodes report the real current version, while merely listed/filtered
   * (never handshaked) nodes still report the minimum compatibility version.
   */
  public void testNodeVersionIsUpdated() {
    TransportClient client = (TransportClient) internalCluster().client();
    TransportClientNodesService nodeService = client.nodeService();
    // Throw-away non-data node in a different cluster ("foobar"): the client can list it but
    // it never becomes part of the client's own cluster.
    Node node =
        new Node(
            Settings.builder()
                .put(internalCluster().getDefaultSettings())
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                .put("node.name", "testNodeVersionIsUpdated")
                .put("http.enabled", false)
                .put(Node.NODE_DATA_SETTING.getKey(), false)
                .put("cluster.name", "foobar")
                .put(
                    InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(),
                    true) // make sure we get what we set :)
                .build());
    node.start();
    try {
      TransportAddress transportAddress =
          node.injector().getInstance(TransportService.class).boundAddress().publishAddress();
      client.addTransportAddress(transportAddress);
      assertThat(
          nodeService.connectedNodes().size(),
          greaterThanOrEqualTo(
              1)); // since we force transport clients there has to be one node started that we
                   // connect to.
      for (DiscoveryNode discoveryNode :
          nodeService.connectedNodes()) { // connected nodes have updated version
        assertThat(discoveryNode.getVersion(), equalTo(Version.CURRENT));
      }

      // listed nodes were never handshaked, so they keep the minimum compatibility version
      for (DiscoveryNode discoveryNode : nodeService.listedNodes()) {
        assertThat(discoveryNode.id(), startsWith("#transport#-"));
        assertThat(
            discoveryNode.getVersion(), equalTo(Version.CURRENT.minimumCompatibilityVersion()));
      }

      assertThat(nodeService.filteredNodes().size(), equalTo(1));
      for (DiscoveryNode discoveryNode : nodeService.filteredNodes()) {
        assertThat(
            discoveryNode.getVersion(), equalTo(Version.CURRENT.minimumCompatibilityVersion()));
      }
    } finally {
      node.close();
    }
  }
예제 #18
0
 /**
  * {@code enable_position_increments} was removed from the stop filter; configuring it must be
  * rejected with a clear error, with or without an explicit (old) filter version.
  */
 public void testPositionIncrementSetting() throws IOException {
   Builder builder =
       Settings.builder()
           .put("index.analysis.filter.my_stop.type", "stop")
           .put("index.analysis.filter.my_stop.enable_position_increments", false);
   if (random().nextBoolean()) {
     builder.put("index.analysis.filter.my_stop.version", "5.0");
   }
   builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
   Settings settings = builder.build();
   // Use expectThrows instead of try/fail/catch, consistent with the other analysis tests.
   IllegalArgumentException e =
       expectThrows(
           IllegalArgumentException.class,
           () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings));
   assertThat(
       e.getMessage(), containsString("enable_position_increments is not supported anymore"));
 }
예제 #19
0
  /** Both analyzers configured in stop.json must stop every token of the sample input. */
  public void testDefaultsCompoundAnalysis() throws Exception {
    String json = "/org/elasticsearch/index/analysis/stop.json";
    Settings settings =
        Settings.builder()
            .loadFromStream(json, getClass().getResourceAsStream(json))
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
    AnalysisService analysisService = createAnalysisService(idxSettings, settings);

    // every word of the input is a stop word, so the resulting token stream is empty
    for (String analyzerName : new String[] {"analyzer1", "analyzer2"}) {
      NamedAnalyzer analyzer = analysisService.analyzer(analyzerName);
      assertTokenStreamContents(analyzer.tokenStream("test", "to be or not to be"), new String[0]);
    }
  }
  /**
   * Configuring a camelCase filter name ("wordDelimiter") overrides only that name: the
   * snake_case built-in stays distinct, while unconfigured camelCase aliases ("porterStem")
   * keep resolving to the same instance as their snake_case form.
   */
  public void testCameCaseOverride() throws IOException {
    Settings settings =
        Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
    Settings indexSettings =
        settingsBuilder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put("index.analysis.filter.wordDelimiter.type", "word_delimiter")
            .put("index.analysis.filter.wordDelimiter.split_on_numerics", false)
            .put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
            .putArray(
                "index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")
            .put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace")
            .putArray(
                "index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter")
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
    AnalysisService analysisService =
        new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);

    TokenFilterFactory builtIn = analysisService.tokenFilter("word_delimiter");
    TokenFilterFactory configured = analysisService.tokenFilter("wordDelimiter");
    assertNotEquals(builtIn.name(), configured.name());
    assertNotSame(
        analysisService.tokenFilter("wordDelimiter"),
        analysisService.tokenFilter("word_delimiter"));
    assertSame(
        analysisService.tokenFilter("porterStem"), analysisService.tokenFilter("porter_stem"));

    // unconfigured: with no camelCase override, both spellings share the same built-in instance
    IndexSettings unconfiguredIdxSettings =
        IndexSettingsModule.newIndexSettings(
            "index",
            settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
    AnalysisService unconfiguredService =
        new AnalysisRegistry(null, new Environment(settings)).build(unconfiguredIdxSettings);
    assertSame(
        unconfiguredService.tokenFilter("wordDelimiter"),
        unconfiguredService.tokenFilter("word_delimiter"));
    assertSame(
        unconfiguredService.tokenFilter("porterStem"),
        unconfiguredService.tokenFilter("porter_stem"));
  }
 /** Closing an IndexAnalyzers twice must be idempotent and not throw. */
 public void testCloseIndexAnalyzersMultipleTimes() throws IOException {
   Settings nodeSettings =
       Settings.builder()
           .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
           .build();
   IndexSettings idxSettings =
       IndexSettingsModule.newIndexSettings(
           "index",
           Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
   IndexAnalyzers indexAnalyzers =
       new AnalysisRegistry(
               new Environment(nodeSettings),
               emptyMap(),
               emptyMap(),
               emptyMap(),
               emptyMap(),
               emptyMap())
           .build(idxSettings);
   indexAnalyzers.close();
   indexAnalyzers.close();
 }
예제 #22
0
  /** The html_strip char filter removes markup, and survives analyzer reuse. */
  public void testHtmlStripCharFilter() throws Exception {
    Settings settings =
        settingsBuilder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
            .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip")
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
    AnalysisService analysisService =
        new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);

    NamedAnalyzer analyzer = analysisService.analyzer("custom_with_char_filter");

    // analyze twice: the second pass proves the char filter is reinitialized correctly
    for (int i = 0; i < 2; i++) {
      assertTokenStreamContents(
          analyzer.tokenStream("test", "<b>hello</b>!"), new String[] {"hello"});
    }
  }
예제 #23
0
 /**
  * Boots a throw-away local-transport node (HTTP disabled) and returns its client, logging the
  * startup time even when node creation fails.
  */
 @Override
 protected Client createClient() {
   StopWatch watch = new StopWatch();
   try {
     Settings nodeSettings =
         Settings.builder()
             .put(NetworkModule.TRANSPORT_TYPE_SETTING.getKey(), NetworkModule.LOCAL_TRANSPORT)
             .put(NetworkModule.HTTP_ENABLED.getKey(), false)
             .put(Environment.PATH_HOME_SETTING.getKey(), dataPath)
             .build();
     Node node = new Node(nodeSettings);
     node.start();
     return node.client();
   } catch (NodeValidationException e) {
     // createClient cannot declare the checked exception, so wrap it (as the original did)
     throw new Error(e);
   } finally {
     logger.info(
         "create local elasticsearch node, dataPath={}, elapsedTime={}",
         dataPath,
         watch.elapsedTime());
   }
 }
예제 #24
0
  /**
   * Starts a tribe node connected to the two pre-started tribes ("t1", "t2") and asserts the
   * merged cluster state eventually contains all five expected nodes: the two tribe cluster
   * nodes, the tribe node itself, and its two internal tribe clients.
   *
   * @param extraSettings additional settings layered on top of the defaults (may override them)
   */
  private static void assertTribeNodeSuccessfullyCreated(Settings extraSettings) throws Exception {
    // The tribe clients do need it to make sure they can find their corresponding tribes using the
    // proper transport
    Settings settings =
        Settings.builder()
            .put(NetworkModule.HTTP_ENABLED.getKey(), false)
            .put("node.name", "tribe_node")
            .put("transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
            .put("discovery.type", "local")
            .put("tribe.t1.transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
            .put("tribe.t2.transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
            .put("tribe.t1.discovery.type", "local")
            .put("tribe.t2.discovery.type", "local")
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .put(extraSettings)
            .build();

    try (Node node =
        new MockNode(settings, Collections.singleton(MockTcpTransportPlugin.class)).start()) {
      try (Client client = node.client()) {
        // assertBusy: state merging from both tribes is asynchronous
        assertBusy(
            () -> {
              ClusterState state =
                  client.admin().cluster().prepareState().clear().setNodes(true).get().getState();
              assertThat(state.getClusterName().value(), equalTo("tribe_node_cluster"));
              assertThat(state.getNodes().getSize(), equalTo(5));
              for (DiscoveryNode discoveryNode : state.getNodes()) {
                assertThat(
                    discoveryNode.getName(),
                    either(equalTo("tribe1_node"))
                        .or(equalTo("tribe2_node"))
                        .or(equalTo("tribe_node"))
                        .or(equalTo("tribe_node/t1"))
                        .or(equalTo("tribe_node/t2")));
              }
            });
      }
    }
  }
예제 #25
0
 /**
  * Builds a {@link ScriptService} rooted in a fresh temp home containing one on-disk mock
  * script ({@code script1.mockscript}). File auto-reload is disabled so no
  * ResourceWatcherService is needed; caller-supplied settings are applied last and may
  * override the defaults set here.
  *
  * @param settings extra settings to layer on top of the defaults
  */
 ScriptService makeScriptService(Settings settings) throws Exception {
   Path homeDir = createTempDir();
   Path scriptsDir = homeDir.resolve("config").resolve("scripts");
   Files.createDirectories(scriptsDir);
   Path mockscript = scriptsDir.resolve("script1.mockscript");
   // Use the charset constant instead of the "UTF-8" name lookup: no UnsupportedEncodingException
   // path and no risk of a typo in the charset name.
   Files.write(mockscript, "1".getBytes(java.nio.charset.StandardCharsets.UTF_8));
   settings =
       Settings.builder()
           .put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
           // no file watching, so we don't need a ResourceWatcherService
           .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
           .put(settings)
           .build();
   Set<ScriptEngineService> engines =
       new HashSet<>(Collections.singletonList(new MockScriptEngine()));
   return new ScriptService(
       settings,
       new Environment(settings),
       engines,
       null, // no ResourceWatcherService (auto reload disabled above)
       new ScriptContextRegistry(Collections.emptyList()));
 }
  /**
   * Per-test fixture: wires a minimal Guice injector (settings, thread pool, script and
   * search modules, index settings) and uses it to build the {@code context}
   * QueryShardContext field that the tests exercise.
   *
   * <p>NOTE(review): module registration order and the statement order below appear
   * deliberate — each object is constructed from the ones before it — so do not reorder.
   *
   * @throws IOException if building the analysis registry fails
   */
  @Before
  public void setup() throws IOException {
    Settings settings =
        Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config"))
            .put("node.name", getClass().getName())
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
    // Dynamic-proxy Client that fails loudly on any call: the binding below must exist
    // for injection, but no test is expected to actually use the client.
    final Client proxy =
        (Client)
            Proxy.newProxyInstance(
                Client.class.getClassLoader(),
                new Class<?>[] {Client.class},
                (proxy1, method, args) -> {
                  throw new UnsupportedOperationException("client is just a dummy");
                });
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
    Index index = idxSettings.getIndex();
    SettingsModule settingsModule = new SettingsModule(settings);
    ScriptModule scriptModule = new ScriptModule();
    scriptModule.prepareSettings(settingsModule);
    // TODO: make this use a mock engine instead of mustache and it will no longer be messy!
    scriptModule.addScriptEngine(
        new ScriptEngineRegistry.ScriptEngineRegistration(
            MustacheScriptEngineService.class, MustacheScriptEngineService.TYPES));
    settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
    injector =
        new ModulesBuilder()
            .add(
                new EnvironmentModule(new Environment(settings)),
                settingsModule,
                new ThreadPoolModule(new ThreadPool(settings)),
                // Anonymous subclass skips transport-dependent search wiring.
                new SearchModule(settings, new NamedWriteableRegistry()) {
                  @Override
                  protected void configureSearch() {
                    // skip so we don't need transport
                  }
                },
                scriptModule,
                new IndexSettingsModule(index, settings),
                new AbstractModule() {
                  @Override
                  protected void configure() {
                    bind(Client.class).toInstance(proxy); // not needed here
                    Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
                    // Null ClusterService via a provider: a plain toInstance(null) would fail.
                    bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
                    bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
                  }
                })
            .createInjector();

    // Build the QueryShardContext dependencies by hand from the injector-provided pieces.
    AnalysisService analysisService =
        new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
    ScriptService scriptService = injector.getInstance(ScriptService.class);
    SimilarityService similarityService =
        new SimilarityService(idxSettings, Collections.emptyMap());
    MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
    // () -> context is a deliberate circular reference: MapperService captures the field
    // lazily, and the field is assigned at the end of this method.
    MapperService mapperService =
        new MapperService(
            idxSettings, analysisService, similarityService, mapperRegistry, () -> context);
    IndicesFieldDataCache cache =
        new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {});
    IndexFieldDataService indexFieldDataService =
        new IndexFieldDataService(
            idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService);
    // No-op listener: the tests don't observe bitset cache events.
    BitsetFilterCache bitsetFilterCache =
        new BitsetFilterCache(
            idxSettings,
            new BitsetFilterCache.Listener() {
              @Override
              public void onCache(ShardId shardId, Accountable accountable) {}

              @Override
              public void onRemoval(ShardId shardId, Accountable accountable) {}
            });
    IndicesQueriesRegistry indicesQueriesRegistry =
        injector.getInstance(IndicesQueriesRegistry.class);
    // Assign the field last so the () -> context supplier above resolves to this instance.
    context =
        new QueryShardContext(
            idxSettings,
            bitsetFilterCache,
            indexFieldDataService,
            mapperService,
            similarityService,
            scriptService,
            indicesQueriesRegistry,
            null,
            null);
  }
  /**
   * Setup for the whole base test class: builds the shared static fixtures (injector,
   * named-writeable registry, aggregation parsers, queries registry, random index types)
   * used by every subclass test.
   *
   * <p>NOTE(review): module order in the ModulesBuilder and the statement order below
   * look intentional (each fixture feeds the next) — do not reorder when editing.
   *
   * @throws IOException declared for setup helpers; not thrown directly here
   */
  @BeforeClass
  public static void init() throws IOException {
    // we have to prefer CURRENT since with the range of versions we support
    // it's rather unlikely to get the current actually.
    Version version =
        randomBoolean()
            ? Version.CURRENT
            : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
    Settings settings =
        Settings.builder()
            .put("node.name", AbstractQueryTestCase.class.toString())
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            // no file watching, so no ResourceWatcherService is needed
            .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
            .build();

    // Random index name; "_na_" is the placeholder UUID used by tests.
    index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
    Settings indexSettings =
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    final ThreadPool threadPool = new ThreadPool(settings);
    final ClusterService clusterService = createClusterService(threadPool);
    // Publish a cluster state containing the single test index (1 shard, 0 replicas).
    setState(
        clusterService,
        new ClusterState.Builder(clusterService.state())
            .metaData(
                new MetaData.Builder()
                    .put(
                        new IndexMetaData.Builder(index.getName())
                            .settings(indexSettings)
                            .numberOfShards(1)
                            .numberOfReplicas(0))));
    ScriptModule scriptModule = newTestScriptModule();
    List<Setting<?>> scriptSettings = scriptModule.getSettings();
    scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
    SettingsModule settingsModule =
        new SettingsModule(settings, scriptSettings, Collections.emptyList());

    // Anonymous subclasses below trim wiring the tests don't need.
    IndicesModule indicesModule =
        new IndicesModule(Collections.emptyList()) {
          @Override
          protected void configure() {
            bindMapperExtension();
          }
        };
    SearchModule searchModule =
        new SearchModule(settings, false, emptyList()) {
          @Override
          protected void configureSearch() {
            // Skip me
          }
        };
    // Merge both modules' named writeables into one shared registry.
    List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
    entries.addAll(indicesModule.getNamedWriteables());
    entries.addAll(searchModule.getNamedWriteables());
    namedWriteableRegistry = new NamedWriteableRegistry(entries);
    injector =
        new ModulesBuilder()
            .add(
                // Lambda module: bind the pre-built environment/thread pool/script service.
                (b) -> {
                  b.bind(Environment.class).toInstance(new Environment(settings));
                  b.bind(ThreadPool.class).toInstance(threadPool);
                  b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
                },
                settingsModule,
                indicesModule,
                searchModule,
                new IndexSettingsModule(index, settings),
                new AbstractModule() {
                  @Override
                  protected void configure() {
                    bind(ClusterService.class).toInstance(clusterService);
                    bind(CircuitBreakerService.class).toInstance(new NoneCircuitBreakerService());
                    bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
                  }
                })
            .createInjector();
    aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;
    // create some random type with some default field, those types will
    // stick around for all of the subclasses
    currentTypes = new String[randomIntBetween(0, 5)];
    for (int i = 0; i < currentTypes.length; i++) {
      String type = randomAsciiOfLengthBetween(1, 10);
      currentTypes[i] = type;
    }
    queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
    // STRICT matching: deprecated field names in queries fail rather than warn.
    parseFieldMatcher = ParseFieldMatcher.STRICT;
  }