public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
    Version version = VersionUtils.randomVersionBetween(
            getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_3_0_0));
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    Map<String, AnalyzerProvider> analyzers = new HashMap<>();
    analyzers.put("default_index", analyzerProvider("default_index"));
    analyzers.put("default_search", analyzerProvider("default_search"));
    AnalysisService analysisService = new AnalysisService(
            IndexSettingsModule.newIndexSettings("index", settings),
            analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
    assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
    Version version = VersionUtils.randomVersionBetween(
            random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    Map<String, AnalyzerProvider<?>> analyzers = new HashMap<>();
    analyzers.put("default_index", analyzerProvider("default_index"));
    analyzers.put("default_search", analyzerProvider("default_search"));
    IndexAnalyzers indexAnalyzers = registry.build(
            IndexSettingsModule.newIndexSettings("index", settings),
            analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
    assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertWarnings("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] "
            + "instead for index [index]");
}
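// The analyzerProvider(...) helper used by the two tests above is not shown in these snippets. A minimal
// sketch, assuming it just wraps an EnglishAnalyzer in a pre-built provider -- consistent with the
// EnglishAnalyzer assertions and with the PreBuiltAnalyzerProvider construction used further below:
private static AnalyzerProvider<?> analyzerProvider(final String name) {
    // AnalyzerScope.INDEX matches how the "default_index" override is registered in the other tests
    return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer());
}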
public void testAllVersionsTested() throws Exception {
    SortedSet<String> expectedVersions = new TreeSet<>();
    for (Version v : VersionUtils.allVersions()) {
        if (VersionUtils.isSnapshot(v)) continue;           // snapshots are unreleased, so there is no backcompat yet
        if (v.isRelease() == false) continue;                // no guarantees for prereleases
        if (v.onOrBefore(Version.V_2_0_0_beta1)) continue;   // we can only test back one major lucene version
        if (v.equals(Version.CURRENT)) continue;             // the current version is always compatible with itself
        expectedVersions.add("index-" + v.toString() + ".zip");
    }
    for (String index : indexes) {
        if (expectedVersions.remove(index) == false) {
            logger.warn("Old indexes tests contain extra index: {}", index);
        }
    }
    if (expectedVersions.isEmpty() == false) {
        StringBuilder msg = new StringBuilder("Old index tests are missing indexes:");
        for (String expected : expectedVersions) {
            msg.append("\n" + expected);
        }
        fail(msg.toString());
    }
}
@Override
protected void handleParsedResponse(final TransportResponse response, final TransportResponseHandler handler) {
    ElasticsearchAssertions.assertVersionSerializable(
            VersionUtils.randomVersionBetween(random, minVersion, maxVersion), response);
    super.handleParsedResponse(response, handler);
}
public void testEmptyName() throws IOException {
    String mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startObject("properties")
                        .startObject("")
                            .field("type", "ip")
                        .endObject()
                    .endObject()
                .endObject()
            .endObject()
            .string();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> parser.parse("type", new CompressedXContent(mapping)));
    assertThat(e.getMessage(), containsString("name cannot be empty string"));

    // before 5.x
    Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
    Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
    indexService = createIndex("test_old", oldIndexSettings);
    parser = indexService.mapperService().documentMapperParser();

    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, defaultMapper.mappingSource().string());
}
public void testDefaultAnalyzers() throws IOException {
    Version version = VersionUtils.randomVersion(random());
    Settings settings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, version)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
    IndexAnalyzers indexAnalyzers =
            new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap())
                    .build(idxSettings);
    assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
}
public void testNoTypeOrTokenizerErrorMessage() throws IOException {
    Version version = VersionUtils.randomVersion(random());
    Settings settings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, version)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
            .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
            .build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap())
                    .build(idxSettings));
    assertThat(e.getMessage(),
            equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
}
@SuppressWarnings("unchecked") private IngestProxyActionFilter buildFilter(int ingestNodes, int totalNodes) { ClusterState.Builder clusterState = new ClusterState.Builder(new ClusterName("_name")); DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder(); DiscoveryNode localNode = null; for (int i = 0; i < totalNodes; i++) { String nodeId = "node" + i; Map<String, String> attributes = new HashMap<>(); if (i >= ingestNodes) { attributes.put("ingest", "false"); } else if (randomBoolean()) { attributes.put("ingest", "true"); } DiscoveryNode node = new DiscoveryNode( nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random())); builder.put(node); if (i == totalNodes - 1) { localNode = node; } } clusterState.nodes(builder); ClusterService clusterService = mock(ClusterService.class); when(clusterService.localNode()).thenReturn(localNode); when(clusterService.state()).thenReturn(clusterState.build()); transportService = mock(TransportService.class); return new IngestProxyActionFilter(clusterService, transportService); }
private <T extends Throwable> T serialize(T exception) throws IOException {
    ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), exception);
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeThrowable(exception);
    StreamInput in = StreamInput.wrap(out.bytes());
    return in.readThrowable();
}
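// Illustrative caller of the round-trip helper above (a hypothetical test, not from the source): serialize(...)
// first asserts version serializability against a random version, then writes and re-reads the throwable.
public void testIOExceptionRoundTrip() throws IOException {
    IOException original = new IOException("boom");
    IOException roundTripped = serialize(original);
    assertEquals("boom", roundTripped.getMessage());
}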
@Override
public void sendRequest(final DiscoveryNode node, final long requestId, final String action,
        final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
    ElasticsearchAssertions.assertVersionSerializable(
            VersionUtils.randomVersionBetween(random, minVersion, maxVersion), request);
    super.sendRequest(node, requestId, action, request, options);
}
public void testInitialSearchParamsFields() {
    SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());

    // Test request without any fields
    Version remoteVersion = VersionUtils.randomVersion(random());
    assertThat(initialSearchParams(searchRequest, remoteVersion),
            not(either(hasKey("stored_fields")).or(hasKey("fields"))));

    // Setup some fields for the next two tests
    searchRequest.source().storedField("_source").storedField("_id");

    // Test stored_fields for versions that support it
    remoteVersion = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha4, null);
    assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("stored_fields", "_source,_id"));

    // Test fields for versions that support it
    remoteVersion = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0_alpha3);
    assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("fields", "_source,_id"));
}
@Test
public void testSerialization() throws Exception {
    int iterations = randomIntBetween(5, 20);
    for (int i = 0; i < iterations; i++) {
        IndicesOptions indicesOptions = IndicesOptions.fromOptions(
                randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean());
        ClusterStateRequest clusterStateRequest = new ClusterStateRequest()
                .routingTable(randomBoolean())
                .metaData(randomBoolean())
                .nodes(randomBoolean())
                .blocks(randomBoolean())
                .indices("testindex", "testindex2")
                .indicesOptions(indicesOptions);

        Version testVersion = VersionUtils.randomVersionBetween(
                random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT);
        BytesStreamOutput output = new BytesStreamOutput();
        output.setVersion(testVersion);
        clusterStateRequest.writeTo(output);

        StreamInput streamInput = StreamInput.wrap(output.bytes());
        streamInput.setVersion(testVersion);
        ClusterStateRequest deserializedCSRequest = new ClusterStateRequest();
        deserializedCSRequest.readFrom(streamInput);

        assertThat(deserializedCSRequest.routingTable(), equalTo(clusterStateRequest.routingTable()));
        assertThat(deserializedCSRequest.metaData(), equalTo(clusterStateRequest.metaData()));
        assertThat(deserializedCSRequest.nodes(), equalTo(clusterStateRequest.nodes()));
        assertThat(deserializedCSRequest.blocks(), equalTo(clusterStateRequest.blocks()));
        assertThat(deserializedCSRequest.indices(), equalTo(clusterStateRequest.indices()));

        if (testVersion.onOrAfter(Version.V_1_5_0)) {
            assertOptionsMatch(deserializedCSRequest.indicesOptions(), clusterStateRequest.indicesOptions());
        } else {
            // versions before V_1_5_0 use IndicesOptions.lenientExpandOpen()
            assertOptionsMatch(deserializedCSRequest.indicesOptions(), IndicesOptions.lenientExpandOpen());
        }
    }
}
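// assertOptionsMatch(...) is referenced above but not defined in this snippet. A minimal sketch, assuming it
// compares the boolean flags that IndicesOptions.fromOptions(...) randomizes in the test:
private static void assertOptionsMatch(IndicesOptions in, IndicesOptions out) {
    assertThat(in.ignoreUnavailable(), equalTo(out.ignoreUnavailable()));
    assertThat(in.allowNoIndices(), equalTo(out.allowNoIndices()));
    assertThat(in.expandWildcardsOpen(), equalTo(out.expandWildcardsOpen()));
    assertThat(in.expandWildcardsClosed(), equalTo(out.expandWildcardsClosed()));
}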
public void testOverrideDefaultAnalyzer() throws IOException {
    Version version = VersionUtils.randomVersion(getRandom());
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    AnalysisService analysisService = new AnalysisService(
            IndexSettingsModule.newIndexSettings("index", settings),
            Collections.singletonMap("default", analyzerProvider("default")),
            Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
    assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testOverrideDefaultAnalyzer() throws IOException {
    Version version = VersionUtils.randomVersion(random());
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    IndexAnalyzers indexAnalyzers = registry.build(
            IndexSettingsModule.newIndexSettings("index", settings),
            singletonMap("default", analyzerProvider("default")),
            emptyMap(), emptyMap(), emptyMap(), emptyMap());
    assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
    assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
    Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    AnalyzerProvider<?> defaultIndex =
            new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> registry.build(
                    IndexSettingsModule.newIndexSettings("index", settings),
                    singletonMap("default_index", defaultIndex),
                    emptyMap(), emptyMap(), emptyMap(), emptyMap()));
    assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
}
public void testAcceptDocValuesFormat() throws IOException {
    String mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startObject("properties")
                        .startObject("field")
                            .field("type", "string")
                            .field("doc_values_format", Codec.getDefault().docValuesFormat().getName())
                        .endObject()
                    .endObject()
                .endObject()
            .endObject()
            .string();
    int i = 0;
    for (Version v : VersionUtils.allVersions()) {
        if (v.onOrAfter(Version.V_2_0_0) == false) {
            // no need to test, we don't support upgrading from these versions
            continue;
        }
        IndexService indexService = createIndex("test-" + i++,
                Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        try {
            parser.parse("type", new CompressedXContent(mapping));
            if (v.onOrAfter(Version.V_2_0_0_beta1)) {
                fail("Elasticsearch 2.0 should not support custom doc values formats");
            }
        } catch (MapperParsingException e) {
            if (v.before(Version.V_2_0_0_beta1)) {
                // Elasticsearch 1.x should ignore custom doc values formats
                throw e;
            }
            Assert.assertThat(e.getMessage(), containsString("unsupported parameters: [doc_values_format"));
        }
    }
}
public void testOverrideDefaultIndexAnalyzer() {
    Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_3_0_0, Version.CURRENT);
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    try {
        AnalysisService analysisService = new AnalysisService(
                IndexSettingsModule.newIndexSettings("index", settings),
                Collections.singletonMap("default_index",
                        new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())),
                Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
        assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
    }
}
/** Setup for the whole base test class. */
@BeforeClass
public static void init() throws IOException {
    // we have to prefer CURRENT since with the range of versions we support
    // it's rather unlikely to get the current actually.
    Version version = randomBoolean()
            ? Version.CURRENT
            : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
    Settings settings = Settings.builder()
            .put("node.name", AbstractQueryTestCase.class.toString())
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
            .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
            .build();

    index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
    Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();

    final ThreadPool threadPool = new ThreadPool(settings);
    final ClusterService clusterService = createClusterService(threadPool);
    setState(clusterService, new ClusterState.Builder(clusterService.state())
            .metaData(new MetaData.Builder()
                    .put(new IndexMetaData.Builder(index.getName())
                            .settings(indexSettings)
                            .numberOfShards(1)
                            .numberOfReplicas(0))));

    ScriptModule scriptModule = newTestScriptModule();
    List<Setting<?>> scriptSettings = scriptModule.getSettings();
    scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
    SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());

    IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) {
        @Override
        protected void configure() {
            bindMapperExtension();
        }
    };
    SearchModule searchModule = new SearchModule(settings, false, emptyList()) {
        @Override
        protected void configureSearch() {
            // Skip me
        }
    };

    List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
    entries.addAll(indicesModule.getNamedWriteables());
    entries.addAll(searchModule.getNamedWriteables());
    namedWriteableRegistry = new NamedWriteableRegistry(entries);

    injector = new ModulesBuilder()
            .add((b) -> {
                        b.bind(Environment.class).toInstance(new Environment(settings));
                        b.bind(ThreadPool.class).toInstance(threadPool);
                        b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
                    },
                    settingsModule,
                    indicesModule,
                    searchModule,
                    new IndexSettingsModule(index, settings),
                    new AbstractModule() {
                        @Override
                        protected void configure() {
                            bind(ClusterService.class).toInstance(clusterService);
                            bind(CircuitBreakerService.class).toInstance(new NoneCircuitBreakerService());
                            bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
                        }
                    })
            .createInjector();
    aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;

    // create some random type with some default field, those types will
    // stick around for all of the subclasses
    currentTypes = new String[randomIntBetween(0, 5)];
    for (int i = 0; i < currentTypes.length; i++) {
        String type = randomAsciiOfLengthBetween(1, 10);
        currentTypes[i] = type;
    }
    queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
    parseFieldMatcher = ParseFieldMatcher.STRICT;
}
public void testExternalValues() throws Exception {
    Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
    Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    IndexService indexService = createIndex("test", settings);
    MapperRegistry mapperRegistry = new MapperRegistry(
            Collections.singletonMap(ExternalMapperPlugin.EXTERNAL,
                    new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")),
            Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE,
                    new ExternalMetadataMapper.TypeParser()));

    DocumentMapperParser parser = new DocumentMapperParser(
            indexService.getIndexSettings(), indexService.mapperService(), indexService.analysisService(),
            indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
    DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
            XContentFactory.jsonBuilder()
                    .startObject()
                        .startObject("type")
                            .startObject(ExternalMetadataMapper.CONTENT_TYPE)
                            .endObject()
                            .startObject("properties")
                                .startObject("field")
                                    .field("type", "external")
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                    .string()));

    ParsedDocument doc = documentMapper.parse("test", "type", "1",
            XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject().bytes());

    assertThat(doc.rootDoc().getField("field.bool"), notNullValue());
    assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));

    assertThat(doc.rootDoc().getField("field.point"), notNullValue());
    if (version.before(Version.V_2_2_0)) {
        assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
    } else {
        assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()),
                is(GeoEncodingUtils.mortonHash(51.0, 42.0)));
    }

    assertThat(doc.rootDoc().getField("field.shape"), notNullValue());

    assertThat(doc.rootDoc().getField("field.field"), notNullValue());
    assertThat(doc.rootDoc().getField("field.field").stringValue(), is("foo"));

    assertThat(doc.rootDoc().getField(ExternalMetadataMapper.FIELD_NAME).stringValue(),
            is(ExternalMetadataMapper.FIELD_VALUE));
}