public void testInlineTemplate() throws ExecutionException, InterruptedException {
    Map<String, Object> params = new HashMap<>();
    params.put("value", "bar");
    params.put("size", 20);
    SearchTemplateResponse response =
        prepareRenderSearchTemplate(TEMPLATE_CONTENTS, ScriptType.INLINE, params).get();
    assertThat(response, notNullValue());
    assertFalse(response.hasResponse());
    BytesReference source = response.getSource();
    assertThat(source, notNullValue());
    Map<String, Object> sourceAsMap = XContentHelper.convertToMap(source, false).v2();
    assertThat(sourceAsMap, notNullValue());
    String expected = TEMPLATE_CONTENTS.replace("{{value}}", "bar").replace("{{size}}", "20");
    Map<String, Object> expectedMap =
        XContentHelper.convertToMap(new BytesArray(expected), false).v2();
    assertThat(sourceAsMap, equalTo(expectedMap));

    params = new HashMap<>();
    params.put("value", "baz");
    params.put("size", 100);
    response = prepareRenderSearchTemplate(TEMPLATE_CONTENTS, ScriptType.INLINE, params).get();
    assertThat(response, notNullValue());
    assertFalse(response.hasResponse());
    source = response.getSource();
    assertThat(source, notNullValue());
    sourceAsMap = XContentHelper.convertToMap(source, false).v2();
    expected = TEMPLATE_CONTENTS.replace("{{value}}", "baz").replace("{{size}}", "100");
    expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2();
    assertThat(sourceAsMap, equalTo(expectedMap));
}
private static GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception {
    XContentParser parser = XContentHelper.createParser(sortBuilder.bytes());
    QueryParseContext parseContext =
        new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT);
    parser.nextToken();
    return GeoDistanceSortBuilder.fromXContent(parseContext, null);
}
public Filter getInnerFilter() throws IOException {
    if (filterParsed) {
        return innerFilter;
    } else {
        if (path == null) {
            throw new QueryParsingException(parseContext.index(), "[nested] requires 'path' field");
        }
        if (!filterFound) {
            throw new QueryParsingException(
                parseContext.index(), "[nested] requires either 'query' or 'filter' field");
        }
        setPathLevel();
        XContentParser old = parseContext.parser();
        try {
            XContentParser innerParser = XContentHelper.createParser(source);
            parseContext.parser(innerParser);
            innerFilter = parseContext.parseInnerFilter();
            filterParsed = true;
            return innerFilter;
        } finally {
            resetPathLevel();
            parseContext.parser(old);
        }
    }
}
static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
    QueryParseContext context =
        new QueryParseContext(new Index("test"), SearchContext.current().queryParserService());
    XContentParser parser = XContentHelper.createParser(queryBuilder.buildAsBytes());
    context.reset(parser);
    return context.parseInnerQuery();
}
public Map<String, Object> asMap() {
    try {
        XContentBuilder builder = jsonBuilder()
            .startObject()
            .field("rivername", name)
            .field("settings", settings)
            .field("locale", LocaleUtil.fromLocale(locale))
            .field("job", job)
            .field("sql", sql)
            .field("autocommit", autocommit)
            .field("fetchsize", fetchSize)
            .field("maxrows", maxRows)
            .field("retries", retries)
            .field("maxretrywait", maxretrywait)
            .field("resultsetconcurrency", resultSetConcurrency)
            .field("resultsettype", resultSetType)
            .field("rounding", rounding)
            .field("scale", scale)
            .field("columnCreatedAt", columnCreatedAt)
            .field("columnUpdatedAt", columnUpdatedAt)
            .field("columnDeletedAt", columnDeletedAt)
            .field("columnEscape", columnEscape)
            .field("ignoreNull", ignoreNull)
            .field("allowConcurrency", allowConcurrency)
            .endObject();
        return XContentHelper.convertToMap(builder.bytes(), true).v2();
    } catch (IOException e) {
        // should really not happen
        return new HashMap<String, Object>();
    }
}
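// A minimal sketch (not from the original sources) of the builder-to-map round trip used
// above, assuming the same pre-5.x XContentHelper API as the surrounding snippets; the
// method and field names here are hypothetical.
static Map<String, Object> builderRoundTripExample() throws IOException {
    XContentBuilder builder = jsonBuilder().startObject().field("example", "value").endObject();
    // convertToMap returns a Tuple of (detected XContentType, parsed map); v2() is the map
    return XContentHelper.convertToMap(builder.bytes(), true).v2();
}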
protected DocIndexMetaData merge(
        DocIndexMetaData other,
        TransportPutIndexTemplateAction transportPutIndexTemplateAction,
        boolean thisIsCreatedFromTemplate)
        throws IOException {
    if (schemaEquals(other)) {
        return this;
    } else if (thisIsCreatedFromTemplate) {
        if (this.references.size() < other.references.size()) {
            // this is older, update template and return other
            // settings in template are always authoritative for table information about
            // number_of_shards and number_of_replicas
            updateTemplate(other, transportPutIndexTemplateAction, this.metaData.settings());
            // merge the new mapping with the template settings
            return new DocIndexMetaData(
                    IndexMetaData.builder(other.metaData).settings(this.metaData.settings()).build(),
                    other.ident)
                .build();
        } else if (references().size() == other.references().size()
                && !references().keySet().equals(other.references().keySet())) {
            XContentHelper.update(defaultMappingMap, other.defaultMappingMap, false);
            // update the template with new information
            updateTemplate(this, transportPutIndexTemplateAction, this.metaData.settings());
            return this;
        }
        // other is older, just return this
        return this;
    } else {
        throw new TableAliasSchemaException(other.ident.name());
    }
}
public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException {
    String json = "{\n"
        + " \"VDcvDuFjE\" : [ \"7umzzv8eychg\", \"dmdgmt5z13uw\", "
        + " \"ezu09wxw6v4c\", \"kc7s3515p6k6\", \"jgeuvjwrmfzn\", \"kcpcfj7ruyf8\" ],\n"
        + " \"unit\" : \"m\",\n"
        + " \"distance_type\" : \"sloppy_arc\",\n"
        + " \"mode\" : \"MAX\",\n"
        + " \"nested_filter\" : {\n"
        + " \"ids\" : {\n"
        + " \"type\" : [ ],\n"
        + " \"values\" : [ ],\n"
        + " \"boost\" : 5.711116\n"
        + " }\n"
        + " },\n"
        + " \"validation_method\" : \"STRICT\"\n"
        + " }";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();
    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);
    GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(context, json);
    assertEquals(
        "[-19.700583312660456, -2.8225036337971687, "
            + "31.537466906011105, -74.63590376079082, "
            + "43.71844606474042, -5.548660643398762, "
            + "-37.20467280596495, 38.71751043945551, "
            + "-69.44606635719538, 84.25200328230858, "
            + "-39.03717711567879, 44.74099852144718]",
        Arrays.toString(result.points()));
}
public void testRegisteredQueries() throws IOException {
    SearchModule module = new SearchModule(Settings.EMPTY, false, emptyList());
    List<String> allSupportedQueries = new ArrayList<>();
    Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES);
    Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES);
    String[] supportedQueries = allSupportedQueries.toArray(new String[allSupportedQueries.size()]);
    assertThat(module.getQueryParserRegistry().getNames(), containsInAnyOrder(supportedQueries));

    IndicesQueriesRegistry indicesQueriesRegistry = module.getQueryParserRegistry();
    XContentParser dummyParser = XContentHelper.createParser(new BytesArray("{}"));
    for (String queryName : supportedQueries) {
        indicesQueriesRegistry.lookup(
            queryName, ParseFieldMatcher.EMPTY, dummyParser.getTokenLocation());
    }
    for (String queryName : NON_DEPRECATED_QUERIES) {
        QueryParser<?> queryParser = indicesQueriesRegistry.lookup(
            queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation());
        assertThat(queryParser, notNullValue());
    }
    for (String queryName : DEPRECATED_QUERIES) {
        try {
            indicesQueriesRegistry.lookup(
                queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation());
            fail("query is deprecated, getQueryParser should have failed in strict mode");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("Deprecated field [" + queryName + "] used"));
        }
    }
}
@Override
protected void afterTestFailed() {
    logger.info(
        "Stash dump on failure [{}]", XContentHelper.toString(restTestExecutionContext.stash()));
    // after we reset the global cluster, we have to make sure the client gets re-initialized too
    restTestExecutionContext.resetClient();
}
@Override public String toString() { String sSource = "_na_"; try { sSource = XContentHelper.convertToJson(source, false); } catch (Exception e) { // ignore } return "index {[" + index + "][" + type + "][" + id + "], source[" + sSource + "]}"; }
@Test
public void inlineTemplate() {
    Map<String, Object> params = new HashMap<>();
    params.put("value", "bar");
    params.put("size", 20);
    Template template = new Template(
        TEMPLATE_CONTENTS, ScriptType.INLINE, MustacheScriptEngineService.NAME,
        XContentType.JSON, params);
    RenderSearchTemplateResponse response =
        client().admin().indices().prepareRenderSearchTemplate().template(template).get();
    assertThat(response, notNullValue());
    BytesReference source = response.source();
    assertThat(source, notNullValue());
    Map<String, Object> sourceAsMap = XContentHelper.convertToMap(source, false).v2();
    assertThat(sourceAsMap, notNullValue());
    String expected = TEMPLATE_CONTENTS.replace("{{value}}", "bar").replace("{{size}}", "20");
    Map<String, Object> expectedMap =
        XContentHelper.convertToMap(new BytesArray(expected), false).v2();
    assertThat(sourceAsMap, equalTo(expectedMap));

    params = new HashMap<>();
    params.put("value", "baz");
    params.put("size", 100);
    template = new Template(
        TEMPLATE_CONTENTS, ScriptType.INLINE, MustacheScriptEngineService.NAME,
        XContentType.JSON, params);
    response = client().admin().indices().prepareRenderSearchTemplate().template(template).get();
    assertThat(response, notNullValue());
    source = response.source();
    assertThat(source, notNullValue());
    sourceAsMap = XContentHelper.convertToMap(source, false).v2();
    expected = TEMPLATE_CONTENTS.replace("{{value}}", "baz").replace("{{size}}", "100");
    expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2();
    assertThat(sourceAsMap, equalTo(expectedMap));
}
public void testThatFilterIncludesEmptyObjectWhenUsingIncludes() throws Exception {
    XContentBuilder builder =
        XContentFactory.jsonBuilder().startObject().startObject("obj").endObject().endObject();
    Tuple<XContentType, Map<String, Object>> mapTuple =
        XContentHelper.convertToMap(builder.bytes(), true);
    Map<String, Object> filteredSource =
        XContentMapValues.filter(mapTuple.v2(), new String[] {"obj"}, Strings.EMPTY_ARRAY);
    assertThat(mapTuple.v2(), equalTo(filteredSource));
}
private void parseSource(SearchContext context, BytesReference source) throws SearchParseException {
    // nothing to parse...
    if (source == null || source.length() == 0) {
        return;
    }
    XContentParser parser = null;
    try {
        parser = XContentFactory.xContent(source).createParser(source);
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchParseException(
                "Expected START_OBJECT but got " + token.name() + " " + parser.currentName());
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                parser.nextToken();
                SearchParseElement element = elementParsers.get(fieldName);
                if (element == null) {
                    throw new SearchParseException(
                        context, "No parser for element [" + fieldName + "]", parser.getTokenLocation());
                }
                element.parse(parser, context);
            } else if (token == null) {
                throw new ElasticsearchParseException(
                    "End of query source reached but query is not complete.");
            } else {
                throw new ElasticsearchParseException(
                    "Expected field name but got " + token.name() + " \"" + parser.currentName() + "\"");
            }
        }
    } catch (Throwable e) {
        String sSource = "_na_";
        try {
            sSource = XContentHelper.convertToJson(source, false);
        } catch (Throwable e1) {
            // ignore
        }
        // guard against an NPE when createParser itself failed and parser is still null
        XContentLocation location = parser != null ? parser.getTokenLocation() : null;
        throw new SearchParseException(context, "Failed to parse source [" + sSource + "]", location, e);
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}
private MetaData readMetaData(byte[] data) throws IOException {
    XContentParser parser = null;
    try {
        parser = XContentHelper.createParser(data, 0, data.length);
        return MetaData.Builder.fromXContent(parser);
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}
/** The source of the document (as a string). */
public String sourceAsString() {
    if (source == null) {
        return null;
    }
    BytesReference source = sourceRef();
    try {
        return XContentHelper.convertToJson(source, false);
    } catch (IOException e) {
        throw new ElasticsearchParseException("failed to convert source to a json string");
    }
}
private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder)
        throws Exception {
    Settings settings =
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
    ParseContext.InternalParseContext ctx =
        new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0));
    SourceToParse source = SourceToParse.source(builder.bytes());
    ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
    assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
    ctx.parser().nextToken();
    return DocumentParser.parseObject(ctx, mapper.root());
}
/** Sets the aliases that will be associated with the index when it gets created */
public CreateIndexRequest aliases(BytesReference source) {
    try (XContentParser parser = XContentHelper.createParser(source)) {
        // move to the first alias
        parser.nextToken();
        while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            alias(Alias.fromXContent(parser));
        }
        return this;
    } catch (IOException e) {
        throw new ElasticsearchParseException("Failed to parse aliases", e);
    }
}
@VisibleForTesting
Map<String, Object> buildMapFromSource(
        Reference[] insertColumns, Object[] insertValues, boolean isRawSourceInsert) {
    Map<String, Object> sourceAsMap;
    if (isRawSourceInsert) {
        BytesRef source = (BytesRef) insertValues[0];
        sourceAsMap = XContentHelper.convertToMap(source.bytes, true).v2();
    } else {
        sourceAsMap = new LinkedHashMap<>(insertColumns.length);
        for (int i = 0; i < insertColumns.length; i++) {
            sourceAsMap.put(insertColumns[i].ident().columnIdent().fqn(), insertValues[i]);
        }
    }
    return sourceAsMap;
}
@Override public String toString() { String sSource = "_na_"; try { sSource = XContentHelper.convertToJson(source, false); } catch (Exception e) { // ignore } return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", source[" + sSource + "]"; }
@Nullable
private IndexMetaData loadIndex(String index) {
    long highestVersion = -1;
    IndexMetaData indexMetaData = null;
    for (File indexLocation : nodeEnv.indexLocations(new Index(index))) {
        File stateDir = new File(indexLocation, "_state");
        if (!stateDir.exists() || !stateDir.isDirectory()) {
            continue;
        }
        // now, iterate over the current versions, and find latest one
        File[] stateFiles = stateDir.listFiles();
        if (stateFiles == null) {
            continue;
        }
        for (File stateFile : stateFiles) {
            if (!stateFile.getName().startsWith("state-")) {
                continue;
            }
            try {
                long version = Long.parseLong(stateFile.getName().substring("state-".length()));
                if (version > highestVersion) {
                    byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
                    if (data.length == 0) {
                        logger.debug(
                            "[{}]: no data for [" + stateFile.getAbsolutePath() + "], ignoring...",
                            index);
                        continue;
                    }
                    XContentParser parser = null;
                    try {
                        parser = XContentHelper.createParser(data, 0, data.length);
                        parser.nextToken(); // move to START_OBJECT
                        indexMetaData = IndexMetaData.Builder.fromXContent(parser);
                        highestVersion = version;
                    } finally {
                        if (parser != null) {
                            parser.close();
                        }
                    }
                }
            } catch (Exception e) {
                logger.debug(
                    "[{}]: failed to read [" + stateFile.getAbsolutePath() + "], ignoring...",
                    e, index);
            }
        }
    }
    return indexMetaData;
}
private MetaData loadGlobalState() {
    long highestVersion = -1;
    MetaData metaData = null;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
        File stateLocation = new File(dataLocation, "_state");
        if (!stateLocation.exists()) {
            continue;
        }
        File[] stateFiles = stateLocation.listFiles();
        if (stateFiles == null) {
            continue;
        }
        for (File stateFile : stateFiles) {
            String name = stateFile.getName();
            if (!name.startsWith("global-")) {
                continue;
            }
            try {
                long version = Long.parseLong(stateFile.getName().substring("global-".length()));
                if (version > highestVersion) {
                    byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
                    if (data.length == 0) {
                        logger.debug(
                            "[_global] no data for [" + stateFile.getAbsolutePath() + "], ignoring...");
                        continue;
                    }
                    XContentParser parser = null;
                    try {
                        parser = XContentHelper.createParser(data, 0, data.length);
                        metaData = MetaData.Builder.fromXContent(parser);
                        highestVersion = version;
                    } finally {
                        if (parser != null) {
                            parser.close();
                        }
                    }
                }
            } catch (Exception e) {
                logger.debug("failed to load global state from [{}]", e, stateFile.getAbsolutePath());
            }
        }
    }
    return metaData;
}
Query parsePercolatorDocument(String id, BytesReference source) {
    String type = null;
    BytesReference querySource = null;
    XContentParser parser = null;
    try {
        parser = XContentHelper.createParser(source);
        String currentFieldName = null;
        XContentParser.Token token = parser.nextToken(); // move to the START_OBJECT
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchException(
                "failed to parse query [" + id + "], not starting with OBJECT");
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("query".equals(currentFieldName)) {
                    if (type != null) {
                        return parseQuery(type, null, parser);
                    } else {
                        XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
                        builder.copyCurrentStructure(parser);
                        querySource = builder.bytes();
                        builder.close();
                    }
                } else {
                    parser.skipChildren();
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                parser.skipChildren();
            } else if (token.isValue()) {
                if ("type".equals(currentFieldName)) {
                    type = parser.text();
                }
            }
        }
        return parseQuery(type, querySource, null);
    } catch (Exception e) {
        throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}
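// A minimal sketch (not from the original sources) of the token-walking pattern used in
// parsePercolatorDocument above, against a hypothetical two-field document; assumes the same
// pre-5.x XContentHelper.createParser(BytesReference) signature as the surrounding snippets.
static void tokenWalkExample() throws IOException {
    XContentParser parser =
        XContentHelper.createParser(new BytesArray("{\"type\":\"doc\",\"count\":1}"));
    try {
        XContentParser.Token token = parser.nextToken(); // move to the START_OBJECT
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName(); // remember the field about to be read
            } else if (token.isValue()) {
                // dispatch on currentFieldName here, e.g. "type" -> parser.text()
            }
        }
    } finally {
        parser.close();
    }
}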
public void testFromXContent() throws Exception {
    SliceBuilder sliceBuilder = randomSliceBuilder();
    XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
    if (randomBoolean()) {
        builder.prettyPrint();
    }
    builder.startObject();
    sliceBuilder.innerToXContent(builder);
    builder.endObject();
    XContentParser parser = XContentHelper.createParser(shuffleXContent(builder).bytes());
    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
    SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(context);
    assertNotSame(sliceBuilder, secondSliceBuilder);
    assertEquals(sliceBuilder, secondSliceBuilder);
    assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode());
}
public void testNotOmittingObjectsWithExcludedProperties() throws Exception {
    XContentBuilder builder = XContentFactory.jsonBuilder()
        .startObject()
        .startObject("obj")
        .field("f1", "v1")
        .endObject()
        .endObject();
    Tuple<XContentType, Map<String, Object>> mapTuple =
        XContentHelper.convertToMap(builder.bytes(), true);
    Map<String, Object> filteredSource =
        XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[] {"obj.f1"});
    assertThat(filteredSource.size(), equalTo(1));
    assertThat(filteredSource, hasKey("obj"));
    assertThat(((Map) filteredSource.get("obj")).size(), equalTo(0));
}
/**
 * Test that creates a new smoothing model by parsing a random test smoothing model back from
 * xContent and checks that both are equal.
 */
public void testFromXContent() throws IOException {
    SmoothingModel testModel = createTestModel();
    XContentBuilder contentBuilder =
        XContentFactory.contentBuilder(randomFrom(XContentType.values()));
    if (randomBoolean()) {
        contentBuilder.prettyPrint();
    }
    contentBuilder.startObject();
    testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
    contentBuilder.endObject();
    XContentParser parser = XContentHelper.createParser(shuffleXContent(contentBuilder).bytes());
    QueryParseContext context =
        new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT);
    parser.nextToken(); // go to start token, real parsing would do that in the outer element parser
    SmoothingModel parsedModel = fromXContent(context);
    assertNotSame(testModel, parsedModel);
    assertEquals(testModel, parsedModel);
    assertEquals(testModel.hashCode(), parsedModel.hashCode());
}
public void testReverseOptionFailsBuildWhenInvalidGeoHashString() throws IOException {
    String json = "{\n" + " \"reverse\" : \"false\"\n" + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();
    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);
    try {
        GeoDistanceSortBuilder item = GeoDistanceSortBuilder.fromXContent(context, "");
        item.validation(GeoValidationMethod.STRICT);
        item.build(createMockShardContext());
        fail("adding reverse sorting option should fail with an exception");
    } catch (ElasticsearchParseException e) {
        assertEquals(
            "illegal latitude value [269.384765625] for [GeoDistanceSort] for field [reverse].",
            e.getMessage());
    }
}
/**
 * Creates a random suggestion builder, renders it to xContent, and parses it back into a new
 * instance that should be equal to the original.
 */
public void testFromXContent() throws IOException {
    Suggesters suggesters = new Suggesters(Collections.emptyMap());
    QueryParseContext context = new QueryParseContext(null);
    context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
    for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
        SuggestBuilder suggestBuilder = createTestModel();
        XContentBuilder xContentBuilder =
            XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        if (randomBoolean()) {
            xContentBuilder.prettyPrint();
        }
        suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
        XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
        context.reset(parser);
        SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
        assertNotSame(suggestBuilder, secondSuggestBuilder);
        assertEquals(suggestBuilder, secondSuggestBuilder);
        assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode());
    }
}
public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params)
        throws IOException {
    List<GetField> metaFields = new ArrayList<>();
    List<GetField> otherFields = new ArrayList<>();
    if (fields != null && !fields.isEmpty()) {
        for (GetField field : fields.values()) {
            if (field.getValues().isEmpty()) {
                continue;
            }
            if (field.isMetadataField()) {
                metaFields.add(field);
            } else {
                otherFields.add(field);
            }
        }
    }
    for (GetField field : metaFields) {
        builder.field(field.getName(), field.getValue());
    }
    builder.field(Fields.FOUND, exists);
    if (source != null) {
        XContentHelper.writeRawField("_source", source, builder, params);
    }
    if (!otherFields.isEmpty()) {
        builder.startObject(Fields.FIELDS);
        for (GetField field : otherFields) {
            builder.startArray(field.getName());
            for (Object value : field.getValues()) {
                builder.value(value);
            }
            builder.endArray();
        }
        builder.endObject();
    }
    return builder;
}
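// A minimal sketch (not from the original sources) of embedding already-serialized source
// bytes into a builder via writeRawField, as toXContentEmbedded does above; the document
// content here is hypothetical.
static XContentBuilder embedRawSourceExample() throws IOException {
    BytesReference rawSource = new BytesArray("{\"field\":\"value\"}");
    XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
    // writeRawField copies the raw bytes into the output instead of re-parsing them
    XContentHelper.writeRawField("_source", rawSource, builder, ToXContent.EMPTY_PARAMS);
    return builder.endObject();
}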
public void testSortModeSumIsRejectedInJSON() throws IOException {
    String json = "{\n"
        + " \"testname\" : [ {\n"
        + " \"lat\" : -6.046997540714173,\n"
        + " \"lon\" : -51.94128329747579\n"
        + " } ],\n"
        + " \"unit\" : \"m\",\n"
        + " \"distance_type\" : \"sloppy_arc\",\n"
        + " \"mode\" : \"SUM\"\n"
        + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();
    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(context, ""));
    assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage());
}
public void testReverseOptionFailsWhenNonStringField() throws IOException {
    String json = "{\n"
        + " \"testname\" : [ {\n"
        + " \"lat\" : -6.046997540714173,\n"
        + " \"lon\" : -51.94128329747579\n"
        + " } ],\n"
        + " \"reverse\" : true\n"
        + "}";
    XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
    itemParser.nextToken();
    QueryParseContext context =
        new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);
    try {
        GeoDistanceSortBuilder.fromXContent(context, "");
        fail("adding reverse sorting option should fail with an exception");
    } catch (ParsingException e) {
        assertEquals("Only geohashes of type string supported for field [reverse]", e.getMessage());
    }
}