From 8a89d95372a910255479d0b05cd1f3f46e333c77 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Mon, 27 Jul 2020 13:25:55 -0700 Subject: [PATCH] Add search `fields` parameter to support high-level field retrieval. (#60100) This feature adds a new `fields` parameter to the search request, which consults both the document `_source` and the mappings to fetch fields in a consistent way. The PR merges the `field-retrieval` feature branch. Addresses #49028 and #55363. --- docs/build.gradle | 17 +- docs/reference/aggregations/misc.asciidoc | 18 +- docs/reference/docs/get.asciidoc | 6 +- docs/reference/mapping/types.asciidoc | 1 + .../modules/cross-cluster-search.asciidoc | 10 +- docs/reference/search/search-fields.asciidoc | 287 ++++++++++---- .../rest-api-spec/test/geo_shape/10_basic.yml | 23 ++ .../index/mapper/RankFeatureFieldMapper.java | 22 +- .../index/mapper/RankFeaturesFieldMapper.java | 8 + .../index/mapper/ScaledFloatFieldMapper.java | 25 ++ .../mapper/SearchAsYouTypeFieldMapper.java | 18 + .../index/mapper/TokenCountFieldMapper.java | 9 + .../mapper/RankFeatureFieldMapperTests.java | 11 + .../mapper/ScaledFloatFieldMapperTests.java | 26 ++ .../join/mapper/MetaJoinFieldMapper.java | 5 + .../join/mapper/ParentIdFieldMapper.java | 5 + .../join/mapper/ParentJoinFieldMapper.java | 8 + .../percolator/PercolatorFieldMapper.java | 8 + .../ICUCollationKeywordFieldMapper.java | 18 + .../ICUCollationKeywordFieldMapperTests.java | 28 ++ .../AnnotatedTextFieldMapper.java | 8 + .../AnnotatedTextFieldMapperTests.java | 20 + .../mapper/murmur3/Murmur3FieldMapper.java | 9 +- .../test/search/330_fetch_fields.yml | 216 +++++++++++ .../action/search/SearchRequestBuilder.java | 21 + .../common/document/DocumentField.java | 7 +- .../org/elasticsearch/common/geo/GeoJson.java | 1 - .../common/geo/GeoJsonGeometryFormat.java | 82 ++++ .../common/geo/GeometryFormat.java | 11 + .../common/geo/GeometryParser.java | 76 ++-- .../common/geo/WKTGeometryFormat.java | 66 ++++ .../common/network/InetAddresses.java | 13 + .../xcontent/support/XContentMapValues.java | 74 +++- .../mapper/AbstractGeometryFieldMapper.java | 58 ++- .../AbstractPointGeometryFieldMapper.java | 26 +- .../index/mapper/BinaryFieldMapper.java | 7 + .../index/mapper/BooleanFieldMapper.java | 19 + .../index/mapper/CompletionFieldMapper.java | 13 + .../index/mapper/DateFieldMapper.java | 19 + .../index/mapper/FieldMapper.java | 56 +++ .../index/mapper/FieldTypeLookup.java | 46 ++- .../index/mapper/GeoPointFieldMapper.java | 8 +- .../index/mapper/GeoShapeFieldMapper.java | 2 +- .../index/mapper/GeoShapeParser.java | 74 ++++ .../index/mapper/IpFieldMapper.java | 20 + .../index/mapper/KeywordFieldMapper.java | 70 +++- .../mapper/LegacyGeoShapeFieldMapper.java | 28 +- .../index/mapper/MapperService.java | 8 + .../index/mapper/MetadataFieldMapper.java | 5 + .../index/mapper/NumberFieldMapper.java | 18 + .../index/mapper/RangeFieldMapper.java | 26 ++ .../elasticsearch/index/mapper/RangeType.java | 53 ++- .../index/mapper/TextFieldMapper.java | 18 + .../index/query/InnerHitBuilder.java | 2 +- .../search/DefaultSearchContext.java | 13 + .../elasticsearch/search/SearchModule.java | 2 + .../elasticsearch/search/SearchService.java | 7 + .../metrics/TopHitsAggregationBuilder.java | 2 +- .../metrics/TopHitsAggregatorFactory.java | 2 +- .../search/builder/SearchSourceBuilder.java | 65 +++- .../search/fetch/FetchPhase.java | 5 +- .../fetch/subphase/FetchDocValuesContext.java | 78 ---- .../fetch/subphase/FetchDocValuesPhase.java | 1 - 
.../fetch/subphase/FetchFieldsContext.java | 53 +++ .../fetch/subphase/FetchFieldsPhase.java | 69 ++++ .../search/fetch/subphase/FieldAndFormat.java | 115 ++++++ .../fetch/subphase/FieldValueRetriever.java | 107 +++++ .../search/internal/SearchContext.java | 11 + .../search/internal/SubSearchContext.java | 13 + .../search/lookup/SourceLookup.java | 17 + .../support/XContentMapValuesTests.java | 29 ++ .../index/get/DocumentFieldTests.java | 27 +- .../index/mapper/BooleanFieldMapperTests.java | 25 ++ .../mapper/CompletionFieldMapperTests.java | 20 + .../index/mapper/DateFieldMapperTests.java | 76 ++++ .../mapper/DocumentFieldMapperTests.java | 5 + .../index/mapper/ExternalMapper.java | 5 + .../index/mapper/FakeStringFieldMapper.java | 5 + .../index/mapper/FieldTypeLookupTests.java | 55 +++ .../mapper/GeoPointFieldMapperTests.java | 40 ++ .../mapper/GeoShapeFieldMapperTests.java | 40 ++ .../index/mapper/IpFieldMapperTests.java | 23 ++ .../index/mapper/IpRangeFieldMapperTests.java | 13 + .../index/mapper/KeywordFieldMapperTests.java | 40 ++ .../LegacyGeoShapeFieldMapperTests.java | 40 ++ .../index/mapper/NumberFieldMapperTests.java | 23 ++ .../index/mapper/ParametrizedMapperTests.java | 5 + .../index/mapper/RangeFieldMapperTests.java | 36 ++ .../index/mapper/RangeFieldTypeTests.java | 10 +- .../index/mapper/TextFieldMapperTests.java | 14 + .../index/query/InnerHitBuilderTests.java | 2 +- .../elasticsearch/search/SearchHitTests.java | 4 +- .../subphase/FieldValueRetrieverTests.java | 367 ++++++++++++++++++ .../CompletionSuggestionOptionTests.java | 6 +- .../search/suggest/SuggestionEntryTests.java | 5 +- .../search/suggest/SuggestionTests.java | 6 +- .../index/mapper/MockFieldMapper.java | 34 ++ .../elasticsearch/test/TestSearchContext.java | 11 + .../mapper/HistogramFieldMapper.java | 8 + .../mapper/ConstantKeywordFieldMapper.java | 17 + .../ConstantKeywordFieldMapperTests.java | 26 ++ .../mapper/FlatObjectFieldMapper.java | 13 + .../mapper/FlatObjectFieldMapperTests.java | 24 ++ .../ql/execution/search/QlSourceBuilder.java | 2 +- .../xpack/spatial/common/CartesianPoint.java | 65 ++-- .../GeoShapeWithDocValuesFieldMapper.java | 3 +- .../index/mapper/PointFieldMapper.java | 22 +- .../index/mapper/ShapeFieldMapper.java | 3 +- .../index/mapper/PointFieldMapperTests.java | 42 ++ .../index/mapper/ShapeFieldMapperTests.java | 41 ++ .../xpack/sql/action/SqlLicenseIT.java | 4 +- .../sql/action/SqlTranslateActionIT.java | 4 +- .../test/constant_keyword/10_basic.yml | 15 + .../mapper/DenseVectorFieldMapper.java | 8 + .../mapper/SparseVectorFieldMapper.java | 6 +- .../wildcard/mapper/WildcardFieldMapper.java | 18 + .../mapper/WildcardFieldMapperTests.java | 27 ++ 117 files changed, 3219 insertions(+), 388 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml create mode 100644 server/src/main/java/org/elasticsearch/common/geo/GeoJsonGeometryFormat.java create mode 100644 server/src/main/java/org/elasticsearch/common/geo/WKTGeometryFormat.java create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/GeoShapeParser.java create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldValueRetriever.java create mode 
100644 server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldValueRetrieverTests.java diff --git a/docs/build.gradle b/docs/build.gradle index 94631365529ec..47da3df34290a 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -144,23 +144,28 @@ Closure setupTwitter = { String name, int count -> type: date likes: type: long + location: + properties: + city: + type: keyword + country: + type: keyword - do: bulk: index: twitter refresh: true body: |''' for (int i = 0; i < count; i++) { - String user, text + String body if (i == 0) { - user = 'kimchy' - text = 'trying out Elasticsearch' + body = """{"user": "kimchy", "message": "trying out Elasticsearch", "date": "2009-11-15T14:12:12", "likes": 0, + "location": { "city": "Amsterdam", "country": "Netherlands" }}""" } else { - user = 'test' - text = "some message with the number $i" + body = """{"user": "test", "message": "some message with the number $i", "date": "2009-11-15T14:12:12", "likes": $i}""" } buildRestTests.setups[name] += """ {"index":{"_id": "$i"}} - {"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}""" + $body""" } } setupTwitter('twitter', 5) diff --git a/docs/reference/aggregations/misc.asciidoc b/docs/reference/aggregations/misc.asciidoc index 9c0e6206f0a67..28d0df30cd537 100644 --- a/docs/reference/aggregations/misc.asciidoc +++ b/docs/reference/aggregations/misc.asciidoc @@ -105,7 +105,8 @@ GET /twitter/_search?typed_keys "aggregations": { "top_users": { "top_hits": { - "size": 1 + "size": 1, + "_source": ["user", "likes", "message"] } } } @@ -133,7 +134,7 @@ In the response, the aggregations names will be changed to respectively `date_hi "total": { "value": 5, "relation": "eq" - }, + }, "max_score": 1.0, "hits": [ { @@ -141,9 +142,8 @@ In the response, the aggregations names will be changed to respectively `date_hi "_id": "0", "_score": 1.0, "_source": { - "date": "2009-11-15T14:12:12", - "message": "trying out Elasticsearch", "user": "kimchy", + "message": "trying out Elasticsearch", "likes": 0 } } @@ -167,12 +167,12 @@ request. This is the case for Terms, Significant Terms and Percentiles aggregati also contains information about the type of the targeted field: `lterms` (for a terms aggregation on a Long field), `sigsterms` (for a significant terms aggregation on a String field), `tdigest_percentiles` (for a percentile aggregation based on the TDigest algorithm). - + [[indexing-aggregation-results]] == Indexing aggregation results with {transforms} - -<> enable you to convert existing {es} indices -into summarized indices, which provide opportunities for new insights and -analytics. You can use {transforms} to persistently index your aggregation + +<> enable you to convert existing {es} indices +into summarized indices, which provide opportunities for new insights and +analytics. You can use {transforms} to persistently index your aggregation results into entity-centric indices. 
diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index bc6028e222c65..14a90be86943a 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -241,7 +241,11 @@ The API returns the following result: "user": "kimchy", "date": "2009-11-15T14:12:12", "likes": 0, - "message": "trying out Elasticsearch" + "message": "trying out Elasticsearch", + "location": { + "city": "Amsterdam", + "country": "Netherlands" + } } } -------------------------------------------------- diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index 7745bb9131adf..ea641bb49c316 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -22,6 +22,7 @@ string:: <>, <> and <>:: `nested` for arrays of JSON objects [discrete] +[[spatial_datatypes]] === Spatial data types <>:: `geo_point` for lat/lon points diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc index 8806a05884175..4eb88f361a639 100644 --- a/docs/reference/modules/cross-cluster-search.asciidoc +++ b/docs/reference/modules/cross-cluster-search.asciidoc @@ -76,7 +76,8 @@ GET /cluster_one:twitter/_search "match": { "user": "kimchy" } - } + }, + "_source": ["user", "message", "likes"] } -------------------------------------------------- // TEST[continued] @@ -113,7 +114,6 @@ The API returns the following response: "_score": 1, "_source": { "user": "kimchy", - "date": "2009-11-15T14:12:12", "message": "trying out Elasticsearch", "likes": 0 } @@ -147,7 +147,8 @@ GET /twitter,cluster_one:twitter,cluster_two:twitter/_search "match": { "user": "kimchy" } - } + }, + "_source": ["user", "message", "likes"] } -------------------------------------------------- // TEST[continued] @@ -184,7 +185,6 @@ The API returns the following response: "_score": 2, "_source": { "user": "kimchy", - "date": "2009-11-15T14:12:12", "message": "trying out Elasticsearch", "likes": 0 } @@ -195,7 +195,6 @@ The API returns the following response: "_score": 1, "_source": { "user": "kimchy", - "date": "2009-11-15T14:12:12", "message": "trying out Elasticsearch", "likes": 0 } @@ -206,7 +205,6 @@ The API returns the following response: "_score": 1, "_source": { "user": "kimchy", - "date": "2009-11-15T14:12:12", "message": "trying out Elasticsearch", "likes": 0 } diff --git a/docs/reference/search/search-fields.asciidoc b/docs/reference/search/search-fields.asciidoc index 20605c48e3206..b3de7b0e58016 100644 --- a/docs/reference/search/search-fields.asciidoc +++ b/docs/reference/search/search-fields.asciidoc @@ -4,33 +4,224 @@ By default, each hit in the search response includes the document <>, which is the entire JSON object that was -provided when indexing the document. If you only need certain source fields in -the search response, you can use the <> to -restrict what parts of the source are returned. +provided when indexing the document. To retrieve specific fields in the search +response, you can use the `fields` parameter: -Returning fields using only the document source has some limitations: +[source,console] +---- +POST twitter/_search +{ + "query": { + "match": { + "message": "elasticsearch" + } + }, + "fields": ["user", "date"], + "_source": false +} +---- +// TEST[setup:twitter] -* The `_source` field does not include <> or -<>. Likewise, a field in the source does not contain -values copied using the <> mapping parameter. 
-* Since the `_source` is stored as a single field in Lucene, the whole source -object must be loaded and parsed, even if only a small number of fields are -needed. +The `fields` parameter consults both a document's `_source` and the index +mappings to load and return values. Because it makes use of the mappings, +`fields` has some advantages over referencing the `_source` directly: it +accepts <> and <>, and +also formats field values like dates in a consistent way. -To avoid these limitations, you can: +A document's `_source` is stored as a single field in Lucene. So the whole +`_source` object must be loaded and parsed even if only a small number of +fields are requested. To avoid this limitation, you can try another option for +loading fields: * Use the <> parameter to get values for selected fields. This can be a good choice when returning a fairly small number of fields that support doc values, such as keywords and dates. -* Use the <> parameter to get the values for specific stored fields. (Fields that use the <> mapping option.) +* Use the <> parameter to +get the values for specific stored fields (fields that use the +<> mapping option). -You can find more detailed information on each of these methods in the +You can find more detailed information on each of these methods in the following sections: -* <> +* <> * <> * <> +* <> + +[discrete] +[[search-fields-param]] +=== Fields + +The `fields` parameter allows for retrieving a list of document fields in +the search response. It consults both the document `_source` and the index +mappings to return each value in a standardized way that matches its mapping +type. By default, date fields are formatted according to the +<> parameter in their mappings. + +The following search request uses the `fields` parameter to retrieve values +for the `user` field, all fields starting with `location.`, and the +`date` field: + +[source,console] +---- +POST twitter/_search +{ + "query": { + "match": { + "message": "elasticsearch" + } + }, + "fields": [ + "user", + "location.*", <1> + { + "field": "date", + "format": "epoch_millis" <2> + } + ], + "_source": false +} +---- +// TEST[continued] + +<1> Both full field names and wildcard patterns are accepted. +<2> Using object notation, you can pass a `format` parameter to apply a custom + format for the field's values. The date fields + <> and <> accept a + <>. <> + accept either `geojson` for http://www.geojson.org[GeoJSON] (the default) + or `wkt` for + https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[Well Known Text]. + Other field types do not support the `format` parameter. + +The values are returned as a flat list in the `fields` section in each hit: + +[source,console-result] +---- +{ + "took" : 2, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 1, + "relation" : "eq" + }, + "max_score" : 1.0, + "hits" : [ + { + "_index" : "twitter", + "_id" : "0", + "_score" : 1.0, + "fields" : { + "user" : [ + "kimchy" + ], + "date" : [ + "1258294332000" + ], + "location.city": [ + "Amsterdam" + ], + "location.country": [ + "Netherlands" + ] + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took" : 2/"took": $body.took/] +// TESTRESPONSE[s/"max_score" : 1.0/"max_score" : $body.hits.max_score/] +// TESTRESPONSE[s/"_score" : 1.0/"_score" : $body.hits.hits.0._score/] + +Only leaf fields are returned -- `fields` does not allow for fetching entire +objects. 
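As a rough illustration of the leaf-fields-only behavior, here is a sketch that reuses the `twitter` setup from these docs, which maps `location` as an object with `city` and `country` keyword sub-fields. Requesting the object field by name is expected to return no values for it, while a wildcard over its leaf sub-fields does return values:

[source,console]
----
POST twitter/_search
{
  "query": {
    "match": {
      "message": "elasticsearch"
    }
  },
  "fields": [
    "location",    <1>
    "location.*"   <2>
  ],
  "_source": false
}
----

<1> `location` is an object field, so no entry is expected for it in the hit's `fields` section.
<2> The leaf sub-fields `location.city` and `location.country` are returned instead.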
+ + The `fields` parameter handles field types like <> and + <> whose values aren't always present in + the `_source`. Other mapping options are also respected, including + <>, <> and + <>. + + NOTE: The `fields` response always returns an array of values for each field, + even when there is a single value in the `_source`. This is because {es} has + no dedicated array type, and any field could contain multiple values. The + `fields` parameter also does not guarantee that array values are returned in + a specific order. See the mapping documentation on <> for more + background. + + + + [discrete] + [[docvalue-fields]] + === Doc value fields + + You can use the <> parameter to return + <> for one or more fields in the search response. + + Doc values store the same values as the `_source` but in an on-disk, + column-based structure that's optimized for sorting and aggregations. Since each + field is stored separately, {es} only reads the field values that were requested + and can avoid loading the whole document `_source`. + + Doc values are stored for supported fields by default. However, doc values are + not supported for <> or + {plugins}/mapper-annotated-text-usage.html[`annotated_text`] fields. + + The following search request uses the `docvalue_fields` parameter to retrieve + doc values for the `user` field, all fields starting with `location.`, and the + `date` field: + + [source,console] + ---- + GET twitter/_search + { + "query": { + "match": { + "message": "elasticsearch" + } + }, + "docvalue_fields": [ + "user", + "location.*", <1> + { + "field": "date", + "format": "epoch_millis" <2> + } + ] + } + ---- + // TEST[continued] + + <1> Both full field names and wildcard patterns are accepted. + <2> Using object notation, you can pass a `format` parameter to apply a custom + format for the field's doc values. <> support a + <>. <> support a + https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat + pattern]. Other field data types do not support the `format` parameter. + + TIP: You cannot use the `docvalue_fields` parameter to retrieve doc values for + nested objects. If you specify a nested object, the search returns an empty + array (`[ ]`) for the field. To access nested fields, use the + <> parameter's `docvalue_fields` + property. + + [discrete] + [[stored-fields]] + === Stored fields + + It's also possible to store an individual field's values by using the + <> mapping option. You can use the + <> parameter to include + these stored values in the search response. [discrete] [[source-filtering]] === Source filtering @@ -117,71 +308,3 @@ GET /_search } } ---- - -
- -The following search request uses the `docvalue_fields` parameter to -retrieve doc values for the following fields: - -* Fields with names starting with `my_ip` -* `my_keyword_field` -* Fields with names ending with `_date_field` - -[source,console] ----- -GET /_search -{ - "query": { - "match_all": {} - }, - "docvalue_fields": [ - "my_ip*", <1> - { - "field": "my_keyword_field" <2> - }, - { - "field": "*_date_field", - "format": "epoch_millis" <3> - } - ] -} ----- - -<1> Wildcard patten used to match field names, specified as a string. -<2> Wildcard patten used to match field names, specified as an object. -<3> With the object notation, you can use the `format` parameter to specify a - format for the field's returned doc values. <> support a - <>. <> support a - https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat - pattern]. Other field data types do not support the `format` parameter. - -TIP: You cannot use the `docvalue_fields` parameter to retrieve doc values for -nested objects. If you specify a nested object, the search returns an empty -array (`[ ]`) for the field. To access nested fields, use the -<> parameter's `docvalue_fields` -property. - - -[discrete] -[[stored-fields]] -=== Stored fields - -It's also possible to store an individual field's values by using the -<> mapping option. You can use the -<> parameter to include -these stored values in the search response. diff --git a/modules/geo/src/yamlRestTest/resources/rest-api-spec/test/geo_shape/10_basic.yml b/modules/geo/src/yamlRestTest/resources/rest-api-spec/test/geo_shape/10_basic.yml index 7ddfa5832d4c5..aaa692ba933b7 100644 --- a/modules/geo/src/yamlRestTest/resources/rest-api-spec/test/geo_shape/10_basic.yml +++ b/modules/geo/src/yamlRestTest/resources/rest-api-spec/test/geo_shape/10_basic.yml @@ -57,3 +57,26 @@ setup: field: location - match: {hits.total: 1} + +--- +"Test retrieve geo_shape field": + - do: + search: + index: test + body: + fields: [location] + _source: false + + - match: { hits.hits.0.fields.location.0.type: "Point" } + - match: { hits.hits.0.fields.location.0.coordinates: [1.0, 1.0] } + + - do: + search: + index: test + body: + fields: + - field: location + format: wkt + _source: false + + - match: { hits.hits.0.fields.location.0: "POINT (1.0 1.0)" } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeatureFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeatureFieldMapper.java index 9d34562f35733..2b2677ec4a501 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeatureFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeatureFieldMapper.java @@ -152,11 +152,7 @@ protected void parseCreateField(ParseContext context) throws IOException { float value; if (context.externalValueSet()) { Object v = context.externalValue(); - if (v instanceof Number) { - value = ((Number) v).floatValue(); - } else { - value = Float.parseFloat(v.toString()); - } + value = objectToFloat(v); } else if (context.parser().currentToken() == Token.VALUE_NULL) { // skip return; @@ -176,6 +172,22 @@ protected void parseCreateField(ParseContext context) throws IOException { context.doc().addWithKey(name(), new FeatureField("_feature", name(), value)); } + private Float objectToFloat(Object value) { + if (value instanceof Number) { + return ((Number) value).floatValue(); + } else { + return Float.parseFloat(value.toString()); + } + } + + @Override + 
protected Float parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return objectToFloat(value); + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeaturesFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeaturesFieldMapper.java index 4968bf5ec6b0e..b692b834448ee 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeaturesFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/RankFeaturesFieldMapper.java @@ -159,6 +159,14 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new AssertionError("parse is implemented directly"); } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + @Override protected boolean indexedByDefault() { return false; diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index b204b29eb587e..c8946de34a2d3 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -356,6 +356,11 @@ protected ScaledFloatFieldMapper clone() { return (ScaledFloatFieldMapper) super.clone(); } + @Override + protected Double nullValue() { + return nullValue; + } + @Override protected void parseCreateField(ParseContext context) throws IOException { @@ -474,6 +479,26 @@ private static double objectToDouble(Object value) { return doubleValue; } + @Override + protected Double parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + double doubleValue; + if (value.equals("")) { + if (nullValue == null) { + return null; + } + doubleValue = nullValue; + } else { + doubleValue = objectToDouble(value); + } + + double scalingFactor = fieldType().getScalingFactor(); + return Math.round(doubleValue * scalingFactor) / scalingFactor; + } + private static class ScaledFloatIndexFieldData extends IndexNumericFieldData { private final IndexNumericFieldData scaledFieldData; diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java index 4ef205a921ecf..48042c2d0ee2b 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -418,6 +418,11 @@ protected void parseCreateField(ParseContext context) { throw new UnsupportedOperationException(); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { @@ -459,6 +464,11 @@ protected void 
mergeOptions(FieldMapper other, List conflicts) { } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected String contentType() { return "shingle"; @@ -577,6 +587,14 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value.toString(); + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java index fb6a41a0f7608..2b3041e0dc3a2 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java @@ -158,6 +158,15 @@ protected void parseCreateField(ParseContext context) throws IOException { context.doc().addAll(NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, indexed, docValued, stored)); } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + return value.toString(); + } + /** * Count position increments in a token stream. Package private for testing. * @param analyzer analyzer to create token stream diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java index 5fa95e6e15f2f..30df4394b7418 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java @@ -23,9 +23,12 @@ import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute; import org.apache.lucene.document.FeatureField; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; @@ -186,4 +189,12 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep e.getCause().getMessage()); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context); + + assertEquals(3.14f, mapper.parseSourceValue(3.14, null), 0.0001); + assertEquals(42.9f, mapper.parseSourceValue("42.9", null), 0.0001); + } } diff --git 
a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 7fa4a81ec5c13..6758750f66581 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -21,14 +21,18 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; @@ -398,4 +402,26 @@ public void testMeta() throws Exception { new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + ScaledFloatFieldMapper mapper = new ScaledFloatFieldMapper.Builder("field") + .scalingFactor(100) + .build(context); + assertEquals(3.14, mapper.parseSourceValue(3.1415926, null), 0.00001); + assertEquals(3.14, mapper.parseSourceValue("3.1415", null), 0.00001); + assertNull(mapper.parseSourceValue("", null)); + + ScaledFloatFieldMapper nullValueMapper = new ScaledFloatFieldMapper.Builder("field") + .scalingFactor(100) + .nullValue(2.71) + .build(context); + assertEquals(2.71, nullValueMapper.parseSourceValue("", null), 0.00001); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of(2.71), nullValueMapper.lookupValues(sourceLookup, null)); + } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/MetaJoinFieldMapper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/MetaJoinFieldMapper.java index 5900de9cabe58..404b7d50be00d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/MetaJoinFieldMapper.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/MetaJoinFieldMapper.java @@ -135,6 +135,11 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new IllegalStateException("Should never be called"); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source."); + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java index 0f1216b17729d..c81548c3d32de 100644 --- 
a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java @@ -185,6 +185,11 @@ protected void parseCreateField(ParseContext context) throws IOException { context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue)); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source."); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) other; diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java index f1dbe83e95eb8..67217855ceef1 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java @@ -347,6 +347,14 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new UnsupportedOperationException("parsing is implemented in parse(), this method should NEVER be called"); } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + @Override public void parse(ParseContext context) throws IOException { context.path().add(simpleName()); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 0df62e260c1f1..895c792b53ded 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -367,6 +367,14 @@ public void parse(ParseContext context) throws IOException { processQuery(query, context); } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField, QueryBuilder queryBuilder, ParseContext context) throws IOException { try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) { diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index 8796684da5201..722bd0edaf26b 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -577,6 +577,11 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + protected String nullValue() { + return nullValue; + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { ICUCollationKeywordFieldMapper icuMergeWith = (ICUCollationKeywordFieldMapper) other; @@ -731,4 +736,17 @@ protected void parseCreateField(ParseContext 
context) throws IOException { createFieldNamesField(context); } } + + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + String keywordValue = value.toString(); + if (keywordValue.length() > ignoreAbove) { + return null; + } + return keywordValue; + } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java index 21a9cce88fe54..b809b0fb87139 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java @@ -26,21 +26,27 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Set; import static org.hamcrest.Matchers.containsString; @@ -483,4 +489,26 @@ public void testUpdateIgnoreAbove() throws IOException { indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(context); + assertEquals("42", mapper.parseSourceValue(42L, null)); + assertEquals("true", mapper.parseSourceValue(true, null)); + + ICUCollationKeywordFieldMapper ignoreAboveMapper = new ICUCollationKeywordFieldMapper.Builder("field") + .ignoreAbove(4) + .build(context); + assertNull(ignoreAboveMapper.parseSourceValue("value", null)); + assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null)); + assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null)); + + ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field") + .nullValue("NULL") + .build(context); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null)); + } } diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java 
b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 94fd537f44846..dcab066ca8dd5 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -583,6 +583,14 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value.toString(); + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 5acc8c9a82280..48d1ee50e224e 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -28,10 +28,12 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -44,8 +46,11 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; @@ -672,4 +677,19 @@ public void testEmptyName() throws IOException { assertThat(e.getMessage(), containsString("name cannot be empty string")); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + FieldMapper fieldMapper = new AnnotatedTextFieldMapper.Builder("field") + .indexAnalyzer(indexService.getIndexAnalyzers().getDefaultIndexAnalyzer()) + .searchAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchAnalyzer()) + .searchQuoteAnalyzer(indexService.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer()) + .build(context); + AnnotatedTextFieldMapper mapper = (AnnotatedTextFieldMapper) fieldMapper; + + assertEquals("value", mapper.parseSourceValue("value", null)); + assertEquals("42", mapper.parseSourceValue(42L, 
null)); + assertEquals("true", mapper.parseSourceValue(true, null)); + } } diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index ee3fe9c90d071..ce33d3d25e11d 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -147,6 +147,14 @@ protected void parseCreateField(ParseContext context) } } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value.toString(); + } + @Override protected boolean indexedByDefault() { return false; @@ -156,5 +164,4 @@ protected boolean indexedByDefault() { protected void mergeOptions(FieldMapper other, List conflicts) { } - } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml new file mode 100644 index 0000000000000..c521123f278ed --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -0,0 +1,216 @@ +setup: + - skip: + version: " - 7.99.99" + reason: "fields retrieval is currently only implemented on master" + +--- +"Test basic field retrieval": + - do: + indices.create: + index: test + body: + mappings: + properties: + keyword: + type: keyword + integer_range: + type: integer_range + + - do: + index: + index: test + id: 1 + body: + keyword: [ "x", "y" ] + integer_range: + gte: 0 + lte: 42 + + - do: + indices.refresh: + index: [ test ] + + - do: + search: + index: test + body: + fields: [keyword, integer_range] + + - is_true: hits.hits.0._id + - is_true: hits.hits.0._source + + - match: { hits.hits.0.fields.keyword.0: x } + - match: { hits.hits.0.fields.keyword.1: y } + + - match: { hits.hits.0.fields.integer_range.0.gte: 0 } + - match: { hits.hits.0.fields.integer_range.0.lte: 42 } + +--- +"Test date formatting": + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + date: + type: date + + - do: + index: + index: test + id: 1 + body: + keyword: "value" + date: "1990-12-29T22:30:00.000Z" + + - do: + indices.refresh: + index: [ test ] + + - do: + search: + index: test + body: + fields: + - field: date + format: "yyyy/MM/dd" + + - is_true: hits.hits.0._id + - is_true: hits.hits.0._source + - match: { hits.hits.0.fields.date.0: "1990/12/29" } + + - do: + catch: bad_request + search: + index: test + body: + fields: + - field: keyword + format: "yyyy/MM/dd" +--- +"Test disable source": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + mappings: + _source: + enabled: false + properties: + keyword: + type: keyword + + - do: + index: + index: test + id: 1 + body: + keyword: [ "x" ] + + - do: + catch: bad_request + search: + index: test + body: + fields: [keyword] + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "Unable to retrieve the requested [fields] since _source is disabled + in the mappings for index [test]" } + +--- +"Test ignore malformed": + - do: + indices.create: + index: test + body: + settings: + 
number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + integer: + type: integer + ignore_malformed: true + + - do: + index: + index: test + id: 1 + body: + keyword: "x" + integer: 42 + + - do: + index: + index: test + id: 2 + body: + keyword: "y" + integer: "not an integer" + + - do: + indices.refresh: + index: [ test ] + + - do: + search: + index: test + body: + sort: [ keyword ] + fields: [ integer ] + + - match: { hits.hits.0.fields.integer.0: 42 } + - is_false: hits.hits.1.fields.integer + +--- +"Test disable _source loading": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + integer: + type: integer + store: true + + - do: + index: + index: test + id: 1 + refresh: true + body: + keyword: "x" + integer: 42 + + - do: + search: + index: test + body: + fields: [ keyword ] + _source: false + + - match: { hits.hits.0.fields.keyword.0: "x" } + + - do: + search: + index: test + body: + fields: [ keyword ] + stored_fields: [ integer ] + _source: false + + - match: { hits.hits.0.fields.keyword.0: "x" } + - match: { hits.hits.0.fields.integer.0: 42 } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index edb2f093b3ee0..42a56b308dafd 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -303,6 +303,27 @@ public SearchRequestBuilder addDocValueField(String name) { return addDocValueField(name, null); } + /** + * Adds a field to load and return. The field must be present in the document _source. + * + * @param name The field to load + */ + public SearchRequestBuilder addFetchField(String name) { + sourceBuilder().fetchField(name, null); + return this; + } + + /** + * Adds a field to load and return. The field must be present in the document _source. + * + * @param name The field to load + * @param format an optional format string to apply to the returned values, for example a date format pattern + */ + public SearchRequestBuilder addFetchField(String name, String format) { + sourceBuilder().fetchField(name, format); + return this; + } + /** * Adds a stored field to load and return (note, it must be stored) as part of the search request. */ diff --git a/server/src/main/java/org/elasticsearch/common/document/DocumentField.java b/server/src/main/java/org/elasticsearch/common/document/DocumentField.java index 1a69e51c06bdb..06dde9def1b87 100644 --- a/server/src/main/java/org/elasticsearch/common/document/DocumentField.java +++ b/server/src/main/java/org/elasticsearch/common/document/DocumentField.java @@ -105,10 +105,9 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray(name); for (Object value : values) { - // this call doesn't really need to support writing any kind of object. - // Stored fields values are converted using MappedFieldType#valueForDisplay. - // As a result they can either be Strings, Numbers, or Booleans, that's - // all. + // This call doesn't really need to support writing any kind of object, since the values + // here are always serializable to xContent. Each value could be a leaf type like a string, + // number, or boolean, a list of such values, or a map of such values with string keys.
builder.value(value); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java b/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java index 06bd67e7a5f3b..85b35c6393b9b 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoJson.java @@ -609,5 +609,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } } - } diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoJsonGeometryFormat.java b/server/src/main/java/org/elasticsearch/common/geo/GeoJsonGeometryFormat.java new file mode 100644 index 0000000000000..fa6614d3b7419 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoJsonGeometryFormat.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.geometry.Geometry; + +import java.io.IOException; +import java.io.UncheckedIOException; + +public class GeoJsonGeometryFormat implements GeometryFormat { + public static final String NAME = "geojson"; + + private final GeoJson geoJsonParser; + + public GeoJsonGeometryFormat(GeoJson geoJsonParser) { + this.geoJsonParser = geoJsonParser; + } + + @Override + public String name() { + return NAME; + } + + @Override + public Geometry fromXContent(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return geoJsonParser.fromXContent(parser); + } + + @Override + public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException { + if (geometry != null) { + return GeoJson.toXContent(geometry, builder, params); + } else { + return builder.nullValue(); + } + } + + @Override + public Object toXContentAsObject(Geometry geometry) { + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + GeoJson.toXContent(geometry, builder, ToXContent.EMPTY_PARAMS); + StreamInput input = BytesReference.bytes(builder).streamInput(); + + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, input)) { + 
return parser.map(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormat.java b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormat.java index 4b12b8fe0ac96..4ce53eaa8f197 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormat.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormat.java @@ -31,6 +31,11 @@ */ public interface GeometryFormat { + /** + * The name of the format, for example 'wkt'. + */ + String name(); + /** * Parser JSON representation of a geometry */ @@ -41,4 +46,10 @@ public interface GeometryFormat { */ XContentBuilder toXContent(ParsedFormat geometry, XContentBuilder builder, ToXContent.Params params) throws IOException; + /** + * Serializes the geometry into a standard Java object. + * + * For example, the GeoJson format returns the geometry as a map, while WKT returns a string. + */ + Object toXContentAsObject(ParsedFormat geometry); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java b/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java index d39e7752a2dc3..ce684fd72cbd3 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeometryParser.java @@ -22,15 +22,13 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.MapXContentParser; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.utils.StandardValidator; import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.StandardValidator; import org.elasticsearch.geometry.utils.WellKnownText; import java.io.IOException; @@ -66,59 +64,31 @@ public Geometry parse(XContentParser parser) throws IOException, ParseException /** * Returns a geometry format object that can parse and then serialize the object back to the same format. 
*/ - public GeometryFormat geometryFormat(XContentParser parser) { - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - return new GeometryFormat() { - @Override - public Geometry fromXContent(XContentParser parser) throws IOException { - return null; - } - - @Override - public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException { - if (geometry != null) { - // We don't know the format of the original geometry - so going with default - return GeoJson.toXContent(geometry, builder, params); - } else { - return builder.nullValue(); - } - } - }; - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - return new GeometryFormat() { - @Override - public Geometry fromXContent(XContentParser parser) throws IOException { - return geoJsonParser.fromXContent(parser); - } + public GeometryFormat geometryFormat(String format) { + if (format.equals(GeoJsonGeometryFormat.NAME)) { + return new GeoJsonGeometryFormat(geoJsonParser); + } else if (format.equals(WKTGeometryFormat.NAME)) { + return new WKTGeometryFormat(wellKnownTextParser); + } else { + throw new IllegalArgumentException("Unrecognized geometry format [" + format + "]."); + } + } - @Override - public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException { - if (geometry != null) { - return GeoJson.toXContent(geometry, builder, params); - } else { - return builder.nullValue(); - } - } - }; + /** + * Returns a geometry format object that can parse and then serialize the object back to the same format. + * This method automatically recognizes the format by examining the provided {@link XContentParser}. + */ + public GeometryFormat geometryFormat(XContentParser parser) { + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + return new GeoJsonGeometryFormat(geoJsonParser); } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return new GeometryFormat() { - @Override - public Geometry fromXContent(XContentParser parser) throws IOException, ParseException { - return wellKnownTextParser.fromWKT(parser.text()); - } - - @Override - public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException { - if (geometry != null) { - return builder.value(wellKnownTextParser.toWKT(geometry)); - } else { - return builder.nullValue(); - } - } - }; - + return new WKTGeometryFormat(wellKnownTextParser); + } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + // We don't know the format of the original geometry - so going with default + return new GeoJsonGeometryFormat(geoJsonParser); + } else { + throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); } - throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); } /** diff --git a/server/src/main/java/org/elasticsearch/common/geo/WKTGeometryFormat.java b/server/src/main/java/org/elasticsearch/common/geo/WKTGeometryFormat.java new file mode 100644 index 0000000000000..dc0671c51e1b8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/geo/WKTGeometryFormat.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.geo; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.WellKnownText; + +import java.io.IOException; +import java.text.ParseException; + +public class WKTGeometryFormat implements GeometryFormat { + public static final String NAME = "wkt"; + + private final WellKnownText wellKnownTextParser; + + public WKTGeometryFormat(WellKnownText wellKnownTextParser) { + this.wellKnownTextParser = wellKnownTextParser; + } + + @Override + public String name() { + return NAME; + } + + @Override + public Geometry fromXContent(XContentParser parser) throws IOException, ParseException { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return wellKnownTextParser.fromWKT(parser.text()); + } + + @Override + public XContentBuilder toXContent(Geometry geometry, XContentBuilder builder, ToXContent.Params params) throws IOException { + if (geometry != null) { + return builder.value(wellKnownTextParser.toWKT(geometry)); + } else { + return builder.nullValue(); + } + } + + @Override + public String toXContentAsObject(Geometry geometry) { + return wellKnownTextParser.toWKT(geometry); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/network/InetAddresses.java b/server/src/main/java/org/elasticsearch/common/network/InetAddresses.java index 21c5b4e04b25b..444c291f36a83 100644 --- a/server/src/main/java/org/elasticsearch/common/network/InetAddresses.java +++ b/server/src/main/java/org/elasticsearch/common/network/InetAddresses.java @@ -384,4 +384,17 @@ public static Tuple parseCidr(String maskedAddress) { throw new IllegalArgumentException("Expected [ip/prefix] but was [" + maskedAddress + "]"); } } + + /** + * Given an address and prefix length, returns the string representation of the range in CIDR notation. + * + * See {@link #toAddrString} for details on how the address is represented. + */ + public static String toCidrString(InetAddress address, int prefixLength) { + return new StringBuilder() + .append(toAddrString(address)) + .append("/") + .append(prefixLength) + .toString(); + } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index dfbb507365fa9..df5b419e527e9 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -97,6 +97,16 @@ private static void extractRawValues(List values, List part, String[] pa } } + /** + * For the provided path, return its value in the xContent map. 
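For illustration only, a small sketch of the path extraction described here, including the null-placeholder overload added just below; the map contents and field names are invented:

    Map<String, Object> explicitNull = new HashMap<>();
    explicitNull.put("inner", null);                                      // the path exists but holds null
    Map<String, Object> source = Map.of(
        "field", Map.of("inner", List.of(1, 2)),
        "other", explicitNull);

    XContentMapValues.extractValue("field.inner", source);               // [1, 2] -- arrays and objects can be returned
    XContentMapValues.extractValue("field.missing", source);             // null   -- the path does not exist
    XContentMapValues.extractValue("other.inner", source, "NULL_VALUE"); // "NULL_VALUE" -- the path exists but the value is null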
+ * + * Note that in contrast with {@link XContentMapValues#extractRawValues}, array and object values + * can be returned. + * + * @param path the value's path in the map. + * + * @return the value associated with the path in the map or 'null' if the path does not exist. + */ public static Object extractValue(String path, Map map) { return extractValue(map, path.split("\\.")); } @@ -105,19 +115,51 @@ public static Object extractValue(Map map, String... pathElements) { if (pathElements.length == 0) { return null; } - return extractValue(pathElements, 0, map); + return XContentMapValues.extractValue(pathElements, 0, map, null); } - @SuppressWarnings({"unchecked"}) - private static Object extractValue(String[] pathElements, int index, Object currentValue) { - if (index == pathElements.length) { - return currentValue; - } - if (currentValue == null) { + /** + * For the provided path, return its value in the xContent map. + * + * Note that in contrast with {@link XContentMapValues#extractRawValues}, array and object values + * can be returned. + * + * @param path the value's path in the map. + * @param nullValue a value to return if the path exists, but the value is 'null'. This helps + * in distinguishing between a path that doesn't exist vs. a value of 'null'. + * + * @return the value associated with the path in the map or 'null' if the path does not exist. + */ + public static Object extractValue(String path, Map map, Object nullValue) { + String[] pathElements = path.split("\\."); + if (pathElements.length == 0) { return null; } + return extractValue(pathElements, 0, map, nullValue); + } + + private static Object extractValue(String[] pathElements, + int index, + Object currentValue, + Object nullValue) { + if (currentValue instanceof List) { + List valueList = (List) currentValue; + List newList = new ArrayList<>(valueList.size()); + for (Object o : valueList) { + Object listValue = extractValue(pathElements, index, o, nullValue); + if (listValue != null) { + newList.add(listValue); + } + } + return newList; + } + + if (index == pathElements.length) { + return currentValue != null ? 
currentValue : nullValue; + } + if (currentValue instanceof Map) { - Map map = (Map) currentValue; + Map map = (Map) currentValue; String key = pathElements[index]; Object mapValue = map.get(key); int nextIndex = index + 1; @@ -126,18 +168,12 @@ private static Object extractValue(String[] pathElements, int index, Object curr mapValue = map.get(key); nextIndex++; } - return extractValue(pathElements, nextIndex, mapValue); - } - if (currentValue instanceof List) { - List valueList = (List) currentValue; - List newList = new ArrayList(valueList.size()); - for (Object o : valueList) { - Object listValue = extractValue(pathElements, index, o); - if (listValue != null) { - newList.add(listValue); - } + + if (map.containsKey(key) == false) { + return null; } - return newList; + + return extractValue(pathElements, nextIndex, mapValue, nullValue); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 06aeff823e099..24d0a11ba1729 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -27,19 +27,25 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.GeoJsonGeometryFormat; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.MapXContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import java.io.IOException; +import java.io.UncheckedIOException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -76,10 +82,45 @@ public interface Indexer { } /** - * interface representing parser in geometry indexing pipeline + * Interface representing parser in geometry indexing pipeline. */ - public interface Parser { - Parsed parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException; + public abstract static class Parser { + /** + * Parse the given xContent value to an object of type {@link Parsed}. The value can be + * in any supported format. + */ + public abstract Parsed parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException; + + /** + * Given a parsed value and a format string, formats the value into a plain Java object. + * + * Supported formats include 'geojson' and 'wkt'. The different formats are defined + * as subclasses of {@link org.elasticsearch.common.geo.GeometryFormat}. + */ + public abstract Object format(Parsed value, String format); + + /** + * Parses the given value, then formats it according to the 'format' string. 
+ * + * By default, this method simply parses the value using {@link Parser#parse}, then formats + * it with {@link Parser#format}. However some {@link Parser} implementations override this + * as they can avoid parsing the value if it is already in the right format. + */ + public Object parseAndFormatObject(Object value, AbstractGeometryFieldMapper mapper, String format) { + Parsed geometry; + try (XContentParser parser = new MapXContentParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, + Collections.singletonMap("dummy_field", value), XContentType.JSON)) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + geometry = parse(parser, mapper); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (ParseException e) { + throw new RuntimeException(e); + } + return format(geometry, format); + } } public abstract static class Builder, FT extends AbstractGeometryFieldType> @@ -141,6 +182,17 @@ public Builder ignoreZValue(final boolean ignoreZValue) { } } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format == null) { + format = GeoJsonGeometryFormat.NAME; + } + + AbstractGeometryFieldType mappedFieldType = fieldType(); + Parser geometryParser = mappedFieldType.geometryParser(); + return geometryParser.parseAndFormatObject(value, this, format); + } + public abstract static class TypeParser implements Mapper.TypeParser { protected abstract T newBuilder(String name, Map params); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java index f9c27ae2e01c8..5160d97faf21a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java @@ -23,9 +23,13 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeometryFormat; +import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; import java.io.IOException; import java.text.ParseException; @@ -158,6 +162,7 @@ public interface ParsedPoint { void validate(String fieldName); void normalize(String fieldName); void resetCoords(double x, double y); + Point asGeometry(); default boolean isNormalizable(double coord) { return Double.isNaN(coord) == false && Double.isInfinite(coord) == false; } @@ -178,7 +183,15 @@ protected void parsePointIgnoringMalformed(XContentParser parser, ParsedPoint po } /** A parser implementation that can parse the various point formats */ - public static class PointParser

<P extends ParsedPoint> implements Parser<List<P>> { + public static class PointParser<P extends ParsedPoint> extends Parser<List<P>> { + /** + * Note that this parser is only used for formatting values. + */ + private final GeometryParser geometryParser; + + public PointParser() { + this.geometryParser = new GeometryParser(true, true, true); + } @Override public List<P> parse(XContentParser parser, AbstractGeometryFieldMapper geometryMapper) throws IOException, ParseException { @@ -238,5 +251,16 @@ public List<P> parse(XContentParser parser, AbstractGeometryFieldMapper geometry return points; } } + + @Override + public Object format(List<P>
points, String format) { + List result = new ArrayList<>(); + GeometryFormat geometryFormat = geometryParser.geometryFormat(format); + for (ParsedPoint point : points) { + Geometry geometry = point.asGeometry(); + result.add(geometryFormat.toXContentAsObject(geometry)); + } + return result; + } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 12b62d0de7e24..fd055b495253a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -187,7 +187,14 @@ protected void parseCreateField(ParseContext context) throws IOException { // no doc values createFieldNamesField(context); } + } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 62a0a433b81a2..1ea216fd5234f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -248,6 +249,20 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + public Boolean parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + if (value instanceof Boolean) { + return (Boolean) value; + } else { + String textValue = value.toString(); + return Booleans.parseBoolean(textValue.toCharArray(), 0, textValue.length(), false); + } + } + @Override public ParametrizedFieldMapper.Builder getMergeBuilder() { return new Builder(simpleName()).init(this); @@ -258,4 +273,8 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + protected Object nullValue() { + return nullValue; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 63444041f2b86..2e57d68274e88 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -531,6 +531,19 @@ private void parse(ParseContext parseContext, Token token, } } + @Override + protected List parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + if (value instanceof List) { + return (List) value; + } else { + return List.of(value); + } + } + static class CompletionInputMetadata { public final String input; public final Map> contexts; diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 6f56b28e93759..fa4f11d9a0c45 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -53,6 +53,7 @@ import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -268,6 +269,7 @@ protected DateMathParser dateMathParser() { return dateMathParser; } + // Visible for testing. public long parse(String value) { return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant()); } @@ -495,6 +497,11 @@ protected DateFieldMapper clone() { return (DateFieldMapper) super.clone(); } + @Override + protected String nullValue() { + return nullValueAsString; + } + @Override protected void parseCreateField(ParseContext context) throws IOException { String dateAsString; @@ -541,6 +548,18 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + public String parseSourceValue(Object value, String format) { + String date = value.toString(); + long timestamp = fieldType().parse(date); + + ZonedDateTime dateTime = fieldType().resolution().toInstant(timestamp).atZone(ZoneOffset.UTC); + DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter(); + if (format != null) { + dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale()); + } + return dateTimeFormatter.format(dateTime); + } public boolean getIgnoreMalformed() { return ignoreMalformed; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 30f38ce16d8a3..7581833648603 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.support.AbstractXContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.ArrayList; @@ -222,6 +224,13 @@ public CopyTo copyTo() { return copyTo; } + /** + * A value to use in place of a {@code null} value in the document source. + */ + protected Object nullValue() { + return null; + } + /** * Whether this mapper can handle an array value during document parsing. If true, * when an array is encountered during parsing, the document parser will pass the @@ -269,6 +278,52 @@ public void parse(ParseContext context) throws IOException { */ protected abstract void parseCreateField(ParseContext context) throws IOException; + /** + * Given access to a document's _source, return this field's values. + * + * In addition to pulling out the values, mappers can parse them into a standard form. 
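For illustration only, a hypothetical override of the parseSourceValue hook that the following paragraph describes, written in the same style as the concrete mappers further down in this patch; the trimming behaviour and the mapper it would belong to are invented:

    @Override
    protected String parseSourceValue(Object value, String format) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
        }
        // Mirror whatever normalization this hypothetical mapper applies in parseCreateField.
        return value.toString().trim();
    }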
This + * method delegates parsing to {@link #parseSourceValue} for parsing. Most mappers will choose + * to override {@link #parseSourceValue} -- for example numeric field mappers make sure to + * parse the source value into a number of the right type. Some mappers may need more + * flexibility and can override this entire method instead. + * + * Note that for array values, the order in which values are returned is undefined and should + * not be relied on. + * + * @param lookup a lookup structure over the document's source. + * @param format an optional format string used when formatting values, for example a date format. + * @return a list a standardized field values. + */ + public List lookupValues(SourceLookup lookup, @Nullable String format) { + Object sourceValue = lookup.extractValue(name(), nullValue()); + if (sourceValue == null) { + return List.of(); + } + + List values = new ArrayList<>(); + if (parsesArrayValue()) { + return (List) parseSourceValue(sourceValue, format); + } else { + List sourceValues = sourceValue instanceof List ? (List) sourceValue : List.of(sourceValue); + for (Object value : sourceValues) { + Object parsedValue = parseSourceValue(value, format); + if (parsedValue != null) { + values.add(parsedValue); + } + } + } + return values; + } + + /** + * Given a value that has been extracted from a document's source, parse it into a standard + * format. This parsing logic should closely mirror the value parsing in + * {@link #parseCreateField} or {@link #parse}. + * + * Note that when overriding this method, {@link #lookupValues} should *not* be overridden. + */ + protected abstract Object parseSourceValue(Object value, @Nullable String format); + protected void createFieldNamesField(ParseContext context) { FieldNamesFieldType fieldNamesFieldType = context.docMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType(); if (fieldNamesFieldType != null && fieldNamesFieldType.isEnabled()) { @@ -292,6 +347,7 @@ protected FieldMapper clone() { } } + @Override public FieldMapper merge(Mapper mergeWith) { FieldMapper merged = clone(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index f4529b4643f2b..ad3fc75778980 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -37,6 +37,15 @@ class FieldTypeLookup implements Iterable { private final Map fullNameToFieldType = new HashMap<>(); private final Map aliasToConcreteName = new HashMap<>(); + + /** + * A map from field name to all fields whose content has been copied into it + * through copy_to. A field only be present in the map if some other field + * has listed it as a target of copy_to. + * + * For convenience, the set of copied fields includes the field itself. 
+ */ + private final Map> fieldToCopiedFields = new HashMap<>(); private final DynamicKeyFieldTypeLookup dynamicKeyLookup; FieldTypeLookup() { @@ -45,7 +54,6 @@ class FieldTypeLookup implements Iterable { FieldTypeLookup(Collection fieldMappers, Collection fieldAliasMappers) { - Map dynamicKeyMappers = new HashMap<>(); for (FieldMapper fieldMapper : fieldMappers) { @@ -55,6 +63,17 @@ class FieldTypeLookup implements Iterable { if (fieldMapper instanceof DynamicKeyFieldMapper) { dynamicKeyMappers.put(fieldName, (DynamicKeyFieldMapper) fieldMapper); } + + for (String targetField : fieldMapper.copyTo().copyToFields()) { + Set sourcePath = fieldToCopiedFields.get(targetField); + if (sourcePath == null) { + fieldToCopiedFields.put(targetField, Set.of(targetField, fieldName)); + } else if (sourcePath.contains(fieldName) == false) { + Set newSourcePath = new HashSet<>(sourcePath); + newSourcePath.add(fieldName); + fieldToCopiedFields.put(targetField, Collections.unmodifiableSet(newSourcePath)); + } + } } for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) { @@ -99,6 +118,31 @@ public Set simpleMatchToFullName(String pattern) { return fields; } + /** + * Given a field, returns its possible paths in the _source. + * + * For most fields, the source path is the same as the field itself. However + * there are some exceptions: + * - The 'source path' for a field alias is its target field. + * - For a multi-field, the source path is the parent field. + * - One field's content could have been copied to another through copy_to. + */ + public Set sourcePaths(String field) { + String resolvedField = aliasToConcreteName.getOrDefault(field, field); + + int lastDotIndex = resolvedField.lastIndexOf('.'); + if (lastDotIndex > 0) { + String parentField = resolvedField.substring(0, lastDotIndex); + if (fullNameToFieldType.containsKey(parentField)) { + resolvedField = parentField; + } + } + + return fieldToCopiedFields.containsKey(resolvedField) + ? 
fieldToCopiedFields.get(resolvedField) + : Set.of(resolvedField); + } + @Override public Iterator iterator() { Iterator concreteFieldTypes = fullNameToFieldType.values().iterator(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 00e90ddd1081b..1ac9760bb90c3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -29,10 +29,11 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geometry.Point; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointIndexFieldData; -import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor; import org.elasticsearch.index.mapper.GeoPointFieldMapper.ParsedGeoPoint; +import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import java.io.IOException; @@ -49,6 +50,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper, List> { public static final String CONTENT_TYPE = "geo_point"; public static final FieldType FIELD_TYPE = new FieldType(); + static { FIELD_TYPE.setStored(false); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -218,6 +220,10 @@ public void resetCoords(double x, double y) { this.reset(y, x); } + public Point asGeometry() { + return new Point(lon(), lat()); + } + @Override public boolean equals(Object other) { double oLat; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 6ad1de5b868cf..bebf07fa10e70 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -71,7 +71,7 @@ private GeoShapeFieldType buildFieldType(BuilderContext context) { GeoShapeFieldType ft = new GeoShapeFieldType(buildFullName(context), indexed, hasDocValues, meta); GeometryParser geometryParser = new GeometryParser(ft.orientation.getAsBoolean(), coerce().value(), ignoreZValue().value()); - ft.setGeometryParser((parser, mapper) -> geometryParser.parse(parser)); + ft.setGeometryParser(new GeoShapeParser(geometryParser)); ft.setGeometryIndexer(new GeoShapeIndexer(orientation().value().getAsBoolean(), buildFullName(context))); ft.setGeometryQueryBuilder(new VectorGeoShapeQueryProcessor()); ft.setOrientation(orientation == null ? Defaults.ORIENTATION.value() : orientation); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeParser.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeParser.java new file mode 100644 index 0000000000000..9a82646b95dd6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeParser.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.geo.GeometryFormat; +import org.elasticsearch.common.geo.GeometryParser; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.MapXContentParser; +import org.elasticsearch.geometry.Geometry; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.text.ParseException; +import java.util.Collections; + +public class GeoShapeParser extends AbstractGeometryFieldMapper.Parser { + private final GeometryParser geometryParser; + + public GeoShapeParser(GeometryParser geometryParser) { + this.geometryParser = geometryParser; + } + + @Override + public Geometry parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException { + return geometryParser.parse(parser); + } + + @Override + public Object format(Geometry value, String format) { + return geometryParser.geometryFormat(format).toXContentAsObject(value); + } + + @Override + public Object parseAndFormatObject(Object value, AbstractGeometryFieldMapper mapper, String format) { + try (XContentParser parser = new MapXContentParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, + Collections.singletonMap("dummy_field", value), XContentType.JSON)) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + + GeometryFormat geometryFormat = geometryParser.geometryFormat(parser); + if (geometryFormat.name().equals(format)) { + return value; + } + + Geometry geometry = geometryFormat.fromXContent(parser); + return format(geometry, format); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 2824eb83a6a9f..78e93db5893a7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -348,6 +348,11 @@ protected String contentType() { return fieldType().typeName(); } + @Override + protected Object nullValue() { + return nullValue; + } + @Override protected IpFieldMapper clone() { return (IpFieldMapper) super.clone(); @@ -400,6 +405,21 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + InetAddress address; + if (value instanceof InetAddress) { + address = (InetAddress) value; + } else { + address = InetAddresses.forString(value.toString()); + } + return 
InetAddresses.toAddrString(address); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { IpFieldMapper mergeWith = (IpFieldMapper) other; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index c1f4983435098..2decc627fc46c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -361,25 +362,9 @@ protected void parseCreateField(ParseContext context) throws IOException { return; } - final NamedAnalyzer normalizer = fieldType().normalizer(); + NamedAnalyzer normalizer = fieldType().normalizer(); if (normalizer != null) { - try (TokenStream ts = normalizer.tokenStream(name(), value)) { - final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); - ts.reset(); - if (ts.incrementToken() == false) { - throw new IllegalStateException("The normalization token stream is " - + "expected to produce exactly 1 token, but got 0 for analyzer " - + normalizer + " and input \"" + value + "\""); - } - final String newValue = termAtt.toString(); - if (ts.incrementToken()) { - throw new IllegalStateException("The normalization token stream is " - + "expected to produce exactly 1 token, but got 2+ for analyzer " - + normalizer + " and input \"" + value + "\""); - } - ts.end(); - value = newValue; - } + value = normalizeValue(normalizer, value); } // convert to utf8 only once before feeding postings/dv/stored fields @@ -397,11 +382,60 @@ protected void parseCreateField(ParseContext context) throws IOException { context.doc().add(new SortedSetDocValuesField(fieldType().name(), binaryValue)); } } + + private String normalizeValue(NamedAnalyzer normalizer, String value) throws IOException { + try (TokenStream ts = normalizer.tokenStream(name(), value)) { + final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); + ts.reset(); + if (ts.incrementToken() == false) { + throw new IllegalStateException("The normalization token stream is " + + "expected to produce exactly 1 token, but got 0 for analyzer " + + normalizer + " and input \"" + value + "\""); + } + final String newValue = termAtt.toString(); + if (ts.incrementToken()) { + throw new IllegalStateException("The normalization token stream is " + + "expected to produce exactly 1 token, but got 2+ for analyzer " + + normalizer + " and input \"" + value + "\""); + } + ts.end(); + return newValue; + } + } + + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + String keywordValue = value.toString(); + if (keywordValue.length() > ignoreAbove) { + return null; + } + + NamedAnalyzer normalizer = fieldType().normalizer(); + if (normalizer == null) { + return keywordValue; + } + + try { + return normalizeValue(normalizer, keywordValue); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + @Override protected String contentType() { return CONTENT_TYPE; } + @Override + protected String nullValue() { + return nullValue; + } + @Override protected void 
mergeOptions(FieldMapper other, List conflicts) { KeywordFieldMapper k = (KeywordFieldMapper) other; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java index 5ee6f12232f27..564302a973240 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -42,11 +43,14 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.query.LegacyGeoShapeQueryProcessor; import org.locationtech.spatial4j.shape.Shape; import java.io.IOException; +import java.text.ParseException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -255,7 +259,7 @@ private GeoShapeFieldType buildFieldType(BuilderContext context) { setupFieldTypeDeprecatedParameters(ft); setupPrefixTrees(ft); ft.setGeometryIndexer(new LegacyGeoShapeIndexer(ft)); - ft.setGeometryParser(ShapeParser::parse); + ft.setGeometryParser(new LegacyGeoShapeParser()); ft.setGeometryQueryBuilder(new LegacyGeoShapeQueryProcessor(ft)); ft.setOrientation(orientation == null ? Defaults.ORIENTATION.value() : orientation); return ft; @@ -277,6 +281,28 @@ public LegacyGeoShapeFieldMapper build(BuilderContext context) { } } + private static class LegacyGeoShapeParser extends Parser> { + /** + * Note that this parser is only used for formatting values. + */ + private final GeometryParser geometryParser; + + private LegacyGeoShapeParser() { + this.geometryParser = new GeometryParser(true, true, true); + } + + @Override + public ShapeBuilder parse(XContentParser parser, AbstractGeometryFieldMapper mapper) throws IOException, ParseException { + return ShapeParser.parse(parser); + } + + @Override + public Object format(ShapeBuilder value, String format) { + Geometry geometry = value.buildGeometry(); + return geometryParser.geometryFormat(format).toXContentAsObject(geometry); + } + } + public static final class GeoShapeFieldType extends AbstractShapeGeometryFieldType, Shape> { private String tree = DeprecatedParameters.Defaults.TREE; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8fec86e652a34..2d7529495b7ef 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -545,6 +545,14 @@ public Set simpleMatchToFullName(String pattern) { return fieldTypes.simpleMatchToFullName(pattern); } + /** + * Given a field name, returns its possible paths in the _source. For example, + * the 'source path' for a multi-field is the path to its parent field. 
+ */ + public Set sourcePath(String fullName) { + return fieldTypes.sourcePaths(fullName); + } + /** * Returns all mapped field types. */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 05b18099e7b1e..d77826a505d69 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -86,6 +86,11 @@ public void postParse(ParseContext context) throws IOException { // do nothing } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException("The " + typeName() + " field is not stored in _source."); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index e5f1f3fdf9b41..232a13c69f015 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -1036,6 +1036,11 @@ protected NumberFieldMapper clone() { return (NumberFieldMapper) super.clone(); } + @Override + protected Number nullValue() { + return nullValue; + } + @Override protected void parseCreateField(ParseContext context) throws IOException { XContentParser parser = context.parser(); @@ -1085,6 +1090,19 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected Number parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + if (value.equals("")) { + return nullValue; + } + + return fieldType().type.parse(value, coerce.value()); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { NumberFieldMapper m = (NumberFieldMapper) other; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 96e3e72fa73a6..0a3e1f648e1df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -52,6 +52,7 @@ import java.time.ZoneId; import java.time.ZoneOffset; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -373,6 +374,31 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + @SuppressWarnings("unchecked") + protected Object parseSourceValue(Object value, String format) { + RangeType rangeType = fieldType().rangeType(); + if (!(value instanceof Map)) { + assert rangeType == RangeType.IP; + Tuple ipRange = InetAddresses.parseCidr(value.toString()); + return InetAddresses.toCidrString(ipRange.v1(), ipRange.v2()); + } + + DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter(); + if (format != null) { + dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale()); + } + + Map range = (Map) value; + Map parsedRange = new HashMap<>(); + for (Map.Entry entry : range.entrySet()) { + Object parsedValue = rangeType.parseValue(entry.getValue(), coerce.value(), 
fieldType().dateMathParser); + Object formattedValue = rangeType.formatValue(parsedValue, dateTimeFormatter); + parsedRange.put(entry.getKey(), formattedValue); + } + return parsedRange; + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { RangeFieldMapper mergeWith = (RangeFieldMapper) other; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java index 86d79e3171573..f293d4ded6c61 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeType.java @@ -36,14 +36,17 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.net.InetAddress; +import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -69,8 +72,9 @@ public InetAddress parseTo(RangeFieldMapper.RangeFieldType fieldType, XContentPa InetAddress address = InetAddresses.forString(parser.text()); return included ? address : nextDown(address); } + @Override - public InetAddress parse(Object value, boolean coerce) { + public InetAddress parseValue(Object value, boolean coerce, @Nullable DateMathParser dateMathParser) { if (value instanceof InetAddress) { return (InetAddress) value; } else { @@ -80,6 +84,12 @@ public InetAddress parse(Object value, boolean coerce) { return InetAddresses.forString(value.toString()); } } + + @Override + public Object formatValue(Object value, DateFormatter dateFormatter) { + return InetAddresses.toAddrString((InetAddress) value); + } + @Override public InetAddress minValue() { return InetAddressPoint.MIN_VALUE; @@ -170,22 +180,34 @@ private Query createQuery(String field, Object lower, Object upper, boolean incl public Field getRangeField(String name, RangeFieldMapper.Range r) { return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()}); } - private Number parse(DateMathParser dateMathParser, String dateStr) { - return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");}) - .toEpochMilli(); - } @Override public Number parseFrom(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException { - Number value = parse(fieldType.dateMathParser, parser.text()); + Number value = parseValue(parser.text(), coerce, fieldType.dateMathParser); return included ? value : nextUp(value); } @Override public Number parseTo(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException{ - Number value = parse(fieldType.dateMathParser, parser.text()); + Number value = parseValue(parser.text(), coerce, fieldType.dateMathParser); return included ? 
value : nextDown(value); } + + @Override + public Long parseValue(Object dateStr, boolean coerce, @Nullable DateMathParser dateMathParser) { + assert dateMathParser != null; + return dateMathParser.parse(dateStr.toString(), () -> { + throw new IllegalArgumentException("now is not used at indexing time"); + }).toEpochMilli(); + } + + @Override + public Object formatValue(Object value, DateFormatter dateFormatter) { + long timestamp = (long) value; + ZonedDateTime dateTime = Instant.ofEpochMilli(timestamp).atZone(ZoneOffset.UTC); + return dateFormatter.format(dateTime); + } + @Override public Long minValue() { return Long.MIN_VALUE; @@ -243,6 +265,7 @@ public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Ob return createRangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation); } + @Override public Query withinQuery(String field, Object from, Object to, boolean includeLower, boolean includeUpper) { return LONG.withinQuery(field, from, to, includeLower, includeUpper); @@ -598,6 +621,15 @@ public List createFields(ParseContext context, String name, Rang } return fields; } + + public Object parseValue(Object value, boolean coerce, @Nullable DateMathParser dateMathParser) { + return numberType.parse(value, coerce); + } + + public Object formatValue(Object value, DateFormatter formatter) { + return value; + } + /** parses from value. rounds according to included flag */ public Object parseFrom(RangeFieldMapper.RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException { @@ -618,15 +650,12 @@ public Object parseTo(RangeFieldMapper.RangeFieldType fieldType, XContentParser public abstract Query withinQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo); public abstract Query containsQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo); public abstract Query intersectsQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo); - public Object parse(Object value, boolean coerce) { - return numberType.parse(value, coerce); - } public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo, ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser, QueryShardContext context) { - Object lower = from == null ? minValue() : parse(from, false); - Object upper = to == null ? maxValue() : parse(to, false); + Object lower = from == null ? minValue() : parseValue(from, false, dateMathParser); + Object upper = to == null ? 
maxValue() : parseValue(to, false, dateMathParser); return createRangeQuery(field, hasDocValues, lower, upper, includeFrom, includeTo, relation); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 312b6b2e6ba6e..bada725c33b40 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -496,6 +496,11 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new UnsupportedOperationException(); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { @@ -522,6 +527,11 @@ protected void parseCreateField(ParseContext context) { throw new UnsupportedOperationException(); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { @@ -824,6 +834,14 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value.toString(); + } + @Override public Iterator iterator() { List subIterators = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 4c189c92f7c74..31c30aac86655 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -33,8 +33,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 1ff399860a0f0..357313a355fe5 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -56,6 +56,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -111,6 +112,7 @@ final class DefaultSearchContext extends SearchContext { private ScriptFieldsContext scriptFields; private FetchSourceContext fetchSourceContext; private 
FetchDocValuesContext docValuesContext; + private FetchFieldsContext fetchFieldsContext; private int from = -1; private int size = -1; private SortAndFormats sort; @@ -454,6 +456,17 @@ public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { return this; } + @Override + public FetchFieldsContext fetchFieldsContext() { + return fetchFieldsContext; + } + + @Override + public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) { + this.fetchFieldsContext = fetchFieldsContext; + return this; + } + @Override public ContextIndexSearcher searcher() { return this.searcher; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index ff5a01254fa4c..b3c2a7c729e00 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -220,6 +220,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.ExplainPhase; import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; +import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase; import org.elasticsearch.search.fetch.subphase.FetchScorePhase; import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; import org.elasticsearch.search.fetch.subphase.FetchVersionPhase; @@ -731,6 +732,7 @@ private void registerFetchSubPhases(List plugins) { registerFetchSubPhase(new FetchDocValuesPhase()); registerFetchSubPhase(new ScriptFieldsPhase()); registerFetchSubPhase(new FetchSourcePhase()); + registerFetchSubPhase(new FetchFieldsPhase()); registerFetchSubPhase(new FetchVersionPhase()); registerFetchSubPhase(new SeqNoPrimaryTermPhase()); registerFetchSubPhase(new MatchedQueriesPhase()); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index c7f537579a8a5..412c75b3100c1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -86,6 +86,7 @@ import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.AliasFilter; @@ -918,6 +919,12 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields()); context.docValuesContext(docValuesContext); } + if (source.fetchFields() != null) { + String indexName = context.indexShard().shardId().getIndexName(); + FetchFieldsContext fetchFieldsContext = FetchFieldsContext.create( + indexName, context.mapperService(), source.fetchFields()); + context.fetchFieldsContext(fetchFieldsContext); + } if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); try { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index d10b226c6293b..9ca3cb639478c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -38,8 +38,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index a0a480fbc55c3..ffee18e81cc24 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -26,8 +26,8 @@ import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 9bf4abe0b67e2..98066ad732379 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.builder; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -45,8 +46,8 @@ import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescorerBuilder; @@ -89,6 +90,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField _SOURCE_FIELD = new ParseField("_source"); public static final ParseField STORED_FIELDS_FIELD = new ParseField("stored_fields"); public static final ParseField DOCVALUE_FIELDS_FIELD = new ParseField("docvalue_fields"); + public static 
final ParseField FETCH_FIELDS_FIELD = new ParseField("fields"); public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields"); public static final ParseField SCRIPT_FIELD = new ParseField("script"); public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); @@ -165,6 +167,7 @@ public static HighlightBuilder highlight() { private List docValueFields; private List scriptFields; private FetchSourceContext fetchSourceContext; + private List fetchFields; private AggregatorFactories.Builder aggregations; @@ -239,6 +242,12 @@ public SearchSourceBuilder(StreamInput in) throws IOException { sliceBuilder = in.readOptionalWriteable(SliceBuilder::new); collapse = in.readOptionalWriteable(CollapseBuilder::new); trackTotalHitsUpTo = in.readOptionalInt(); + + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + if (in.readBoolean()) { + fetchFields = in.readList(FieldAndFormat::new); + } + } } @Override @@ -293,6 +302,13 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(sliceBuilder); out.writeOptionalWriteable(collapse); out.writeOptionalInt(trackTotalHitsUpTo); + + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeBoolean(fetchFields != null); + if (fetchFields != null) { + out.writeList(fetchFields); + } + } } /** @@ -821,6 +837,33 @@ public SearchSourceBuilder docValueField(String name) { return docValueField(name, null); } + /** + * Gets the fields to load and return as part of the search request. + */ + public List fetchFields() { + return fetchFields; + } + + /** + * Adds a field to load and return as part of the search request. + */ + public SearchSourceBuilder fetchField(String name) { + return fetchField(name, null); + } + + /** + * Adds a field to load and return as part of the search request. + * @param name the field name. + * @param format an optional format string used when formatting values, for example a date format. + */ + public SearchSourceBuilder fetchField(String name, @Nullable String format) { + if (fetchFields == null) { + fetchFields = new ArrayList<>(); + } + fetchFields.add(new FieldAndFormat(name, format)); + return this; + } + /** * Adds a script field under the given name with the provided script. 
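[Editor's note, not part of the patch: the new fetchField methods introduced above can be chained on a SearchSourceBuilder just like docValueField. A minimal usage sketch follows; the field names "user.name" and "timestamp" are hypothetical.]

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.builder.SearchSourceBuilder;

    // Build a search source that requests two fields through the new high-level "fields" section.
    SearchSourceBuilder source = new SearchSourceBuilder()
        .query(QueryBuilders.matchAllQuery())
        .fetchField("user.name")                  // no explicit format
        .fetchField("timestamp", "epoch_millis"); // format applied when rendering values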
* @@ -1116,6 +1159,11 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { docValueFields.add(FieldAndFormat.fromXContent(parser)); } + } else if (FETCH_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + fetchFields = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + fetchFields.add(FieldAndFormat.fromXContent(parser)); + } } else if (INDICES_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { indexBoosts.add(new IndexBoost(parser)); @@ -1213,12 +1261,15 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t if (docValueFields != null) { builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName()); for (FieldAndFormat docValueField : docValueFields) { - builder.startObject() - .field("field", docValueField.field); - if (docValueField.format != null) { - builder.field("format", docValueField.format); - } - builder.endObject(); + docValueField.toXContent(builder, params); + } + builder.endArray(); + } + + if (fetchFields != null) { + builder.startArray(FETCH_FIELDS_FIELD.getPreferredName()); + for (FieldAndFormat docValueField : fetchFields) { + docValueField.toXContent(builder, params); } builder.endArray(); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 3ebde562919f1..54ec113cf61c4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -102,7 +102,8 @@ public void execute(SearchContext context) { if (!context.hasScriptFields() && !context.hasFetchSourceContext()) { context.fetchSourceContext(new FetchSourceContext(true)); } - fieldsVisitor = new FieldsVisitor(context.sourceRequested()); + boolean loadSource = context.sourceRequested() || context.fetchFieldsContext() != null; + fieldsVisitor = new FieldsVisitor(loadSource); } else if (storedFieldsContext.fetchFields() == false) { // disable stored fields entirely fieldsVisitor = null; @@ -131,7 +132,7 @@ public void execute(SearchContext context) { } } } - boolean loadSource = context.sourceRequested(); + boolean loadSource = context.sourceRequested() || context.fetchFieldsContext() != null; if (storedToRequestedFields.isEmpty()) { // empty list specified, default to disable _source if no explicit indication fieldsVisitor = new FieldsVisitor(loadSource); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java index c4449d0137779..3ab3003a27f6b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java @@ -18,96 +18,18 @@ */ package org.elasticsearch.search.fetch.subphase; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.XContent; -import 
org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; -import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.Objects; /** * All the required context to pull a field from the doc values. */ public class FetchDocValuesContext { - /** - * Wrapper around a field name and the format that should be used to - * display values of this field. - */ - public static final class FieldAndFormat implements Writeable { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("docvalues_field", - a -> new FieldAndFormat((String) a[0], (String) a[1])); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field")); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("format")); - } - - /** - * Parse a {@link FieldAndFormat} from some {@link XContent}. - */ - public static FieldAndFormat fromXContent(XContentParser parser) throws IOException { - Token token = parser.currentToken(); - if (token.isValue()) { - return new FieldAndFormat(parser.text(), null); - } else { - return PARSER.apply(parser, null); - } - } - - /** The name of the field. */ - public final String field; - - /** The format of the field, or {@code null} if defaults should be used. */ - public final String format; - - /** Sole constructor. */ - public FieldAndFormat(String field, @Nullable String format) { - this.field = Objects.requireNonNull(field); - this.format = format; - } - - /** Serialization constructor. */ - public FieldAndFormat(StreamInput in) throws IOException { - this.field = in.readString(); - format = in.readOptionalString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(field); - out.writeOptionalString(format); - } - - @Override - public int hashCode() { - int h = field.hashCode(); - h = 31 * h + Objects.hashCode(format); - return h; - } - - @Override - public boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - FieldAndFormat other = (FieldAndFormat) obj; - return field.equals(other.field) && Objects.equals(format, other.format); - } - - } - private final List fields; public static FetchDocValuesContext create(MapperService mapperService, diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index 81838e447d280..576ae1d112582 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -33,7 +33,6 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java new file mode 100644 index 0000000000000..79b33003da788 --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.fetch.subphase; + +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperService; + +import java.util.List; + +/** + * The context needed to retrieve fields. + */ +public class FetchFieldsContext { + + private FieldValueRetriever fieldValueRetriever; + + public static FetchFieldsContext create(String indexName, + MapperService mapperService, + List fields) { + DocumentMapper documentMapper = mapperService.documentMapper(); + if (documentMapper.sourceMapper().enabled() == false) { + throw new IllegalArgumentException("Unable to retrieve the requested [fields] since _source is " + + "disabled in the mappings for index [" + indexName + "]"); + } + + FieldValueRetriever fieldValueRetriever = FieldValueRetriever.create(mapperService, fields); + return new FetchFieldsContext(fieldValueRetriever); + } + + private FetchFieldsContext(FieldValueRetriever fieldValueRetriever) { + this.fieldValueRetriever = fieldValueRetriever; + } + + public FieldValueRetriever fieldValueRetriever() { + return fieldValueRetriever; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java new file mode 100644 index 0000000000000..e14cf24726dac --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SourceLookup; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * A fetch sub-phase for high-level field retrieval. Given a list of fields, it + * retrieves the field values from _source and returns them as document fields. + */ +public final class FetchFieldsPhase implements FetchSubPhase { + + @Override + public void hitExecute(SearchContext context, HitContext hitContext) { + FetchFieldsContext fetchFieldsContext = context.fetchFieldsContext(); + if (fetchFieldsContext == null) { + return; + } + + SearchHit hit = hitContext.hit(); + SourceLookup sourceLookup = context.lookup().source(); + FieldValueRetriever fieldValueRetriever = fetchFieldsContext.fieldValueRetriever(); + + Set ignoredFields = getIgnoredFields(hit); + Map documentFields = fieldValueRetriever.retrieve(sourceLookup, ignoredFields); + for (Map.Entry entry : documentFields.entrySet()) { + hit.setDocumentField(entry.getKey(), entry.getValue()); + } + } + + private Set getIgnoredFields(SearchHit hit) { + DocumentField field = hit.field(IgnoredFieldMapper.NAME); + if (field == null) { + return Set.of(); + } + + Set ignoredFields = new HashSet<>(); + for (Object value : field.getValues()) { + ignoredFields.add((String) value); + } + return ignoredFields; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java new file mode 100644 index 0000000000000..cf4edd13f5cd8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Wrapper around a field name and the format that should be used to + * display values of this field. + */ +public final class FieldAndFormat implements Writeable, ToXContentObject { + private static final ParseField FIELD_FIELD = new ParseField("field"); + private static final ParseField FORMAT_FIELD = new ParseField("format"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("fetch_field_and_format", + a -> new FieldAndFormat((String) a[0], (String) a[1])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD_FIELD); + PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FORMAT_FIELD); + } + + /** + * Parse a {@link FieldAndFormat} from some {@link XContent}. + */ + public static FieldAndFormat fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + if (token.isValue()) { + return new FieldAndFormat(parser.text(), null); + } else { + return PARSER.apply(parser, null); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD_FIELD.getPreferredName(), field); + if (format != null) { + builder.field(FORMAT_FIELD.getPreferredName(), format); + } + builder.endObject(); + return builder; + } + + /** The name of the field. */ + public final String field; + + /** The format of the field, or {@code null} if defaults should be used. */ + public final String format; + + /** Sole constructor. */ + public FieldAndFormat(String field, @Nullable String format) { + this.field = Objects.requireNonNull(field); + this.format = format; + } + + /** Serialization constructor. */ + public FieldAndFormat(StreamInput in) throws IOException { + this.field = in.readString(); + format = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeOptionalString(format); + } + + @Override + public int hashCode() { + int h = field.hashCode(); + h = 31 * h + Objects.hashCode(format); + return h; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + FieldAndFormat other = (FieldAndFormat) obj; + return field.equals(other.field) && Objects.equals(format, other.format); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldValueRetriever.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldValueRetriever.java new file mode 100644 index 0000000000000..45284c67ec6e7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldValueRetriever.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
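[Editor's note, not part of the patch: FieldAndFormat.fromXContent accepts either a plain string or an object with "field" and optional "format". A sketch of both forms, assuming the standard XContent parser plumbing; the field name is made up.]

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    // Object form: {"field": "timestamp", "format": "epoch_millis"}
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
            "{\"field\": \"timestamp\", \"format\": \"epoch_millis\"}")) {
        parser.nextToken();
        FieldAndFormat withFormat = FieldAndFormat.fromXContent(parser);
    }

    // Shorthand string form: "timestamp" (the format defaults to null)
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "\"timestamp\"")) {
        parser.nextToken();
        FieldAndFormat plain = FieldAndFormat.fromXContent(parser);
    }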
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.fetch.subphase; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.index.mapper.DocumentFieldMappers; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.search.lookup.SourceLookup; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * A helper class to {@link FetchFieldsPhase} that's initialized with a list of field patterns to fetch. + * Then given a specific document, it can retrieve the corresponding fields from the document's source. + */ +public class FieldValueRetriever { + private final DocumentFieldMappers fieldMappers; + private final List fieldContexts; + + public static FieldValueRetriever create(MapperService mapperService, + Collection fieldAndFormats) { + DocumentFieldMappers fieldMappers = mapperService.documentMapper().mappers(); + List fields = new ArrayList<>(); + + for (FieldAndFormat fieldAndFormat : fieldAndFormats) { + String fieldPattern = fieldAndFormat.field; + String format = fieldAndFormat.format; + + Collection concreteFields = mapperService.simpleMatchToFullName(fieldPattern); + for (String field : concreteFields) { + if (fieldMappers.getMapper(field) != null && mapperService.isMetadataField(field) == false) { + Set sourcePath = mapperService.sourcePath(field); + fields.add(new FieldContext(field, sourcePath, format)); + } + } + } + + return new FieldValueRetriever(fieldMappers, fields); + } + + + private FieldValueRetriever(DocumentFieldMappers fieldMappers, + List fieldContexts) { + this.fieldMappers = fieldMappers; + this.fieldContexts = fieldContexts; + } + + public Map retrieve(SourceLookup sourceLookup, Set ignoredFields) { + Map documentFields = new HashMap<>(); + for (FieldContext context : fieldContexts) { + String field = context.fieldName; + if (ignoredFields.contains(field)) { + continue; + } + + List parsedValues = new ArrayList<>(); + for (String path : context.sourcePath) { + FieldMapper fieldMapper = (FieldMapper) fieldMappers.getMapper(path); + List values = fieldMapper.lookupValues(sourceLookup, context.format); + parsedValues.addAll(values); + } + + if (parsedValues.isEmpty() == false) { + documentFields.put(field, new DocumentField(field, parsedValues)); + } + } + return documentFields; + } + + private static class FieldContext { + final String fieldName; + final Set sourcePath; + final @Nullable String format; + + FieldContext(String fieldName, + Set sourcePath, + @Nullable String format) { + this.fieldName = fieldName; + this.sourcePath = sourcePath; + this.format = format; + } + } +} diff --git 
a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 6c0579020489d..3199cfc4ff604 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; @@ -201,6 +202,16 @@ public InnerHitsContext innerHits() { public abstract SearchContext docValuesContext(FetchDocValuesContext docValuesContext); + /** + * The context related to retrieving fields. + */ + public abstract FetchFieldsContext fetchFieldsContext(); + + /** + * Sets the context related to retrieving fields. + */ + public abstract SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext); + public abstract ContextIndexSearcher searcher(); public abstract IndexShard indexShard(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index a539a77d66be8..ecd99a27ddf22 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -59,6 +60,7 @@ public class SubSearchContext extends FilteredSearchContext { private ScriptFieldsContext scriptFields; private FetchSourceContext fetchSourceContext; private FetchDocValuesContext docValuesContext; + private FetchFieldsContext fetchFieldsContext; private SearchContextHighlight highlight; private boolean explain; @@ -160,6 +162,17 @@ public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { return this; } + @Override + public FetchFieldsContext fetchFieldsContext() { + return fetchFieldsContext; + } + + @Override + public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) { + this.fetchFieldsContext = fetchFieldsContext; + return this; + } + @Override public void timeout(TimeValue timeout) { throw new UnsupportedOperationException("Not supported"); diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java index e547f02ef0347..d63caed14adb0 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchParseException; +import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentHelper; @@ -132,6 +133,22 @@ public List extractRawValues(String path) { return XContentMapValues.extractRawValues(path, loadSourceIfNeeded()); } + /** + * For the provided path, return its value in the source. + * + * Note that in contrast with {@link SourceLookup#extractRawValues}, array and object values + * can be returned. + * + * @param path the value's path in the source. + * @param nullValue a value to return if the path exists, but the value is 'null'. This helps + * in distinguishing between a path that doesn't exist vs. a value of 'null'. + * + * @return the value associated with the path in the source or 'null' if the path does not exist. + */ + public Object extractValue(String path, @Nullable Object nullValue) { + return XContentMapValues.extractValue(path, loadSourceIfNeeded(), nullValue); + } + public Object filter(FetchSourceContext context) { return context.getFilter().apply(loadSourceIfNeeded()); } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java index d83000bd66956..957316d99dad8 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java @@ -164,6 +164,35 @@ public void testExtractValue() throws Exception { assertThat(XContentMapValues.extractValue("path1.xxx.path2.yyy.test", map).toString(), equalTo("value")); } + public void testExtractValueWithNullValue() throws Exception { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .nullField("other_field") + .array("array", "value1", null, "value2") + .startObject("object1") + .startObject("object2").nullField("field").endObject() + .endObject() + .startArray("object_array") + .startObject().nullField("field").endObject() + .startObject().field("field", "value").endObject() + .endArray() + .endObject(); + + Map map; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + map = parser.map(); + } + assertEquals("value", XContentMapValues.extractValue("field", map, "NULL")); + assertNull(XContentMapValues.extractValue("missing", map, "NULL")); + assertNull(XContentMapValues.extractValue("field.missing", map, "NULL")); + assertNull(XContentMapValues.extractValue("object1.missing", map, "NULL")); + + assertEquals("NULL", XContentMapValues.extractValue("other_field", map, "NULL")); + assertEquals(List.of("value1", "NULL", "value2"), XContentMapValues.extractValue("array", map, "NULL")); + assertEquals(List.of("NULL", "value"), XContentMapValues.extractValue("object_array.field", map, "NULL")); + assertEquals("NULL", XContentMapValues.extractValue("object1.object2.field", map, "NULL")); + } + public void testExtractRawValue() throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder().startObject() .field("test", "value") diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index 691f0ed9de14c..647ce2f3405b5 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ 
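[Editor's note, not part of the patch: the nullValue-aware extractValue overload exercised by the test above lets callers distinguish a missing path from an explicit null. A small sketch; the key names are made up.]

    import java.util.HashMap;
    import java.util.Map;
    import org.elasticsearch.common.xcontent.support.XContentMapValues;

    Map<String, Object> source = new HashMap<>();
    source.put("status", null);                                                      // path exists, value is null
    Object missing = XContentMapValues.extractValue("unknown", source, "NULL");      // -> null (no such path)
    Object explicitNull = XContentMapValues.extractValue("status", source, "NULL");  // -> "NULL"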
b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -35,6 +35,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; @@ -121,11 +122,27 @@ public static Tuple randomDocumentField(XContentTy } return Tuple.tuple(documentField, documentField); } else { - String fieldName = randomAlphaOfLengthBetween(3, 10); - Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); - DocumentField input = new DocumentField(fieldName, tuple.v1()); - DocumentField expected = new DocumentField(fieldName, tuple.v2()); - return Tuple.tuple(input, expected); + switch (randomIntBetween(0, 2)) { + case 0: + String fieldName = randomAlphaOfLengthBetween(3, 10); + Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); + DocumentField input = new DocumentField(fieldName, tuple.v1()); + DocumentField expected = new DocumentField(fieldName, tuple.v2()); + return Tuple.tuple(input, expected); + case 1: + List listValues = randomList(1, 5, () -> randomList(1, 5, ESTestCase::randomInt)); + DocumentField listField = new DocumentField(randomAlphaOfLength(5), listValues); + return Tuple.tuple(listField, listField); + case 2: + List objectValues = randomList(1, 5, () -> + Map.of(randomAlphaOfLength(5), randomInt(), + randomAlphaOfLength(5), randomBoolean(), + randomAlphaOfLength(5), randomAlphaOfLength(10))); + DocumentField objectField = new DocumentField(randomAlphaOfLength(5), objectValues); + return Tuple.tuple(objectField, objectField); + default: + throw new IllegalStateException(); + } } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 0d5ed6d24803a..253084ec4ddfa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -33,9 +33,12 @@ import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -44,6 +47,7 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; @@ -51,6 +55,9 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.containsString; @@ -293,4 +300,22 @@ public void testBoosts() throws Exception { assertEquals(new BoostQuery(new TermQuery(new Term("field", "T")), 2.0f), ft.termQuery("true", null)); } + public void testParseSourceValue() { + Settings settings = 
Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + BooleanFieldMapper mapper = new BooleanFieldMapper.Builder("field").build(context); + assertTrue(mapper.parseSourceValue(true, null)); + assertFalse(mapper.parseSourceValue("false", null)); + assertFalse(mapper.parseSourceValue("", null)); + + Map mapping = Map.of("type", "boolean", "null_value", true); + BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder("field"); + builder.parse("field", null, new HashMap<>(mapping)); + BooleanFieldMapper nullValueMapper = builder.build(context); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of(true), nullValueMapper.lookupValues(sourceLookup, null)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 66af8f178e345..6564cdf6236fe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; @@ -31,6 +32,8 @@ import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -42,6 +45,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.test.ESSingleNodeTestCase; import org.hamcrest.FeatureMatcher; @@ -50,6 +54,7 @@ import org.hamcrest.core.CombinableMatcher; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -933,6 +938,21 @@ public void testLimitOfContextMappings() throws Throwable { CompletionFieldMapper.COMPLETION_CONTEXTS_LIMIT + "] has been exceeded")); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()); + CompletionFieldMapper mapper = new CompletionFieldMapper.Builder("completion", defaultAnalyzer).build(context); + + assertEquals(List.of("value"), mapper.parseSourceValue("value", null)); + + List list = List.of("first", "second"); + assertEquals(list, mapper.parseSourceValue(list, null)); + + Map object = Map.of("input", List.of("first", "second"), "weight", "2.718"); + assertEquals(List.of(object), mapper.parseSourceValue(object, null)); + } + private Matcher suggestField(String value) { return 
Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)), Matchers.instanceOf(SuggestField.class)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index ac0cd388f13a4..d0b95f1b1bf23 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -21,17 +21,22 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JavaVersion; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DateFieldMapper.Resolution; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; @@ -42,6 +47,9 @@ import java.time.ZonedDateTime; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; @@ -446,4 +454,72 @@ public void testMeta() throws Exception { assertEquals(mapping3, mapper.mappingSource().toString()); } + public void testParseSourceValue() { + DateFieldMapper mapper = createMapper(Resolution.MILLISECONDS, null); + String date = "2020-05-15T21:33:02.000Z"; + assertEquals(date, mapper.parseSourceValue(date, null)); + assertEquals(date, mapper.parseSourceValue(1589578382000L, null)); + + DateFieldMapper mapperWithFormat = createMapper(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis"); + String dateInFormat = "1990/12/29"; + assertEquals(dateInFormat, mapperWithFormat.parseSourceValue(dateInFormat, null)); + assertEquals(dateInFormat, mapperWithFormat.parseSourceValue(662428800000L, null)); + + DateFieldMapper mapperWithMillis = createMapper(Resolution.MILLISECONDS, "epoch_millis"); + String dateInMillis = "662428800000"; + assertEquals(dateInMillis, mapperWithMillis.parseSourceValue(dateInMillis, null)); + assertEquals(dateInMillis, mapperWithMillis.parseSourceValue(662428800000L, null)); + + String nullValueDate = "2020-05-15T21:33:02.000Z"; + DateFieldMapper nullValueMapper = createMapper(Resolution.MILLISECONDS, null, nullValueDate); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of(nullValueDate), nullValueMapper.lookupValues(sourceLookup, null)); + } + + public void testParseSourceValueWithFormat() { + DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z"); + String date = "1990-12-29T00:00:00.000Z"; + assertEquals("1990/12/29", mapper.parseSourceValue(date, "yyyy/MM/dd")); + 
assertEquals("662428800000", mapper.parseSourceValue(date, "epoch_millis")); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("1970/12/29"), mapper.lookupValues(sourceLookup, "yyyy/MM/dd")); + } + + public void testParseSourceValueNanos() { + DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis"); + String date = "2020-05-15T21:33:02.123456789Z"; + assertEquals("2020-05-15T21:33:02.123456789Z", mapper.parseSourceValue(date, null)); + assertEquals("2020-05-15T21:33:02.123Z", mapper.parseSourceValue(1589578382123L, null)); + + String nullValueDate = "2020-05-15T21:33:02.123456789Z"; + DateFieldMapper nullValueMapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of(nullValueDate), nullValueMapper.lookupValues(sourceLookup, null)); + } + + private DateFieldMapper createMapper(Resolution resolution, String format) { + return createMapper(resolution, format, null); + } + + private DateFieldMapper createMapper(Resolution resolution, String format, String nullValue) { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + Map mapping = new HashMap<>(); + mapping.put("type", "date_nanos"); + if (format != null) { + mapping.put("format", format); + } + if (nullValue != null) { + mapping.put("null_value", nullValue); + } + + DateFieldMapper.Builder builder = new DateFieldMapper.Builder("field", resolution, null, false); + builder.parse("field", null, mapping); + return builder.build(context); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java index 38db532d9fb0b..fe621f6579aad 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java @@ -102,6 +102,11 @@ static class FakeFieldMapper extends FieldMapper { protected void parseCreateField(ParseContext context) throws IOException { } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index d8b8b0d213a3b..44ee214e75594 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -200,6 +200,11 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new UnsupportedOperationException(); } + @Override + protected Object parseSourceValue(Object value, String format) { + return value; + } + @Override public Iterator iterator() { return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java 
b/server/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java index a86a4a6d63f77..8c68014522eaf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java @@ -133,6 +133,11 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected String parseSourceValue(Object value, String format) { + return value.toString(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 3190dfbeca357..2b8b1c2679f95 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -25,8 +25,10 @@ import java.util.Collection; import java.util.Collections; import java.util.Iterator; +import java.util.Set; import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; public class FieldTypeLookupTests extends ESTestCase { @@ -77,6 +79,59 @@ public void testSimpleMatchToFullName() { assertTrue(names.contains("barometer")); } + public void testSourcePathWithMultiFields() { + Mapper.BuilderContext context = new Mapper.BuilderContext( + MockFieldMapper.DEFAULT_SETTINGS, new ContentPath()); + + MockFieldMapper field = new MockFieldMapper.Builder("field") + .addMultiField(new MockFieldMapper.Builder("field.subfield1")) + .addMultiField(new MockFieldMapper.Builder("field.subfield2")) + .build(context); + + FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), emptyList()); + + assertEquals(Set.of("field"), lookup.sourcePaths("field")); + assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield1")); + assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield2")); + } + + public void testSourcePathWithAliases() { + Mapper.BuilderContext context = new Mapper.BuilderContext( + MockFieldMapper.DEFAULT_SETTINGS, new ContentPath()); + + MockFieldMapper field = new MockFieldMapper.Builder("field") + .addMultiField(new MockFieldMapper.Builder("field.subfield")) + .build(context); + + FieldAliasMapper alias1 = new FieldAliasMapper("alias1", "alias1", "field"); + FieldAliasMapper alias2 = new FieldAliasMapper("alias2", "alias2", "field.subfield"); + + FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field), Arrays.asList(alias1, alias2)); + + assertEquals(Set.of("field"), lookup.sourcePaths("alias1")); + assertEquals(Set.of("field"), lookup.sourcePaths("alias2")); + } + + public void testSourcePathsWithCopyTo() { + Mapper.BuilderContext context = new Mapper.BuilderContext( + MockFieldMapper.DEFAULT_SETTINGS, new ContentPath()); + + MockFieldMapper field = new MockFieldMapper.Builder("field") + .addMultiField(new MockFieldMapper.Builder("field.subfield1")) + .build(context); + + MockFieldMapper otherField = new MockFieldMapper.Builder("other_field") + .copyTo(new FieldMapper.CopyTo.Builder() + .add("field") + .build()) + .build(context); + + FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field, otherField), emptyList()); + + assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field")); + assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field.subfield1")); + } + public void testIteratorImmutable() { MockFieldMapper f1 = new 
MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f1), emptyList()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index abe4805bce088..2d3a0a3a9851c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -20,23 +20,30 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.geo.RandomGeoGenerator; import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -586,6 +593,39 @@ public void testInvalidGeopointValuesIgnored() throws Exception { ), XContentType.JSON)).rootDoc().getField("location"), nullValue()); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + AbstractGeometryFieldMapper mapper = new GeoPointFieldMapper.Builder("field").build(context); + SourceLookup sourceLookup = new SourceLookup(); + + Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); + Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); + String wktPoint = "POINT (42.0 27.1)"; + String otherWktPoint = "POINT (30.0 50.0)"; + + // Test a single point in [lon, lat] array format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(42.0, 27.1))); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single point in "lat, lon" string format. + sourceLookup.setSource(Collections.singletonMap("field", "27.1,42.0")); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of points in [lon, lat] array format. 
+ sourceLookup.setSource(Collections.singletonMap("field", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)))); + assertEquals(List.of(jsonPoint, otherJsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint, otherWktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single point in well-known text format. + sourceLookup.setSource(Collections.singletonMap("field", "POINT (42.0 27.1)")); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + } + @Override protected GeoPointFieldMapper.Builder newBuilder() { return new GeoPointFieldMapper.Builder("geo"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 98369d5f1a15b..8e1a88fe6b9c9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -19,16 +19,20 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.junit.Before; @@ -36,6 +40,8 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; @@ -356,4 +362,38 @@ public String toXContentString(GeoShapeFieldMapper mapper, boolean includeDefaul public String toXContentString(GeoShapeFieldMapper mapper) throws IOException { return toXContentString(mapper, true); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + GeoShapeFieldMapper mapper = new GeoShapeFieldMapper.Builder("field").build(context); + SourceLookup sourceLookup = new SourceLookup(); + + Map jsonLineString = Map.of("type", "LineString", "coordinates", + List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); + Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); + String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; + String wktPoint = "POINT (14.0 15.0)"; + + // Test a single shape in geojson format. 
+ sourceLookup.setSource(Collections.singletonMap("field", jsonLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in geojson format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(jsonLineString, jsonPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single shape in wkt format. + sourceLookup.setSource(Collections.singletonMap("field", wktLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in wkt format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(wktLineString, wktPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 07ed508a624a1..405efdecb8248 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -26,10 +26,13 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -37,12 +40,15 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.io.IOException; import java.net.InetAddress; import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Set; import static org.hamcrest.Matchers.containsString; @@ -297,6 +303,23 @@ public void testEmptyName() throws IOException { assertThat(e.getMessage(), containsString("name cannot be empty string")); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + IpFieldMapper mapper = new IpFieldMapper.Builder("field").build(context); + assertEquals("2001:db8::2:1", mapper.parseSourceValue("2001:db8::2:1", null)); + assertEquals("2001:db8::2:1", mapper.parseSourceValue("2001:db8:0:0:0:0:2:1", null)); + assertEquals("::1", mapper.parseSourceValue("0:0:0:0:0:0:0:1", null)); + + IpFieldMapper nullValueMapper = new IpFieldMapper.Builder("field") + 
.nullValue(InetAddresses.forString("2001:db8:0:0:0:0:2:7")) + .build(context); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("2001:db8::2:7"), nullValueMapper.lookupValues(sourceLookup, null)); + } + @Override protected IpFieldMapper.Builder newBuilder() { return new IpFieldMapper.Builder("ip"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java index 2f861ca6fc79c..663fec56b5e6a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java @@ -20,10 +20,13 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -80,4 +83,14 @@ public void testStoreCidr() throws Exception { assertThat(storedField.stringValue(), containsString(strVal)); } } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP).build(context); + Map range = Map.of("gte", "2001:db8:0:0:0:0:2:1"); + assertEquals(Map.of("gte", "2001:db8::2:1"), mapper.parseSourceValue(range, null)); + assertEquals("2001:db8::2:1/32", mapper.parseSourceValue("2001:db8:0:0:0:0:2:1/32", null)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 53fe049b8d964..ecc3e507af7d3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -29,6 +29,8 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BooleanSimilarity; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -44,6 +46,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; @@ -166,6 +169,9 @@ public void testDefaults() throws Exception { // used by TermVectorsService assertArrayEquals(new String[] { "1234" }, TermVectorsService.getValues(doc.rootDoc().getFields("field"))); + + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + 
assertEquals("1234", fieldMapper.parseSourceValue("1234", null)); } public void testIgnoreAbove() throws IOException { @@ -624,4 +630,38 @@ public void testMeta() throws Exception { new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + KeywordFieldMapper mapper = new KeywordFieldMapper.Builder("field").build(context); + assertEquals("value", mapper.parseSourceValue("value", null)); + assertEquals("42", mapper.parseSourceValue(42L, null)); + assertEquals("true", mapper.parseSourceValue(true, null)); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.parseSourceValue(true, "format")); + assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage()); + + KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field") + .ignoreAbove(4) + .build(context); + assertNull(ignoreAboveMapper.parseSourceValue("value", null)); + assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null)); + assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null)); + + KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field") + .normalizer(indexService.getIndexAnalyzers(), "lowercase") + .build(context); + assertEquals("value", normalizerMapper.parseSourceValue("VALUE", null)); + assertEquals("42", normalizerMapper.parseSourceValue(42L, null)); + assertEquals("value", normalizerMapper.parseSourceValue("value", null)); + + KeywordFieldMapper nullValueMapper = new KeywordFieldMapper.Builder("field") + .nullValue("NULL") + .build(context); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java index 56d33659db52e..bd54d663541fe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java @@ -25,6 +25,8 @@ import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -33,6 +35,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -40,6 +43,7 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import 
org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.junit.Before; @@ -47,6 +51,8 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; @@ -839,4 +845,38 @@ public String toXContentString(LegacyGeoShapeFieldMapper mapper, boolean include public String toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException { return toXContentString(mapper, true); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + LegacyGeoShapeFieldMapper mapper = new LegacyGeoShapeFieldMapper.Builder("field").build(context); + SourceLookup sourceLookup = new SourceLookup(); + + Map jsonLineString = Map.of("type", "LineString", "coordinates", + List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); + Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); + String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; + String wktPoint = "POINT (14.0 15.0)"; + + // Test a single shape in geojson format. + sourceLookup.setSource(Collections.singletonMap("field", jsonLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in geojson format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(jsonLineString, jsonPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single shape in wkt format. + sourceLookup.setSource(Collections.singletonMap("field", wktLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in wkt format. 
+ sourceLookup.setSource(Collections.singletonMap("field", List.of(wktLineString, wktPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 2514f93131398..c2a60272225bb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -22,11 +22,14 @@ import com.carrotsearch.randomizedtesting.annotations.Timeout; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -35,11 +38,13 @@ import org.elasticsearch.index.mapper.NumberFieldTypeTests.OutOfRangeSpec; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.ByteArrayInputStream; import java.io.IOException; import java.math.BigInteger; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -401,6 +406,24 @@ public void testEmptyName() throws IOException { } } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + NumberFieldMapper mapper = new NumberFieldMapper.Builder("field", NumberType.INTEGER).build(context); + assertEquals(3, mapper.parseSourceValue(3.14, null)); + assertEquals(42, mapper.parseSourceValue("42.9", null)); + + NumberFieldMapper nullValueMapper = new NumberFieldMapper.Builder("field", NumberType.FLOAT) + .nullValue(2.71f) + .build(context); + assertEquals(2.71f, (float) nullValueMapper.parseSourceValue("", null), 0.00001); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of(2.71f), nullValueMapper.lookupValues(sourceLookup, null)); + } + @Timeout(millis = 30000) public void testOutOfRangeValues() throws IOException { final List> inputs = Arrays.asList( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 40fe0bafbce79..d4249ceacc714 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -184,6 +184,11 @@ protected void parseCreateField(ParseContext context) { } + @Override + protected Object parseSourceValue(Object value, String format) { + return null; 
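// parseSourceValue, stubbed out above for this dummy test mapper, is the per-mapper hook behind the
// new fields retrieval: it converts one raw _source value into its external form, while
// lookupValues(SourceLookup, format) wraps it and also applies null_value fall-backs (see the IP and
// keyword mapper tests above). A rough, hedged sketch of how the pieces combine, mirroring the
// FieldValueRetrieverTests helper later in this patch and using only calls shown there:
//
//     SourceLookup sourceLookup = new SourceLookup();
//     sourceLookup.setSource(Map.of("field", "value"));
//     FieldValueRetriever retriever = FieldValueRetriever.create(mapperService,
//         List.of(new FieldAndFormat("field", null)));
//     Map<String, DocumentField> fields = retriever.retrieve(sourceLookup, Set.of());
//
// Each resulting DocumentField holds the values produced by the matching mapper's lookupValues.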
+ } + @Override protected String contentType() { return "test_mapper"; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index 88d2db04f2b73..b7f4f8cc7d962 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -22,10 +22,13 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -40,6 +43,7 @@ import java.util.Collection; import java.util.HashSet; import java.util.Locale; +import java.util.Map; import java.util.Set; import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; @@ -49,6 +53,7 @@ import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; + public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { @Override @@ -486,4 +491,35 @@ public void testIllegalFormatField() throws Exception { assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage()); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG).build(context); + Map longRange = Map.of("gte", 3.14, "lt", "42.9"); + assertEquals(Map.of("gte", 3L, "lt", 42L), longMapper.parseSourceValue(longRange, null)); + + RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE) + .format("yyyy/MM/dd||epoch_millis") + .build(context); + Map dateRange = Map.of("lt", "1990/12/29", "gte", 597429487111L); + assertEquals(Map.of("lt", "1990/12/29", "gte", "1988/12/06"), + dateMapper.parseSourceValue(dateRange, null)); + } + + public void testParseSourceValueWithFormat() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + RangeFieldMapper longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG).build(context); + Map longRange = Map.of("gte", 3.14, "lt", "42.9"); + assertEquals(Map.of("gte", 3L, "lt", 42L), longMapper.parseSourceValue(longRange, null)); + + RangeFieldMapper dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE) + .format("strict_date_time") + .build(context); + Map dateRange = Map.of("lt", "1990-12-29T00:00:00.000Z"); + assertEquals(Map.of("lt", "1990/12/29"), dateMapper.parseSourceValue(dateRange, "yyy/MM/dd")); + assertEquals(Map.of("lt", "662428800000"), dateMapper.parseSourceValue(dateRange, "epoch_millis")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index f9feb59700a42..04ac4276c7a89 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -237,13 +237,13 @@ public void testDateRangeQueryUsingMappingFormat() { assertEquals(1466062190000L, formatter.parseMillis(to)); RangeFieldType fieldType = new RangeFieldType(FIELDNAME, true, true, formatter, Collections.emptyMap()); - final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, null, context); + final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); assertEquals("field:", query.toString()); // compare lower and upper bounds with what we would get on a `date` field DateFieldType dateFieldType = new DateFieldType(FIELDNAME, true, true, formatter, DateFieldMapper.Resolution.MILLISECONDS, Collections.emptyMap()); - final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, null, context); + final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); } @@ -464,9 +464,9 @@ private Object nextTo(Object from) throws Exception { } public void testParseIp() { - assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse(InetAddresses.forString("::1"), randomBoolean())); - assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse("::1", randomBoolean())); - assertEquals(InetAddresses.forString("::1"), RangeType.IP.parse(new BytesRef("::1"), randomBoolean())); + assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue(InetAddresses.forString("::1"), randomBoolean(), null)); + assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue("::1", randomBoolean(), null)); + assertEquals(InetAddresses.forString("::1"), RangeType.IP.parseValue(new BytesRef("::1"), randomBoolean(), null)); } public void testTermQuery() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 8a4eebf3ffe76..e3d632f409639 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -49,7 +49,9 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -1331,4 +1333,16 @@ public void testMeta() throws Exception { new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + FieldMapper fieldMapper = newBuilder().build(context); + TextFieldMapper mapper = (TextFieldMapper) fieldMapper; + + 
assertEquals("value", mapper.parseSourceValue("value", null)); + assertEquals("42", mapper.parseSourceValue(42L, null)); + assertEquals("true", mapper.parseSourceValue(true, null)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index bc2b303a8e817..1d0cc23cbb32b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -32,8 +32,8 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.sort.SortBuilder; diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 30a65ac783e16..0a6bef86af27a 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -183,8 +183,8 @@ public void testFromXContentLenientParsing() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); SearchHit searchHit = createTestItem(xContentType, true, true); BytesReference originalBytes = toXContent(searchHit, xContentType, true); - Predicate pathsToExclude = path -> (path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") - || path.contains("inner_hits") || path.isEmpty()); + Predicate pathsToExclude = path -> path.endsWith("highlight") || path.contains("fields") || path.contains("_source") + || path.contains("inner_hits") || path.isEmpty(); BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); SearchHit parsed; diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldValueRetrieverTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldValueRetrieverTests.java new file mode 100644 index 0000000000000..3826e0ef09c07 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldValueRetrieverTests.java @@ -0,0 +1,367 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.search.lookup.SourceLookup; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; + +public class FieldValueRetrieverTests extends ESSingleNodeTestCase { + + public void testLeafValues() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("field", "first", "second") + .startObject("object") + .field("field", "third") + .endObject() + .endObject(); + + List fieldAndFormats = List.of( + new FieldAndFormat("field", null), + new FieldAndFormat("object.field", null)); + Map fields = retrieveFields(mapperService, source, fieldAndFormats); + assertThat(fields.size(), equalTo(2)); + + DocumentField field = fields.get("field"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(2)); + assertThat(field.getValues(), hasItems("first", "second")); + + DocumentField objectField = fields.get("object.field"); + assertNotNull(objectField); + assertThat(objectField.getValues().size(), equalTo(1)); + assertThat(objectField.getValues(), hasItems("third")); + } + + public void testObjectValues() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .startObject("float_range") + .field("gte", 0.0f) + .field("lte", 2.718f) + .endObject() + .endObject(); + + Map fields = retrieveFields(mapperService, source, "float_range"); + assertThat(fields.size(), equalTo(1)); + + DocumentField rangeField = fields.get("float_range"); + assertNotNull(rangeField); + assertThat(rangeField.getValues().size(), equalTo(1)); + assertThat(rangeField.getValue(), equalTo(Map.of("gte", 0.0f, "lte", 2.718f))); + } + + public void testNonExistentField() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .endObject(); + + Map fields = retrieveFields(mapperService, source, "non-existent"); + assertThat(fields.size(), equalTo(0)); + } + + public void testMetadataFields() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .endObject(); + + Map fields = retrieveFields(mapperService, source, "_routing"); + assertTrue(fields.isEmpty()); + } + + public void testRetrieveAllFields() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .startObject("object") + .field("field", "other-value") + .endObject() + .endObject(); + + Map fields = retrieveFields(mapperService, source, "*"); + assertThat(fields.size(), equalTo(2)); + } + + public void testArrayValueMappers() throws IOException { + MapperService mapperService = 
createMapperService(); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("geo_point", 27.1, 42.0) + .endObject(); + + Map fields = retrieveFields(mapperService, source, "geo_point"); + assertThat(fields.size(), equalTo(1)); + + DocumentField field = fields.get("geo_point"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(1)); + + // Test a field with multiple geo-points. + source = XContentFactory.jsonBuilder().startObject() + .startArray("geo_point") + .startArray().value(27.1).value(42.0).endArray() + .startArray().value(31.4).value(42.0).endArray() + .endArray() + .endObject(); + + fields = retrieveFields(mapperService, source, "geo_point"); + assertThat(fields.size(), equalTo(1)); + + field = fields.get("geo_point"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(2)); + } + + public void testFieldNamesWithWildcard() throws IOException { + MapperService mapperService = createMapperService();; + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("field", "first", "second") + .field("integer_field", 333) + .startObject("object") + .field("field", "fourth") + .endObject() + .endObject(); + + Map fields = retrieveFields(mapperService, source, "*field"); + assertThat(fields.size(), equalTo(3)); + + DocumentField field = fields.get("field"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(2)); + assertThat(field.getValues(), hasItems("first", "second")); + + DocumentField otherField = fields.get("integer_field"); + assertNotNull(otherField); + assertThat(otherField.getValues().size(), equalTo(1)); + assertThat(otherField.getValues(), hasItems(333)); + + DocumentField objectField = fields.get("object.field"); + assertNotNull(objectField); + assertThat(objectField.getValues().size(), equalTo(1)); + assertThat(objectField.getValues(), hasItems("fourth")); + } + + public void testDateFormat() throws IOException { + MapperService mapperService = createMapperService(); + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .field("date_field", "1990-12-29T00:00:00.000Z") + .endObject(); + + Map fields = retrieveFields(mapperService, source, List.of( + new FieldAndFormat("field", null), + new FieldAndFormat("date_field", "yyyy/MM/dd"))); + assertThat(fields.size(), equalTo(2)); + + DocumentField field = fields.get("field"); + assertNotNull(field); + + DocumentField dateField = fields.get("date_field"); + assertNotNull(dateField); + assertThat(dateField.getValues().size(), equalTo(1)); + assertThat(dateField.getValue(), equalTo("1990/12/29")); + } + + public void testIgnoreAbove() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .field("ignore_above", 20) + .endObject() + .endObject() + .endObject(); + + IndexService indexService = createIndex("index", Settings.EMPTY, mapping); + MapperService mapperService = indexService.mapperService(); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("field", "value", "other_value", "really_really_long_value") + .endObject(); + Map fields = retrieveFields(mapperService, source, "field"); + DocumentField field = fields.get("field"); + assertThat(field.getValues().size(), equalTo(2)); + + source = XContentFactory.jsonBuilder().startObject() + .array("field", "really_really_long_value") + .endObject(); + fields = 
retrieveFields(mapperService, source, "field"); + assertFalse(fields.containsKey("field")); + } + + public void testFieldAliases() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("field").field("type", "keyword").endObject() + .startObject("alias_field") + .field("type", "alias") + .field("path", "field") + .endObject() + .endObject() + .endObject(); + + IndexService indexService = createIndex("index", Settings.EMPTY, mapping); + MapperService mapperService = indexService.mapperService(); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", "value") + .endObject(); + + Map fields = retrieveFields(mapperService, source, "alias_field"); + assertThat(fields.size(), equalTo(1)); + + DocumentField field = fields.get("alias_field"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(1)); + assertThat(field.getValues(), hasItems("value")); + + fields = retrieveFields(mapperService, source, "*field"); + assertThat(fields.size(), equalTo(2)); + assertTrue(fields.containsKey("alias_field")); + assertTrue(fields.containsKey("field")); + } + + public void testMultiFields() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("field") + .field("type", "integer") + .startObject("fields") + .startObject("keyword").field("type", "keyword").endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + + IndexService indexService = createIndex("index", Settings.EMPTY, mapping); + MapperService mapperService = indexService.mapperService(); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .field("field", 42) + .endObject(); + + Map fields = retrieveFields(mapperService, source, "field.keyword"); + assertThat(fields.size(), equalTo(1)); + + DocumentField field = fields.get("field.keyword"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(1)); + assertThat(field.getValues(), hasItems(42)); + + fields = retrieveFields(mapperService, source, "field*"); + assertThat(fields.size(), equalTo(2)); + assertTrue(fields.containsKey("field")); + assertTrue(fields.containsKey("field.keyword")); + } + + public void testCopyTo() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .endObject() + .startObject("other_field") + .field("type", "integer") + .field("copy_to", "field") + .endObject() + .endObject() + .endObject(); + + IndexService indexService = createIndex("index", Settings.EMPTY, mapping); + MapperService mapperService = indexService.mapperService(); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("field", "one", "two", "three") + .array("other_field", 1, 2, 3) + .endObject(); + + Map fields = retrieveFields(mapperService, source, "field"); + assertThat(fields.size(), equalTo(1)); + + DocumentField field = fields.get("field"); + assertNotNull(field); + assertThat(field.getValues().size(), equalTo(6)); + assertThat(field.getValues(), hasItems("one", "two", "three", 1, 2, 3)); + } + + public void testObjectFields() throws IOException { + MapperService mapperService = createMapperService();; + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .array("field", "first", "second") + .startObject("object") + .field("field", "third") + .endObject() + 
.endObject(); + + Map fields = retrieveFields(mapperService, source, "object"); + assertFalse(fields.containsKey("object")); + } + + private Map retrieveFields(MapperService mapperService, XContentBuilder source, String fieldPattern) { + List fields = List.of(new FieldAndFormat(fieldPattern, null)); + return retrieveFields(mapperService, source, fields); + } + + private Map retrieveFields(MapperService mapperService, XContentBuilder source, List fields) { + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(BytesReference.bytes(source)); + + FieldValueRetriever fetchFieldsLookup = FieldValueRetriever.create(mapperService, fields); + return fetchFieldsLookup.retrieve(sourceLookup, Set.of()); + } + + public MapperService createMapperService() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("field").field("type", "keyword").endObject() + .startObject("integer_field").field("type", "integer").endObject() + .startObject("date_field").field("type", "date").endObject() + .startObject("geo_point").field("type", "geo_point").endObject() + .startObject("float_range").field("type", "float_range").endObject() + .startObject("object") + .startObject("properties") + .startObject("field").field("type", "keyword").endObject() + .endObject() + .endObject() + .startObject("field_that_does_not_match").field("type", "keyword").endObject() + .endObject() + .endObject(); + + IndexService indexService = createIndex("index", Settings.EMPTY, mapping); + return indexService.mapperService(); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 99a5bc211ecc1..24978c7ab670b 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -87,9 +87,9 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { // also there can be inner search hits fields inside this option, we need to exclude another couple of paths // where we cannot add random stuff. 
We also exclude the root level, this is done for SearchHits as all unknown fields // for SearchHit on a root level are interpreted as meta-fields and will be kept - Predicate excludeFilter = (path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) - || path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") - || path.isEmpty()); + Predicate excludeFilter = (path) -> path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) + || path.endsWith("highlight") || path.contains("fields") || path.contains("_source") || path.contains("inner_hits") + || path.isEmpty(); mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); } else { mutated = originalBytes; diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java index 8e7991a02f4f9..289e09e113dc4 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java @@ -105,9 +105,10 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { // exclude "options" which contain SearchHits, // on root level of SearchHit fields are interpreted as meta-fields and will be kept Predicate excludeFilter = ( - path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight") - || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") + path -> path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight") + || path.contains("fields") || path.contains("_source") || path.contains("inner_hits") || path.contains("options")); + mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); } else { mutated = originalBytes; diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java index cf3d33559c1a4..290f0a580989c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java @@ -124,10 +124,10 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { // - the root object should be excluded since it contains the named suggestion arrays // We also exclude options that contain SearchHits, as all unknown fields // on a root level of SearchHit are interpreted as meta-fields and will be kept. 
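// The change below repeats the relaxation already made above in SearchHitTests,
// CompletionSuggestionOptionTests and SuggestionEntryTests: the exclude filter moves from
// path.endsWith("fields") to path.contains("fields"), presumably because the "fields" section of a
// hit is now parsed into typed DocumentField values and cannot tolerate randomly injected keys
// anywhere under such paths.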
- Predicate excludeFilter = path -> (path.isEmpty() + Predicate excludeFilter = path -> path.isEmpty() || path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight") - || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits") - || path.contains("options")); + || path.contains("fields") || path.contains("_source") || path.contains("inner_hits") + || path.contains("options"); mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); } else { mutated = originalBytes; diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java index 223af014c955e..05a9466f109ce 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java @@ -24,6 +24,9 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; @@ -32,6 +35,9 @@ // this sucks how much must be overridden just do get a dummy field mapper... public class MockFieldMapper extends FieldMapper { + static Settings DEFAULT_SETTINGS = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id) + .build(); public MockFieldMapper(String fullName) { this(new FakeFieldType(fullName)); @@ -42,6 +48,13 @@ public MockFieldMapper(MappedFieldType fieldType) { MultiFields.empty(), new CopyTo.Builder().build()); } + public MockFieldMapper(String fullName, + MappedFieldType fieldType, + MultiFields multifields, + CopyTo copyTo) { + super(findSimpleName(fullName), new FieldType(), fieldType, multifields, copyTo); + } + static String findSimpleName(String fullName) { int ndx = fullName.lastIndexOf('.'); return fullName.substring(ndx + 1); @@ -76,8 +89,29 @@ protected String contentType() { protected void parseCreateField(ParseContext context) throws IOException { } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { } + + public static class Builder extends FieldMapper.Builder { + private MappedFieldType fieldType; + + protected Builder(String name) { + super(name, new FieldType()); + this.fieldType = new FakeFieldType(name); + this.builder = this; + } + + @Override + public MockFieldMapper build(BuilderContext context) { + MultiFields multiFields = multiFieldsBuilder.build(this, context); + return new MockFieldMapper(name(), fieldType, multiFields, copyTo); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 605023fafd2fd..58b57fc807d32 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -46,6 +46,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import 
org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; @@ -272,6 +273,16 @@ public SearchContext docValuesContext(FetchDocValuesContext docValuesContext) { return null; } + @Override + public FetchFieldsContext fetchFieldsContext() { + return null; + } + + @Override + public SearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext) { + return null; + } + @Override public ContextIndexSearcher searcher() { return searcher; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index 8b51d5cd1b1bf..eccda49a1a9e4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -164,6 +164,14 @@ protected void parseCreateField(ParseContext context) throws IOException { throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + public static class HistogramFieldType extends MappedFieldType { public HistogramFieldType(String name, boolean hasDocValues, Map meta) { diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 0b16b4feb3ba8..9184e28ac4086 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.TypeParsers; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.time.ZoneId; @@ -262,6 +263,22 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + public List lookupValues(SourceLookup lookup, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + return fieldType().value == null + ? 
List.of() + : List.of(fieldType().value); + } + + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException("This should never be called, since lookupValues is implemented directly."); + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { ConstantKeywordFieldType newConstantKeywordFT = (ConstantKeywordFieldType) other.fieldType(); diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index c1e183d7c804c..0552c63f26a68 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -13,18 +13,21 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapperTestCase; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.junit.Before; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Set; public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase { @@ -133,4 +136,27 @@ public void testMeta() throws Exception { new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } + + public void testLookupValues() throws Exception { + IndexService indexService = createIndex("test"); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") + .startObject("properties").startObject("field").field("type", "constant_keyword") + .endObject().endObject().endObject().endObject()); + DocumentMapper mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + assertEquals(mapping, mapper.mappingSource().toString()); + + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + List values = fieldMapper.lookupValues(new SourceLookup(), null); + assertTrue(values.isEmpty()); + + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") + .startObject("properties").startObject("field").field("type", "constant_keyword") + .field("value", "foo").endObject().endObject().endObject().endObject()); + mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); + + fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + values = fieldMapper.lookupValues(new SourceLookup(), null); + assertEquals(1, values.size()); + assertEquals("foo", values.get(0)); + } } diff --git 
a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java index c67073dcafb4c..bd793d710287f 100644 --- a/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java +++ b/x-pack/plugin/mapper-flattened/src/main/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapper.java @@ -514,6 +514,11 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + protected String nullValue() { + return nullValue; + } + @Override protected void mergeOptions(FieldMapper mergeWith, List conflicts) { FlatObjectFieldMapper other = ((FlatObjectFieldMapper) mergeWith); @@ -564,6 +569,14 @@ protected void parseCreateField(ParseContext context) throws IOException { } } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java index 7a0e1a26d79bb..21f12b3a16d0e 100644 --- a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/xpack/flattened/mapper/FlatObjectFieldMapperTests.java @@ -9,24 +9,30 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapperTestCase; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.flattened.FlattenedMapperPlugin; import org.elasticsearch.xpack.flattened.mapper.FlatObjectFieldMapper.KeyedFlatObjectFieldType; @@ -36,6 +42,9 @@ import 
java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Set; import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; @@ -506,4 +515,19 @@ public void testSplitQueriesOnWhitespace() throws IOException { new String[] {"Hello", "World"}); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + Map sourceValue = Map.of("key", "value"); + FlatObjectFieldMapper mapper = new FlatObjectFieldMapper.Builder("field").build(context); + assertEquals(sourceValue, mapper.parseSourceValue(sourceValue, null)); + + FlatObjectFieldMapper nullValueMapper = new FlatObjectFieldMapper.Builder("field") + .nullValue("NULL") + .build(context); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null)); + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java index c8e9d06d5f4d9..728711df05581 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext.FieldAndFormat; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import java.util.LinkedHashMap; import java.util.LinkedHashSet; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java index 000ca6a1b2451..aa96ec40455d5 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/common/CartesianPoint.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.Collections; import java.util.Locale; +import java.util.Objects; import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; @@ -36,18 +37,18 @@ public class CartesianPoint implements ToXContentFragment { private static final ParseField Y_FIELD = new ParseField("y"); private static final ParseField Z_FIELD = new ParseField("z"); - protected float x; - protected float y; + protected double x; + protected double y; public CartesianPoint() { } - public CartesianPoint(float x, float y) { + public CartesianPoint(double x, double y) { this.x = x; this.y = y; } - public CartesianPoint reset(float x, float y) { + public CartesianPoint reset(double x, double y) { this.x = x; this.y = y; return this; @@ -68,11 +69,11 @@ public CartesianPoint resetFromCoordinates(String value, final boolean ignoreZVa throw new ElasticsearchParseException("failed to parse [{}], expected 2 or 3 coordinates " + "but found: [{}]", vals, vals.length); } - final float x; - final float y; + final double x; + final double y; try 
{ - x = Float.parseFloat(vals[0].trim()); - if (Float.isFinite(x) == false) { + x = Double.parseDouble(vals[0].trim()); + if (Double.isFinite(x) == false) { throw new ElasticsearchParseException("invalid [{}] value [{}]; " + "must be between -3.4028234663852886E38 and 3.4028234663852886E38", X_FIELD.getPreferredName(), @@ -82,8 +83,8 @@ public CartesianPoint resetFromCoordinates(String value, final boolean ignoreZVa throw new ElasticsearchParseException("[{}]] must be a number", X_FIELD.getPreferredName()); } try { - y = Float.parseFloat(vals[1].trim()); - if (Float.isFinite(y) == false) { + y = Double.parseDouble(vals[1].trim()); + if (Double.isFinite(y) == false) { throw new ElasticsearchParseException("invalid [{}] value [{}]; " + "must be between -3.4028234663852886E38 and 3.4028234663852886E38", Y_FIELD.getPreferredName(), @@ -94,7 +95,7 @@ public CartesianPoint resetFromCoordinates(String value, final boolean ignoreZVa } if (vals.length > 2) { try { - CartesianPoint.assertZValue(ignoreZValue, Float.parseFloat(vals[2].trim())); + CartesianPoint.assertZValue(ignoreZValue, Double.parseDouble(vals[2].trim())); } catch (NumberFormatException ex) { throw new ElasticsearchParseException("[{}]] must be a number", Y_FIELD.getPreferredName()); } @@ -115,14 +116,14 @@ private CartesianPoint resetFromWKT(String value, boolean ignoreZValue) { "but found {}", PointFieldMapper.CONTENT_TYPE, geometry.type()); } org.elasticsearch.geometry.Point point = (org.elasticsearch.geometry.Point) geometry; - return reset((float) point.getX(), (float) point.getY()); + return reset(point.getX(), point.getY()); } - public float getX() { + public double getX() { return this.x; } - public float getY() { + public double getY() { return this.y; } @@ -133,21 +134,15 @@ public boolean equals(Object o) { CartesianPoint point = (CartesianPoint) o; - if (Float.compare(point.x, x) != 0) return false; - if (Float.compare(point.y, y) != 0) return false; + if (Double.compare(point.x, x) != 0) return false; + if (Double.compare(point.y, y) != 0) return false; return true; } @Override public int hashCode() { - int result; - int temp; - temp = x != +0.0f ? Float.floatToIntBits(x) : 0; - result = Integer.hashCode(temp); - temp = y != +0.0f ? 
Float.floatToIntBits(y) : 0; - result = 31 * result + Integer.hashCode(temp); - return result; + return Objects.hash(x, y); } @Override @@ -162,8 +157,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint point, boolean ignoreZvalue) throws IOException, ElasticsearchParseException { - float x = Float.NaN; - float y = Float.NaN; + double x = Double.NaN; + double y = Double.NaN; NumberFormatException numberFormatException = null; if(parser.currentToken() == XContentParser.Token.START_OBJECT) { @@ -177,7 +172,7 @@ public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint po case VALUE_NUMBER: case VALUE_STRING: try { - x = subParser.floatValue(true); + x = subParser.doubleValue(true); } catch (NumberFormatException e) { numberFormatException = e; } @@ -192,7 +187,7 @@ public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint po case VALUE_NUMBER: case VALUE_STRING: try { - y = subParser.floatValue(true); + y = subParser.doubleValue(true); } catch (NumberFormatException e) { numberFormatException = e; } @@ -207,7 +202,7 @@ public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint po case VALUE_NUMBER: case VALUE_STRING: try { - CartesianPoint.assertZValue(ignoreZvalue, subParser.floatValue(true)); + CartesianPoint.assertZValue(ignoreZvalue, subParser.doubleValue(true)); } catch (NumberFormatException e) { numberFormatException = e; } @@ -227,12 +222,12 @@ public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint po } } if (numberFormatException != null) { - throw new ElasticsearchParseException("[{}] and [{}] must be valid float values", numberFormatException, + throw new ElasticsearchParseException("[{}] and [{}] must be valid double values", numberFormatException, X_FIELD.getPreferredName(), Y_FIELD.getPreferredName()); - } else if (Float.isNaN(x)) { + } else if (Double.isNaN(x)) { throw new ElasticsearchParseException("field [{}] missing", X_FIELD.getPreferredName()); - } else if (Float.isNaN(y)) { + } else if (Double.isNaN(y)) { throw new ElasticsearchParseException("field [{}] missing", Y_FIELD.getPreferredName()); } else { return point.reset(x, y); @@ -245,9 +240,9 @@ public static CartesianPoint parsePoint(XContentParser parser, CartesianPoint po if (subParser.currentToken() == XContentParser.Token.VALUE_NUMBER) { element++; if (element == 1) { - x = subParser.floatValue(); + x = subParser.doubleValue(); } else if (element == 2) { - y = subParser.floatValue(); + y = subParser.doubleValue(); } else { throw new ElasticsearchParseException("[{}}] field type does not accept > 2 dimensions", PointFieldMapper.CONTENT_TYPE); @@ -282,12 +277,12 @@ public static CartesianPoint parsePoint(Object value, CartesianPoint point, bool } } - public static double assertZValue(final boolean ignoreZValue, float zValue) { + public static double assertZValue(final boolean ignoreZValue, double zValue) { if (ignoreZValue == false) { throw new ElasticsearchParseException("Exception parsing coordinates: found Z value [{}] but [{}] " + "parameter is [{}]", zValue, IGNORE_Z_VALUE, ignoreZValue); } - if (Float.isFinite(zValue) == false) { + if (Double.isFinite(zValue) == false) { throw new ElasticsearchParseException("invalid [{}] value [{}]; " + "must be between -3.4028234663852886E38 and 3.4028234663852886E38", Z_FIELD.getPreferredName(), diff --git 
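Since x and y are now doubles end to end, callers of the parse helpers get full double precision back. A small usage sketch, not taken from the patch; the assumption that the generic parsePoint entry point also accepts WKT point strings is based on the private resetFromWKT helper above:

    // Assumed usage sketch: exercising the widened double parsing.
    CartesianPoint point = new CartesianPoint();
    point.resetFromCoordinates("42.000000001, 27.1", false);
    // point.getX() keeps 42.000000001 instead of rounding to float precision
    // point.getY() == 27.1

    // Presumably the Object-based entry point still dispatches strings to either the
    // "x, y" coordinate form or a WKT point:
    CartesianPoint other = CartesianPoint.parsePoint("POINT (30.0 50.0)", new CartesianPoint(), false);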
a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 644b5b5713aff..34750b8989027 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.GeoShapeParser; import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; @@ -90,7 +91,7 @@ public GeoShapeWithDocValuesFieldMapper build(BuilderContext context) { // @todo check coerce GeometryParser geometryParser = new GeometryParser(ft.orientation().getAsBoolean(), coerce().value(), ignoreZValue().value()); - ft.setGeometryParser((parser, mapper) -> geometryParser.parse(parser)); + ft.setGeometryParser(new GeoShapeParser(geometryParser)); ft.setGeometryIndexer(new GeoShapeIndexer(orientation().value().getAsBoolean(), ft.name())); ft.setGeometryQueryBuilder(new VectorGeoShapeQueryProcessor()); ft.setOrientation(orientation().value()); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 556bb4c636d05..e27a54be2d8b2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -12,12 +12,13 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.index.query.ShapeQueryPointProcessor; import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper.ParsedCartesianPoint; +import org.elasticsearch.xpack.spatial.index.query.ShapeQueryPointProcessor; import java.io.IOException; import java.util.ArrayList; @@ -69,10 +70,10 @@ protected ParsedPoint parseNullValue(Object nullValue, boolean ignoreZValue, boo ParsedCartesianPoint point = new ParsedCartesianPoint(); CartesianPoint.parsePoint(nullValue, point, ignoreZValue); if (ignoreMalformed == false) { - if (Float.isFinite(point.getX()) == false) { + if (Double.isFinite(point.getX()) == false) { throw new IllegalArgumentException("illegal x value [" + point.getX() + "]"); } - if (Float.isFinite(point.getY()) == false) { + if (Double.isFinite(point.getY()) == false) { throw new IllegalArgumentException("illegal y value [" + point.getY() + "]"); } } @@ -106,7 +107,7 @@ protected void addStoredFields(ParseContext context, List points, List fields, ParseContext context) { for (CartesianPoint point : points) { - 
context.doc().add(new XYDocValuesField(fieldType().name(), point.getX(), point.getY())); + context.doc().add(new XYDocValuesField(fieldType().name(), (float) point.getX(), (float) point.getY())); } } @@ -141,10 +142,10 @@ public String typeName() { protected static class ParsedCartesianPoint extends CartesianPoint implements AbstractPointGeometryFieldMapper.ParsedPoint { @Override public void validate(String fieldName) { - if (Float.isFinite(getX()) == false) { + if (Double.isFinite(getX()) == false) { throw new IllegalArgumentException("illegal x value [" + getX() + "] for " + fieldName); } - if (Float.isFinite(getY()) == false) { + if (Double.isFinite(getY()) == false) { throw new IllegalArgumentException("illegal y value [" + getY() + "] for " + fieldName); } } @@ -161,7 +162,12 @@ public boolean isNormalizable(double coord) { @Override public void resetCoords(double x, double y) { - this.reset((float)x, (float)y); + this.reset(x, y); + } + + @Override + public Point asGeometry() { + return new Point(getX(), getY()); } @Override @@ -216,7 +222,7 @@ public Class> processedClass() { public List indexShape(ParseContext context, List points) { ArrayList fields = new ArrayList<>(1); for (CartesianPoint point : points) { - fields.add(new XYPointField(fieldType.name(), point.getX(), point.getY())); + fields.add(new XYPointField(fieldType.name(), (float) point.getX(), (float) point.getY())); } return fields; } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 09cdd1ed63fe4..27b1214d9c2d8 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; +import org.elasticsearch.index.mapper.GeoShapeParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -53,7 +54,7 @@ public ShapeFieldMapper build(BuilderContext context) { ShapeFieldType ft = new ShapeFieldType(buildFullName(context), indexed, hasDocValues, meta); GeometryParser geometryParser = new GeometryParser(orientation().value().getAsBoolean(), coerce().value(), ignoreZValue().value()); - ft.setGeometryParser((parser, mapper) -> geometryParser.parse(parser)); + ft.setGeometryParser(new GeoShapeParser(geometryParser)); ft.setGeometryIndexer(new ShapeIndexer(ft.name())); ft.setGeometryQueryBuilder(new ShapeQueryProcessor()); ft.setOrientation(orientation().value()); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java index 2c1998f12aa64..56f2d3fd8d33c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java @@ -6,20 +6,29 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.apache.lucene.util.BytesRef; +import 
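Two details in these hunks are easy to miss: the Lucene XY fields still encode 32-bit floats, so the doubles that parsing now produces are narrowed only at index time, and the new asGeometry() hook turns a parsed point into a Geometry so field retrieval can serialize it either as a GeoJSON-style map or as WKT. Condensed, with names as in the hunks above:

    // Index time: XYPointField / XYDocValuesField are float based, hence the explicit casts.
    fields.add(new XYPointField(fieldType.name(), (float) point.getX(), (float) point.getY()));

    // Retrieval time: hand back a Geometry for the shared GeoJSON / WKT formatting code.
    @Override
    public Point asGeometry() {
        return new Point(getX(), getY());
    }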
org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xpack.spatial.common.CartesianPoint; import org.hamcrest.CoreMatchers; import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper.Names.IGNORE_Z_VALUE; import static org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper.Names.NULL_VALUE; @@ -295,4 +304,37 @@ public void testIgnoreZValue() throws IOException { ignoreZValue = ((PointFieldMapper)fieldMapper).ignoreZValue().value(); assertThat(ignoreZValue, equalTo(false)); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + AbstractGeometryFieldMapper mapper = new PointFieldMapper.Builder("field").build(context); + SourceLookup sourceLookup = new SourceLookup(); + + Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); + String wktPoint = "POINT (42.0 27.1)"; + Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); + String otherWktPoint = "POINT (30.0 50.0)"; + + // Test a single point in [x, y] array format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(42.0, 27.1))); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single point in "x, y" string format. + sourceLookup.setSource(Collections.singletonMap("field", "42.0,27.1")); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of points in [x, y] array format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)))); + assertEquals(List.of(jsonPoint, otherJsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint, otherWktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single point in well-known text format. 
+ sourceLookup.setSource(Collections.singletonMap("field", "POINT (42.0 27.1)")); + assertEquals(List.of(jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 496c7b6df6e6f..a6b6a3fe3840e 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -6,25 +6,32 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper.Names.IGNORE_Z_VALUE; import static org.hamcrest.Matchers.equalTo; @@ -324,4 +331,38 @@ public String toXContentString(ShapeFieldMapper mapper, boolean includeDefaults) public String toXContentString(ShapeFieldMapper mapper) throws IOException { return toXContentString(mapper, true); } + + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + ShapeFieldMapper mapper = new ShapeFieldMapper.Builder("field").build(context); + SourceLookup sourceLookup = new SourceLookup(); + + Map jsonLineString = Map.of("type", "LineString", "coordinates", + List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); + Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.3, 15.0)); + String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; + String wktPoint = "POINT (14.3 15.0)"; + + // Test a single shape in geojson format. + sourceLookup.setSource(Collections.singletonMap("field", jsonLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in geojson format. 
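The point these shape assertions make is consistency: whichever representation was written into _source, retrieval re-parses it and emits the representation the request asked for. In sketch form, reusing the line string from the test:

    // _source holds WKT here...
    sourceLookup.setSource(Collections.singletonMap("field", "LINESTRING (42.0 27.1, 30.0 50.0)"));
    // ...but the caller chooses the output representation:
    mapper.lookupValues(sourceLookup, null);   // [ {type=LineString, coordinates=[[42.0, 27.1], [30.0, 50.0]]} ]
    mapper.lookupValues(sourceLookup, "wkt");  // [ "LINESTRING (42.0 27.1, 30.0 50.0)" ]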
+ sourceLookup.setSource(Collections.singletonMap("field", List.of(jsonLineString, jsonPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a single shape in wkt format. + sourceLookup.setSource(Collections.singletonMap("field", wktLineString)); + assertEquals(List.of(jsonLineString), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString), mapper.lookupValues(sourceLookup, "wkt")); + + // Test a list of shapes in wkt format. + sourceLookup.setSource(Collections.singletonMap("field", List.of(wktLineString, wktPoint))); + assertEquals(List.of(jsonLineString, jsonPoint), mapper.lookupValues(sourceLookup, null)); + assertEquals(List.of(wktLineString, wktPoint), mapper.lookupValues(sourceLookup, "wkt")); + } } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java index ce861d8f81a38..5441d4e94cd76 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java @@ -17,8 +17,8 @@ import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.transport.nio.NioTransportPlugin; @@ -154,7 +154,7 @@ public void testSqlTranslateActionLicense() throws Exception { .query("SELECT * FROM test").get(); SearchSourceBuilder source = response.source(); assertThat(source.docValueFields(), Matchers.contains( - new FetchDocValuesContext.FieldAndFormat("count", null))); + new FieldAndFormat("count", null))); FetchSourceContext fetchSource = source.fetchSource(); assertThat(fetchSource.includes(), Matchers.arrayContaining("data")); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java index 251028be6b2f4..c6d6ce24b1435 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.sort.SortBuilders; import static java.util.Collections.singletonList; @@ -35,7 +35,7 @@ public void testSqlTranslateAction() { assertTrue(fetch.fetchSource()); assertArrayEquals(new String[] { "data", "count" }, fetch.includes()); assertEquals( - singletonList(new 
FetchDocValuesContext.FieldAndFormat("date", "epoch_millis")), + singletonList(new FieldAndFormat("date", "epoch_millis")), source.docValueFields()); assertEquals(singletonList(SortBuilders.fieldSort("count").missing("_last").unmappedType("long")), source.sorts()); } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/constant_keyword/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/constant_keyword/10_basic.yml index ad2e2677bf610..7a6c2a0d53b0d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/constant_keyword/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/constant_keyword/10_basic.yml @@ -184,3 +184,18 @@ setup: - match: {hits.hits.0._index: test1 } - match: {hits.hits.1._index: test1 } - match: {hits.hits.2._index: test2 } + +--- +"Field retrieval": + + - do: + search: + index: test* + body: + fields: [ foo ] + sort: [ { _index: asc } ] + + - match: { "hits.total.value": 3 } + - match: {hits.hits.0.fields.foo.0: bar } + - match: {hits.hits.1.fields.foo.0: bar } + - match: {hits.hits.2.fields.foo.0: baz } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java index f28b1a56a3b66..51561a475b9a4 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java @@ -205,6 +205,14 @@ public void parse(ParseContext context) throws IOException { context.doc().addWithKey(fieldType().name(), field); } + @Override + protected Object parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + return value; + } + @Override protected boolean indexedByDefault() { return false; diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/SparseVectorFieldMapper.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/SparseVectorFieldMapper.java index 51cafa01ecdcc..91cdeb63d8577 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/SparseVectorFieldMapper.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/SparseVectorFieldMapper.java @@ -137,12 +137,16 @@ public void parse(ParseContext context) { throw new UnsupportedOperationException(ERROR_MESSAGE_7X); } - @Override protected void parseCreateField(ParseContext context) { throw new IllegalStateException("parse is implemented directly"); } + @Override + protected Object parseSourceValue(Object value, String format) { + throw new UnsupportedOperationException(ERROR_MESSAGE_7X); + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index b434f75ba9892..fb44835e336fd 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -950,6 +950,19 @@ protected void parseCreateField(ParseContext context) throws IOException { 
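The vector mappers show the two simplest retrieval policies: dense_vector rejects any explicit format and otherwise returns the _source value untouched, while sparse_vector (removed in 7.x) surfaces the same 7.x error for retrieval as it does for parsing. A test-style sketch under those assumptions; the mapper construction and variable names are illustrative:

    Object vector = List.of(0.5, 0.25);   // illustrative values
    assertEquals(vector, denseVectorMapper.parseSourceValue(vector, null));
    expectThrows(IllegalArgumentException.class,
        () -> denseVectorMapper.parseSourceValue(vector, "some-format"));

    // sparse_vector: any retrieval attempt throws UnsupportedOperationException(ERROR_MESSAGE_7X).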
parseDoc.addAll(fields); } + @Override + protected String parseSourceValue(Object value, String format) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + } + + String keywordValue = value.toString(); + if (keywordValue.length() > ignoreAbove) { + return null; + } + return keywordValue; + } + void createFields(String value, Document parseDoc, Listfields) throws IOException { if (value == null || value.length() > ignoreAbove) { return; @@ -980,6 +993,11 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + protected String nullValue() { + return nullValue; + } + @Override protected void mergeOptions(FieldMapper other, List conflicts) { this.ignoreAbove = ((WildcardFieldMapper) other).ignoreAbove; diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index e1d75fbf3c5eb..4b4bc9c77fe7f 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -64,7 +65,9 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.function.BiFunction; import static org.hamcrest.Matchers.equalTo; @@ -774,6 +777,30 @@ protected String convertToRandomRegex(String randomValue) { return result.toString(); } + public void testParseSourceValue() { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); + + WildcardFieldMapper mapper = new WildcardFieldMapper.Builder("field").build(context); + assertEquals("value", mapper.parseSourceValue("value", null)); + assertEquals("42", mapper.parseSourceValue(42L, null)); + assertEquals("true", mapper.parseSourceValue(true, null)); + + WildcardFieldMapper ignoreAboveMapper = new WildcardFieldMapper.Builder("field") + .ignoreAbove(4) + .build(context); + assertNull(ignoreAboveMapper.parseSourceValue("value", null)); + assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null)); + assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null)); + + WildcardFieldMapper nullValueMapper = new WildcardFieldMapper.Builder("field") + .nullValue("NULL") + .build(context); + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSource(Collections.singletonMap("field", null)); + assertEquals(List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null)); + } + protected MappedFieldType provideMappedFieldType(String name) { if (name.equals(WILDCARD_FIELD_NAME)) { return wildcardFieldType.fieldType();
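Taken together with the constant_keyword YAML test above, the request-level wiring is straightforward: each entry of the `fields` array carries an optional per-field format, which presumably parses into the FieldAndFormat class introduced by this patch. A hedged sketch; the exact SearchSourceBuilder method that accepts the list is not shown here and is assumed, and "my_shape" is an illustrative field name:

    // "fields": [ "foo", { "field": "my_shape", "format": "wkt" } ]  presumably becomes:
    List<FieldAndFormat> fields = List.of(
        new FieldAndFormat("foo", null),          // default representation
        new FieldAndFormat("my_shape", "wkt"));   // geo fields may ask for well-known text

    // At fetch time each hit consults the mapper hooks exercised in the tests above:
    List<?> values = mapper.lookupValues(sourceLookup, "wkt");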