Skip to content

Commit

Permalink
Address Christoph's feedback
Browse files Browse the repository at this point in the history
  • Loading branch information
mayya-sharipova committed Jun 3, 2020
1 parent 3de6272 commit 3a56fd2
Show file tree
Hide file tree
Showing 12 changed files with 58 additions and 54 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@
package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
Expand All @@ -44,7 +46,7 @@ public void setup() {
// Register only the mapper-extras plugin in the test node: it supplies the
// "rank_feature" field type (and its metadata field) that these tests exercise.
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
}

public void testBasics() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
Expand All @@ -55,4 +57,14 @@ public void testBasics() throws Exception {
assertEquals(mapping, mapper.mappingSource().toString());
assertNotNull(mapper.metadataMapper(RankFeatureMetaFieldMapper.class));
}

// Verifies that supplying the rank-feature metadata field directly inside a
// document's source is rejected at parse time with a MapperParsingException,
// and that the error message names the offending field.
public void testDocumentParsingFailsOnMetaField() throws Exception {
// Minimal "_doc" mapping with no user-defined fields.
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject());
DocumentMapper mapper = parser.parse("_doc", new CompressedXContent(mapping));
// CONTENT_TYPE is the name under which the rank-feature meta field is registered.
String rfMetaField = RankFeatureMetaFieldMapper.CONTENT_TYPE;
// Build a source document that tries to set that metadata field explicitly.
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON)));
// The failure message must identify the rejected metadata field by name.
assertTrue(e.getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import org.apache.lucene.util.BitSetIterator;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
Expand All @@ -56,12 +55,11 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {

@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
innerHitsExecute(context.query(), context.searcher(), context.mapperService(), hits);
innerHitsExecute(context.query(), context.searcher(), hits);
}

static void innerHitsExecute(Query mainQuery,
IndexSearcher indexSearcher,
MapperService mapperService,
SearchHit[] hits) throws IOException {
List<PercolateQuery> percolateQueries = locatePercolatorQuery(mainQuery);
if (percolateQueries.isEmpty()) {
Expand Down Expand Up @@ -102,8 +100,8 @@ static void innerHitsExecute(Query mainQuery,
}

IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())),
mapperService.isMetadataField(fieldName));
// _percolator_document_slot fields are document fields and should be under "fields" section in a hit
hit.setDocumentField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,20 +34,16 @@
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESTestCase;
import org.mockito.Mockito;

import java.util.Collections;
import java.util.stream.IntStream;

public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {

public void testHitsExecute() throws Exception {
MapperService mapperService = Mockito.mock(MapperService.class);
Mockito.when(mapperService.isMetadataField(Mockito.anyString())).thenReturn(false);
try (Directory directory = newDirectory()) {
// Need a one doc index:
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Expand All @@ -68,7 +64,7 @@ public void testHitsExecute() throws Exception {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, mapperService, hits);
PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
assertNotNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
assertEquals(0, (int) hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX).getValue());
}
Expand All @@ -83,7 +79,7 @@ public void testHitsExecute() throws Exception {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, mapperService, hits);
PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
}

Expand All @@ -97,7 +93,7 @@ public void testHitsExecute() throws Exception {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, mapperService, hits);
PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) {
DocumentField hitField = hitContext.hit().getFields().get(NAME);
if (hitField == null) {
hitField = new DocumentField(NAME, new ArrayList<>(1));
hitContext.hit().setField(NAME, hitField, false);
hitContext.hit().setDocumentField(NAME, hitField);
}
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
hitContext.hit().getId());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -659,7 +659,6 @@ public void testSearchFieldsMetadata() throws Exception {

assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue());
assertThat(searchResponse.getHits().getAt(0).isMetadataField("_routing"), equalTo(true));
assertThat(searchResponse.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1"));
}

Expand Down Expand Up @@ -1186,7 +1185,6 @@ public void testLoadMetadata() throws Exception {
Map<String, DocumentField> fields = response.getHits().getAt(0).getFields();

assertThat(fields.get("field1"), nullValue());
assertThat(response.getHits().getAt(0).isMetadataField("_routing"), equalTo(true));
assertThat(fields.get("_routing").getValue().toString(), equalTo("1"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -634,7 +634,9 @@ public void close() throws IOException {
}

/**
* @return Whether a field is a metadata field for older indexes
* @return Whether a field is a metadata field
* Deserialization of SearchHit objects sent from pre 7.8 nodes and GetResults objects sent from pre 7.3 nodes,
* uses this method to divide fields into meta and document fields.
* TODO: remove in v 9.0
* @deprecated Use an instance method isMetadataField instead
*/
Expand All @@ -643,7 +645,7 @@ public static boolean isMetadataFieldStatic(String fieldName) {
if (IndicesModule.getBuiltInMetadataFields().contains(fieldName)) {
return true;
}
// adding _size field as meta-field for bwc
// if a node had Size Plugin installed, _size field should also be considered a meta-field
return fieldName.equals("_size");
}

Expand Down
17 changes: 4 additions & 13 deletions server/src/main/java/org/elasticsearch/search/SearchHit.java
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
private BytesReference source;

private Map<String, DocumentField> documentFields;
private Map<String, DocumentField> metaFields;
private final Map<String, DocumentField> metaFields;

private Map<String, HighlightField> highlightFields = null;

Expand Down Expand Up @@ -450,22 +450,13 @@ public DocumentField field(String fieldName) {
}
}

public boolean isMetadataField(String fieldName) {
return metaFields.containsKey(fieldName);
}

/*
* Adds a new DocumentField to the map in case both parameters are not null.
* */
public void setField(String fieldName, DocumentField field, boolean isMetaField) {
public void setDocumentField(String fieldName, DocumentField field) {
if (fieldName == null || field == null) return;
if (isMetaField) {
if (metaFields.size() == 0) this.metaFields = new HashMap<>();
this.metaFields.put(fieldName, field);
} else {
if (documentFields.size() == 0) this.documentFields = new HashMap<>();
this.documentFields.put(fieldName, field);
}
if (documentFields.size() == 0) this.documentFields = new HashMap<>();
this.documentFields.put(fieldName, field);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,20 +203,20 @@ private SearchHit createSearchHit(SearchContext context,
int subDocId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext) {
Map<String, DocumentField> docFields = emptyMap();
Map<String, DocumentField> metaFields = emptyMap();
if (fieldsVisitor == null) {
return new SearchHit(docId, null, docFields, metaFields);
return new SearchHit(docId, null, null, null);
}

loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId);
fieldsVisitor.postProcess(context.mapperService());
SearchHit searchHit;
if (fieldsVisitor.fields().isEmpty() == false) {
docFields = new HashMap<>();
metaFields = new HashMap<>();
Map<String, DocumentField> docFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields);
searchHit = new SearchHit(docId, fieldsVisitor.id(), docFields, metaFields);
} else {
searchHit = new SearchHit(docId, fieldsVisitor.id(), emptyMap(), emptyMap());
}
SearchHit searchHit = new SearchHit(docId, fieldsVisitor.id(), docFields, metaFields);
// Set _source if requested.
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,8 +124,9 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept
DocumentField hitField = hit.field(field);
if (hitField == null) {
hitField = new DocumentField(field, new ArrayList<>(2));
// docValues field is put under "fields" even if they are meta-data fields
hit.setField(field, hitField, false);
// even if we request a doc values of a meta-field (e.g. _routing),
// docValues fields will still be document fields, and put under "fields" section of a hit.
hit.setDocumentField(field, hitField);
}
final List<Object> values = hitField.getValues();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept
}
hitField = new DocumentField(scriptFieldName, values);
// script fields are never meta-fields
hit.setField(scriptFieldName, hitField, false);
hit.setDocumentField(scriptFieldName, hitField);

}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,7 @@
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;
Expand All @@ -41,6 +35,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import java.util.function.Supplier;

import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
Expand Down Expand Up @@ -104,15 +99,13 @@ private static DocumentField mutateDocumentField(DocumentField documentField) {
}

public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType) {
return randomDocumentField(xContentType, randomBoolean());
return randomDocumentField(xContentType, randomBoolean(), fieldName -> false); // don't exclude any meta-fields
}

public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType, boolean isMetafield) {
public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType, boolean isMetafield,
Predicate<String> excludeMetaFieldFilter) {
if (isMetafield) {
String metaField = randomValueOtherThanMany(field -> field.equals(TypeFieldMapper.NAME)
|| field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME)
|| field.equals(VersionFieldMapper.NAME) || field.equals(SourceFieldMapper.NAME)
|| field.equals(SeqNoFieldMapper.NAME),
String metaField = randomValueOtherThanMany(excludeMetaFieldFilter,
() -> randomFrom(IndicesModule.getBuiltInMetadataFields()));
DocumentField documentField;
if (metaField.equals(IgnoredFieldMapper.NAME)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

Expand All @@ -38,6 +44,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

import static java.util.Collections.singletonList;
Expand Down Expand Up @@ -242,8 +249,14 @@ public static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> rando
int numFields = isMetaFields? randomIntBetween(1, 3) : randomIntBetween(2, 10);
Map<String, DocumentField> fields = new HashMap<>(numFields);
Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
// As we are using this to construct a GetResult object that already contains
// index, type, id, version, seqNo, and source fields, we need to exclude them from random fields
Predicate<String> excludeMetaFieldFilter = field ->
field.equals(TypeFieldMapper.NAME) || field.equals(IndexFieldMapper.NAME) ||
field.equals(IdFieldMapper.NAME) || field.equals(VersionFieldMapper.NAME) ||
field.equals(SourceFieldMapper.NAME) || field.equals(SeqNoFieldMapper.NAME) ;
while (fields.size() < numFields) {
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType, isMetaFields);
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType, isMetaFields, excludeMetaFieldFilter);
DocumentField getField = tuple.v1();
DocumentField expectedGetField = tuple.v2();
if (fields.putIfAbsent(getField.getName(), getField) == null) {
Expand Down

0 comments on commit 3a56fd2

Please sign in to comment.