Skip to content

Commit

Permalink
Split document and metadata fields in GetResult (elastic#38373)
Browse files Browse the repository at this point in the history
This commit makes creators of GetField split the fields into document fields and metadata fields. It is part of a larger refactoring that aims to remove the calls to static methods of MapperService related to metadata fields, as discussed in elastic#24422.
  • Loading branch information
sandmannn authored and Gurkan Kaymak committed May 27, 2019
1 parent 564db41 commit 50e6cca
Show file tree
Hide file tree
Showing 21 changed files with 184 additions and 114 deletions.
4 changes: 2 additions & 2 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -162,8 +162,8 @@ task verifyVersions {
* after the backport of the backcompat code is complete.
*/

boolean bwc_tests_enabled = true
final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
boolean bwc_tests_enabled = false
final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/38373" /* place a PR link here when committing bwc changes */
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,12 +154,12 @@ protected GetResponse executeGet(GetRequest getRequest) {
if (indexedDocumentExists) {
return new GetResponse(
new GetResult(indexedDocumentIndex, MapperService.SINGLE_MAPPING_NAME, indexedDocumentId, 0, 1, 0L, true,
documentSource.iterator().next(), Collections.emptyMap())
documentSource.iterator().next(), Collections.emptyMap(), Collections.emptyMap())
);
} else {
return new GetResponse(
new GetResult(indexedDocumentIndex, MapperService.SINGLE_MAPPING_NAME, indexedDocumentId, UNASSIGNED_SEQ_NO, 0, -1,
false, null, Collections.emptyMap())
false, null, Collections.emptyMap(), Collections.emptyMap())
);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ public static GetResult extractGetResult(final UpdateRequest request, String con

// TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
return new GetResult(concreteIndex, request.type(), request.id(), seqNo, primaryTerm, version, true, sourceFilteredAsBytes,
Collections.emptyMap());
Collections.emptyMap(), Collections.emptyMap());
}

public static class Result {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,8 @@ public UpdateResponse build() {
if (getResult != null) {
update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(),
getResult.getSeqNo(), getResult.getPrimaryTerm(), update.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
getResult.isExists(), getResult.internalSourceRef(), getResult.getDocumentFields(),
getResult.getMetadataFields()));
}
update.setForcedRefresh(forcedRefresh);
return update;
Expand Down
141 changes: 91 additions & 50 deletions server/src/main/java/org/elasticsearch/index/get/GetResult.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.elasticsearch.index.get;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
Expand All @@ -36,11 +37,9 @@
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

Expand All @@ -67,7 +66,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
private long seqNo;
private long primaryTerm;
private boolean exists;
private Map<String, DocumentField> fields;
private Map<String, DocumentField> documentFields;
private Map<String, DocumentField> metaFields;
private Map<String, Object> sourceAsMap;
private BytesReference source;
private byte[] sourceAsBytes;
Expand All @@ -76,7 +76,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
}

public GetResult(String index, String type, String id, long seqNo, long primaryTerm, long version, boolean exists,
BytesReference source, Map<String, DocumentField> fields) {
BytesReference source, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
this.index = index;
this.type = type;
this.id = id;
Expand All @@ -89,9 +89,13 @@ public GetResult(String index, String type, String id, long seqNo, long primaryT
this.version = version;
this.exists = exists;
this.source = source;
this.fields = fields;
if (this.fields == null) {
this.fields = emptyMap();
this.documentFields = documentFields;
if (this.documentFields == null) {
this.documentFields = emptyMap();
}
this.metaFields = metaFields;
if (this.metaFields == null) {
this.metaFields = emptyMap();
}
}

Expand Down Expand Up @@ -222,20 +226,31 @@ public Map<String, Object> getSource() {
return sourceAsMap();
}


/**
 * Returns only the metadata fields of this result, keyed by field name.
 * Regular document fields are exposed separately via {@code getDocumentFields()}.
 */
public Map<String, DocumentField> getMetadataFields() {
return metaFields;
}

/**
 * Returns only the non-metadata (document) fields of this result, keyed by
 * field name. Metadata fields are exposed separately via {@code getMetadataFields()}.
 */
public Map<String, DocumentField> getDocumentFields() {
return documentFields;
}

/**
 * Returns all fields of this result — metadata and document fields merged into
 * one freshly allocated map. On a key collision the document field wins.
 * Mutating the returned map does not affect this result.
 */
public Map<String, DocumentField> getFields() {
    Map<String, DocumentField> merged = new HashMap<>(metaFields);
    merged.putAll(documentFields);
    return merged;
}

/**
 * Looks up a single field by name across both document and metadata fields.
 *
 * @param name the field name
 * @return the matching field, or {@code null} if not present
 */
public DocumentField field(String name) {
    // The stale pre-refactor line referencing the removed `fields` member is
    // dropped; lookup now goes through the merged view of both field maps.
    // NOTE: getFields() allocates a merged map per call — fine for the typical
    // small number of fields.
    return getFields().get(name);
}

@Override
public Iterator<DocumentField> iterator() {
    // Iterate the union of metadata and document fields. The stale pre-refactor
    // null-check on the removed `fields` member is dropped: getFields() always
    // returns a non-null map (both backing maps are initialized to empty maps),
    // so no null guard is needed.
    return getFields().values().iterator();
}

public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException {
Expand All @@ -244,21 +259,7 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params
builder.field(_PRIMARY_TERM, primaryTerm);
}

List<DocumentField> metaFields = new ArrayList<>();
List<DocumentField> otherFields = new ArrayList<>();
if (fields != null && !fields.isEmpty()) {
for (DocumentField field : fields.values()) {
if (field.getValues().isEmpty()) {
continue;
}
if (field.isMetadataField()) {
metaFields.add(field);
} else {
otherFields.add(field);
}
}
}
for (DocumentField field : metaFields) {
for (DocumentField field : metaFields.values()) {
// TODO: can we avoid having an exception here?
if (field.getName().equals(IgnoredFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
Expand All @@ -273,9 +274,9 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params
XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
}

if (!otherFields.isEmpty()) {
if (!documentFields.isEmpty()) {
builder.startObject(FIELDS);
for (DocumentField field : otherFields) {
for (DocumentField field : documentFields.values()) {
field.toXContent(builder, params);
}
builder.endObject();
Expand Down Expand Up @@ -317,7 +318,8 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index
long primaryTerm = UNASSIGNED_PRIMARY_TERM;
Boolean found = null;
BytesReference source = null;
Map<String, DocumentField> fields = new HashMap<>();
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
Expand All @@ -337,7 +339,8 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index
} else if (FOUND.equals(currentFieldName)) {
found = parser.booleanValue();
} else {
fields.put(currentFieldName, new DocumentField(currentFieldName, Collections.singletonList(parser.objectText())));
metaFields.put(currentFieldName, new DocumentField(currentFieldName,
Collections.singletonList(parser.objectText())));
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (SourceFieldMapper.NAME.equals(currentFieldName)) {
Expand All @@ -350,20 +353,20 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index
} else if (FIELDS.equals(currentFieldName)) {
while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
DocumentField getField = DocumentField.fromXContent(parser);
fields.put(getField.getName(), getField);
documentFields.put(getField.getName(), getField);
}
} else {
parser.skipChildren(); // skip potential inner objects for forward compatibility
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (IgnoredFieldMapper.NAME.equals(currentFieldName)) {
fields.put(currentFieldName, new DocumentField(currentFieldName, parser.list()));
metaFields.put(currentFieldName, new DocumentField(currentFieldName, parser.list()));
} else {
parser.skipChildren(); // skip potential inner arrays for forward compatibility
}
}
}
return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, fields);
return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, documentFields, metaFields);
}

public static GetResult fromXContent(XContentParser parser) throws IOException {
Expand All @@ -379,6 +382,35 @@ public static GetResult readGetResult(StreamInput in) throws IOException {
return result;
}

/**
 * Reads a field map from the stream: a vint count followed by that many
 * serialized {@code DocumentField}s, keyed by field name.
 *
 * @param in the stream to read from
 * @return a mutable, possibly empty map — never {@code null}
 * @throws IOException if reading from the stream fails
 */
private Map<String, DocumentField> readFields(StreamInput in) throws IOException {
    final int count = in.readVInt();
    if (count == 0) {
        return new HashMap<>();
    }
    final Map<String, DocumentField> result = new HashMap<>(count);
    for (int i = 0; i < count; i++) {
        final DocumentField field = DocumentField.readDocumentField(in);
        result.put(field.getName(), field);
    }
    return result;
}

/**
 * Partitions {@code fields} into metadata and non-metadata fields, adding each
 * entry to {@code outMetadata} or {@code outOther} respectively.
 *
 * @param fields      the fields to split; {@code null} is treated as empty (no-op)
 * @param outOther    receives the non-metadata (document) fields
 * @param outMetadata receives the metadata fields
 */
static void splitFieldsByMetadata(Map<String, DocumentField> fields, Map<String, DocumentField> outOther,
                                  Map<String, DocumentField> outMetadata) {
    if (fields == null) {
        return;
    }
    fields.forEach((name, field) -> {
        Map<String, DocumentField> target = field.isMetadataField() ? outMetadata : outOther;
        target.put(name, field);
    });
}

@Override
public void readFrom(StreamInput in) throws IOException {
index = in.readString();
Expand All @@ -393,15 +425,14 @@ public void readFrom(StreamInput in) throws IOException {
if (source.length() == 0) {
source = null;
}
int size = in.readVInt();
if (size == 0) {
fields = emptyMap();
if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
documentFields = readFields(in);
metaFields = readFields(in);
} else {
fields = new HashMap<>(size);
for (int i = 0; i < size; i++) {
DocumentField field = DocumentField.readDocumentField(in);
fields.put(field.getName(), field);
}
Map<String, DocumentField> fields = readFields(in);
documentFields = new HashMap<>();
metaFields = new HashMap<>();
splitFieldsByMetadata(fields, documentFields, metaFields);
}
}
}
Expand All @@ -417,13 +448,22 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(exists);
if (exists) {
out.writeBytesReference(source);
if (fields == null) {
out.writeVInt(0);
if (out.getVersion().onOrAfter(Version.V_7_3_0)) {
writeFields(out, documentFields);
writeFields(out, metaFields);
} else {
out.writeVInt(fields.size());
for (DocumentField field : fields.values()) {
field.writeTo(out);
}
writeFields(out, this.getFields());
}
}
}

/**
 * Writes a field map to the stream as a vint count followed by each serialized
 * {@code DocumentField}; a {@code null} map is written as a zero count.
 *
 * @param out    the stream to write to
 * @param fields the fields to serialize; may be {@code null}
 * @throws IOException if writing to the stream fails
 */
private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
    if (fields == null) {
        out.writeVInt(0);
        return;
    }
    out.writeVInt(fields.size());
    for (DocumentField field : fields.values()) {
        field.writeTo(out);
    }
}
Expand All @@ -444,13 +484,14 @@ public boolean equals(Object o) {
Objects.equals(index, getResult.index) &&
Objects.equals(type, getResult.type) &&
Objects.equals(id, getResult.id) &&
Objects.equals(fields, getResult.fields) &&
Objects.equals(documentFields, getResult.documentFields) &&
Objects.equals(metaFields, getResult.metaFields) &&
Objects.equals(sourceAsMap(), getResult.sourceAsMap());
}

@Override
public int hashCode() {
return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, fields, sourceAsMap());
return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, documentFields, metaFields, sourceAsMap());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ public GetResult getForUpdate(String type, String id, long ifSeqNo, long ifPrima
public GetResult get(Engine.GetResult engineGetResult, String id, String type,
String[] fields, FetchSourceContext fetchSourceContext) {
if (!engineGetResult.exists()) {
return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null);
return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null);
}

currentMetric.inc();
Expand Down Expand Up @@ -174,7 +174,7 @@ private GetResult innerGet(String type, String id, String[] gFields, boolean rea
}

if (get == null || get.exists() == false) {
return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null);
return new GetResult(shardId.getIndexName(), type, id, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, -1, false, null, null, null);
}

try {
Expand All @@ -187,7 +187,8 @@ private GetResult innerGet(String type, String id, String[] gFields, boolean rea

private GetResult innerGetLoadFromStoredFields(String type, String id, String[] gFields, FetchSourceContext fetchSourceContext,
Engine.GetResult get, MapperService mapperService) {
Map<String, DocumentField> fields = null;
Map<String, DocumentField> documentFields = null;
Map<String, DocumentField> metaDataFields = null;
BytesReference source = null;
DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
FieldsVisitor fieldVisitor = buildFieldsVisitors(gFields, fetchSourceContext);
Expand All @@ -201,9 +202,14 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]

if (!fieldVisitor.fields().isEmpty()) {
fieldVisitor.postProcess(mapperService);
fields = new HashMap<>(fieldVisitor.fields().size());
documentFields = new HashMap<>();
metaDataFields = new HashMap<>();
for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) {
fields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
if (MapperService.isMetadataField(entry.getKey())) {
metaDataFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
} else {
documentFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
}
}
}
}
Expand Down Expand Up @@ -240,7 +246,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]
}

return new GetResult(shardId.getIndexName(), type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm,
get.version(), get.exists(), source, fields);
get.version(), get.exists(), source, documentFields, metaDataFields);
}

private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ protected ExplainResponse createTestInstance() {
0, 1, randomNonNegativeLong(),
true,
RandomObjects.randomSource(random()),
singletonMap(fieldName, new DocumentField(fieldName, values)));
singletonMap(fieldName, new DocumentField(fieldName, values)), null);
return new ExplainResponse(index, type, id, exist, explanation, getResult);
}

Expand All @@ -87,7 +87,7 @@ public void testToXContent() throws IOException {
Explanation explanation = Explanation.match(1.0f, "description", Collections.emptySet());
GetResult getResult = new GetResult(null, null, null, 0, 1, -1, true, new BytesArray("{ \"field1\" : " +
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1",
singletonList("value1"))));
singletonList("value1"))), null);
ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult);

XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
Expand Down
Loading

0 comments on commit 50e6cca

Please sign in to comment.