Commit

Search hit refactoring (#41656)
Refactor SearchHit to have separate document and meta fields.
This is part of the bigger refactoring tracked in issue #24422 to remove the
dependency on MapperService when checking whether a field is a metafield.

Relates to PR: #38373
Relates to issue #24422
sandmannn authored Mar 30, 2020
1 parent 40067d0 commit 128bcc5
Showing 34 changed files with 222 additions and 119 deletions.
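
For orientation before the per-file hunks, the sketch below shows how call sites change with this refactoring, based only on the signatures visible in the diff: SearchHit now takes two field maps instead of one, and field values are added through setField(...) rather than by putting into a shared map. The class name, the variable names, and the assumption that the first map holds document fields and the second holds metadata fields are illustrative guesses, not taken from the SearchHit Javadoc.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

public class SearchHitConstructionSketch {
    static SearchHit buildHit() {
        // Before this change, call sites used new SearchHit(docId, id, fields) with a
        // single map that mixed regular document fields and metadata fields.
        Map<String, DocumentField> documentFields = new HashMap<>(); // regular fields (assumed position)
        Map<String, DocumentField> metaFields = new HashMap<>();     // metafields, e.g. _routing (assumed position)
        SearchHit hit = new SearchHit(0, "my-id", documentFields, metaFields);

        // setField(...) replaces direct puts into the field map, so SearchHit itself can
        // route the value to the right bucket without consulting MapperService.
        hit.setField("my_field",
            new DocumentField("my_field", Collections.singletonList("some value")));
        return hit;
    }
}

The hunks that follow make this same mechanical change at existing call sites, mostly in tests.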
@@ -43,7 +43,7 @@ static List<SearchHit> randomEvents() {
         if (randomBoolean()) {
             hits = new ArrayList<>();
             for (int i = 0; i < size; i++) {
-                hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>()));
+                hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>(), new HashMap<>()));
             }
         }
         if (randomBoolean()) {
@@ -164,7 +164,7 @@ public void testSourceToXContent() throws IOException {
     }

     public void testSearchResponseToXContent() throws IOException {
-        SearchHit hit = new SearchHit(1, "id", Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };
@@ -101,7 +101,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept
             shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
             shardContext.lookup().source().setSource(document);
             hitContext.reset(
-                new SearchHit(slot, "unknown", Collections.emptyMap()),
+                new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()),
                 percolatorLeafReaderContext, slot, percolatorIndexSearcher
             );
             hitContext.cache().clear();
@@ -107,7 +107,7 @@ static void innerHitsExecute(Query mainQuery,
                     hit.fields(fields);
                 }
                 IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
-                fields.put(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
+                hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
             }
         }
     }
@@ -72,7 +72,7 @@ public void testDCGAt() {
         SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
-            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -122,7 +122,7 @@ public void testDCGAtSixMissingRatings() {
                     rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
                 }
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -179,7 +179,7 @@ public void testDCGAtFourMoreRatings() {
         // only create four hits
         SearchHit[] hits = new SearchHit[4];
         for (int i = 0; i < 4; i++) {
-            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -115,7 +115,7 @@ private SearchHit[] createSearchHits(List<RatedDocument> rated, Integer[] releva
             if (relevanceRatings[i] != null) {
                 rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -203,7 +203,7 @@ public void testXContentParsingIsNotLenient() throws IOException {
     private static SearchHit[] createSearchHits(int from, int to, String index) {
         SearchHit[] hits = new SearchHit[to + 1 - from];
         for (int i = from; i <= to; i++) {
-            hits[i] = new SearchHit(i, i + "", Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -112,7 +112,7 @@ public void testIgnoreUnlabeled() {
         rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
         // add an unlabeled search hit
         SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
-        searchHits[2] = new SearchHit(2, "2", Collections.emptyMap());
+        searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap());
         searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));

         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
@@ -131,7 +131,7 @@ public void testIgnoreUnlabeled() {
     public void testNoRatedDocs() throws Exception {
         SearchHit[] hits = new SearchHit[5];
         for (int i = 0; i < 5; i++) {
-            hits[i] = new SearchHit(i, i + "", Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
@@ -253,7 +253,7 @@ private static PrecisionAtK mutate(PrecisionAtK original) {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -179,7 +179,7 @@ public void testToXContent() throws IOException {
     }

     private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
-        SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap());
+        SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap());
         hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         hit.score(1.0f);
         return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
@@ -39,7 +39,8 @@ public class RatedSearchHitTests extends ESTestCase {
     public static RatedSearchHit randomRatedSearchHit() {
         OptionalInt rating = randomBoolean() ? OptionalInt.empty()
             : OptionalInt.of(randomIntBetween(0, 5));
-        SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), Collections.emptyMap());
+        SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), Collections.emptyMap(),
+            Collections.emptyMap());
         RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating);
         return ratedSearchHit;
     }
@@ -52,7 +53,8 @@ private static RatedSearchHit mutateTestItem(RatedSearchHit original) {
             rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
             break;
         case 1:
-            hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap());
+            hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(),
+                Collections.emptyMap());
             break;
         default:
             throw new IllegalStateException("The test should only allow two parameters mutated");
@@ -114,7 +114,7 @@ public void testNoRatedDocs() throws Exception {
         int k = 5;
         SearchHit[] hits = new SearchHit[k];
         for (int i = 0; i < k; i++) {
-            hits[i] = new SearchHit(i, i + "", Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
@@ -236,7 +236,7 @@ private static RecallAtK mutate(RecallAtK original) {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -503,7 +503,7 @@ protected RequestWrapper<?> buildRequest(Hit doc) {
         action.start();

         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);
@@ -159,7 +159,7 @@ public void testScrollKeepAlive() {

     private SearchResponse createSearchResponse() {
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);