Commit 63afc61

Introduce FetchContext (#62357)
We currently pass a SearchContext around to share configuration among FetchSubPhases. With the introduction of runtime fields, it would be useful to start storing some state on this context to be shared between different subphases (for example, stored fields or search lookups can be loaded lazily but referred to by many different subphases). However, SearchContext is a very large and unwieldy class, and adding more methods or state here feels like a bridge too far. This commit introduces a new FetchContext class that exposes only those methods on SearchContext that are required for fetch phases. This reduces the API surface area for fetch phases considerably, and should give us some leeway to add further state.
1 parent d091c12 commit 63afc61

25 files changed, with 327 additions and 181 deletions.
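
To make the new abstraction concrete, here is a rough sketch of the kind of class the commit message describes: a narrow facade over SearchContext that exposes only the state fetch sub-phases actually use. The query(), highlight() and getSearchExt() accessors are the ones exercised in the diffs below; the delegating structure, constructor and exact imports are illustrative assumptions, not the actual class added by this commit.

```java
import org.apache.lucene.search.Query;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;

/**
 * Illustrative sketch only, not the class introduced by this commit:
 * a small facade over SearchContext exposing just what fetch sub-phases need.
 */
public class FetchContext {

    private final SearchContext searchContext;

    public FetchContext(SearchContext searchContext) {
        this.searchContext = searchContext;
    }

    /** The query being executed, used below to locate percolator queries. */
    public Query query() {
        return searchContext.query();
    }

    /** Highlighting configuration, or null if highlighting was not requested. */
    public SearchHighlightContext highlight() {
        return searchContext.highlight();
    }

    /** Plugin-registered search extensions, looked up by name. */
    public SearchExtBuilder getSearchExt(String name) {
        return searchContext.getSearchExt(name);
    }

    // Further fetch-only accessors (stored fields, lazily built lookups, ...)
    // could live here without growing the SearchContext API itself.
}
```

Fetch sub-phases then take this narrower type in getProcessor(FetchContext, SearchLookup), which is the signature change running through the diffs that follow.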

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java

Lines changed: 7 additions & 10 deletions
@@ -26,15 +26,14 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.fetch.FetchContext;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.search.lookup.SourceLookup;

@@ -57,11 +56,11 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
     }

     @Override
-    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
-        if (searchContext.highlight() == null) {
+    public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) {
+        if (fetchContext.highlight() == null) {
             return null;
         }
-        List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());
+        List<PercolateQuery> percolateQueries = locatePercolatorQuery(fetchContext.query());
         if (percolateQueries.isEmpty()) {
             return null;
         }
@@ -70,7 +69,7 @@ public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLo
             LeafReaderContext ctx;

             @Override
-            public void setNextReader(LeafReaderContext readerContext) throws IOException {
+            public void setNextReader(LeafReaderContext readerContext) {
                 this.ctx = readerContext;
             }

@@ -111,10 +110,8 @@ public void process(HitContext hit) throws IOException {
                 );
                 subContext.sourceLookup().setSource(document);
                 // force source because MemoryIndex does not store fields
-                SearchHighlightContext highlight = new SearchHighlightContext(searchContext.highlight().fields(), true);
-                QueryShardContext shardContext = new QueryShardContext(searchContext.getQueryShardContext());
-                FetchSubPhaseProcessor processor = highlightPhase.getProcessor(shardContext, searchContext.shardTarget(),
-                    highlight, query);
+                SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true);
+                FetchSubPhaseProcessor processor = highlightPhase.getProcessor(fetchContext, highlight, query);
                 processor.process(subContext);
                 for (Map.Entry<String, HighlightField> entry : subContext.hit().getHighlightFields().entrySet()) {
                     if (percolateQuery.getDocuments().size() == 1) {

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java

Lines changed: 3 additions & 3 deletions
@@ -34,9 +34,9 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.search.fetch.FetchContext;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;

 import java.io.IOException;
@@ -58,10 +58,10 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
     static final String FIELD_NAME_PREFIX = "_percolator_document_slot";

     @Override
-    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
+    public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) throws IOException {

         List<PercolateContext> percolateContexts = new ArrayList<>();
-        List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());
+        List<PercolateQuery> percolateQueries = locatePercolatorQuery(fetchContext.query());
         boolean singlePercolateQuery = percolateQueries.size() == 1;
         for (PercolateQuery pq : percolateQueries) {
             percolateContexts.add(new PercolateContext(pq, singlePercolateQuery));

modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java

Lines changed: 9 additions & 9 deletions
@@ -28,33 +28,33 @@
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
+import org.elasticsearch.search.fetch.FetchContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ESTestCase;
 import org.mockito.Mockito;

-import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;

 import static java.util.Collections.emptyMap;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Mockito.mock;

 public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {

-    public void testHitsExecutionNeeded() throws IOException {
+    public void testHitsExecutionNeeded() {
         PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")),
             new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery());
         PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap());
-        SearchContext searchContext = Mockito.mock(SearchContext.class);
-        Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
-        Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());
+        FetchContext fetchContext = mock(FetchContext.class);
+        Mockito.when(fetchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
+        Mockito.when(fetchContext.query()).thenReturn(new MatchAllDocsQuery());

-        assertNull(subFetchPhase.getProcessor(searchContext, null));
-        Mockito.when(searchContext.query()).thenReturn(percolateQuery);
-        assertNotNull(subFetchPhase.getProcessor(searchContext, null));
+        assertNull(subFetchPhase.getProcessor(fetchContext, null));
+        Mockito.when(fetchContext.query()).thenReturn(percolateQuery);
+        assertNotNull(subFetchPhase.getProcessor(fetchContext, null));
     }

     public void testLocatePercolatorQuery() {

modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java

Lines changed: 4 additions & 6 deletions
@@ -26,7 +26,6 @@
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.memory.MemoryIndex;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.ScoreDoc;
@@ -37,9 +36,9 @@
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.fetch.FetchContext;
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
 import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SourceLookup;
 import org.elasticsearch.test.ESTestCase;

@@ -63,7 +62,6 @@ public void testHitsExecute() throws Exception {
         PercolatorMatchedSlotSubFetchPhase phase = new PercolatorMatchedSlotSubFetchPhase();

         try (DirectoryReader reader = DirectoryReader.open(directory)) {
-            IndexSearcher indexSearcher = new IndexSearcher(reader);
             LeafReaderContext context = reader.leaves().get(0);
             // A match:
             {
@@ -75,7 +73,7 @@ public void testHitsExecute() throws Exception {
                 PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
                     new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

-                SearchContext sc = mock(SearchContext.class);
+                FetchContext sc = mock(FetchContext.class);
                 when(sc.query()).thenReturn(percolateQuery);

                 FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
@@ -96,7 +94,7 @@ public void testHitsExecute() throws Exception {
                 PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
                     new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

-                SearchContext sc = mock(SearchContext.class);
+                FetchContext sc = mock(FetchContext.class);
                 when(sc.query()).thenReturn(percolateQuery);

                 FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
@@ -116,7 +114,7 @@ public void testHitsExecute() throws Exception {
                 PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
                     new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());

-                SearchContext sc = mock(SearchContext.class);
+                FetchContext sc = mock(FetchContext.class);
                 when(sc.query()).thenReturn(percolateQuery);

                 FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);

rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml

Lines changed: 4 additions & 0 deletions
@@ -115,6 +115,10 @@ setup:
         body:
           keyword: [ "a" ]

+  - do:
+      indices.refresh:
+        index: [ test ]
+
   - do:
       catch: bad_request
       search:

server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java

Lines changed: 12 additions & 17 deletions
@@ -19,26 +19,22 @@

 package org.elasticsearch.search.fetch;

-import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.termvectors.TermVectorsRequest;
-import org.elasticsearch.action.termvectors.TermVectorsResponse;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.termvectors.TermVectorsService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.search.SearchExtBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -121,21 +117,21 @@ private static final class TermVectorsFetchSubPhase implements FetchSubPhase {
         private static final String NAME = "term_vectors_fetch";

         @Override
-        public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
+        public FetchSubPhaseProcessor getProcessor(FetchContext searchContext, SearchLookup lookup) {
             return new FetchSubPhaseProcessor() {
                 @Override
                 public void setNextReader(LeafReaderContext readerContext) {

                 }

                 @Override
-                public void process(HitContext hitContext) {
+                public void process(HitContext hitContext) throws IOException {
                     hitExecute(searchContext, hitContext);
                 }
             };
         }

-        private void hitExecute(SearchContext context, HitContext hitContext) {
+        private void hitExecute(FetchContext context, HitContext hitContext) throws IOException {
             TermVectorsFetchBuilder fetchSubPhaseBuilder = (TermVectorsFetchBuilder)context.getSearchExt(NAME);
             if (fetchSubPhaseBuilder == null) {
                 return;
@@ -146,19 +142,18 @@ private void hitExecute(SearchContext context, HitContext hitContext) {
                 hitField = new DocumentField(NAME, new ArrayList<>(1));
                 hitContext.hit().setDocumentField(NAME, hitField);
             }
-            TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
-                hitContext.hit().getType(), hitContext.hit().getId());
-            TermVectorsResponse termVector = TermVectorsService.getTermVectors(context.indexShard(), termVectorsRequest);
-            try {
+            Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field);
+            if (terms != null) {
+                TermsEnum te = terms.iterator();
                 Map<String, Integer> tv = new HashMap<>();
-                TermsEnum terms = termVector.getFields().terms(field).iterator();
                 BytesRef term;
-                while ((term = terms.next()) != null) {
-                    tv.put(term.utf8ToString(), terms.postings(null, PostingsEnum.ALL).freq());
+                PostingsEnum pe = null;
+                while ((term = te.next()) != null) {
+                    pe = te.postings(pe, PostingsEnum.FREQS);
+                    pe.nextDoc();
+                    tv.put(term.utf8ToString(), pe.freq());
                 }
                 hitField.getValues().add(tv);
-            } catch (IOException e) {
-                LogManager.getLogger(FetchSubPhasePluginIT.class).info("Swallowed exception", e);
             }
         }
     }
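
The FetchSubPhasePluginIT change above replaces the TermVectorsService round trip with reading term vectors directly off the leaf reader. Below is a minimal, self-contained sketch of that same Lucene pattern, assuming the Lucene 8.x APIs this branch uses (ByteBuffersDirectory, IndexReader#getTermVector); the class name, field name and sample text are made up for the example.

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;

import java.util.HashMap;
import java.util.Map;

public class TermVectorFreqExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            // Index one document with term vectors enabled on the "body" field.
            FieldType ft = new FieldType();
            ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
            ft.setTokenized(true);
            ft.setStoreTermVectors(true);
            ft.freeze();
            Document doc = new Document();
            doc.add(new Field("body", "hello hello world term vectors", ft));
            writer.addDocument(doc);
            writer.commit();

            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                LeafReader leaf = reader.leaves().get(0).reader();
                // Same pattern as the updated test: fetch the per-document term vector
                // and collect term -> frequency without any TermVectors* request/response.
                Terms terms = leaf.getTermVector(0, "body");
                Map<String, Integer> tv = new HashMap<>();
                if (terms != null) {
                    TermsEnum te = terms.iterator();
                    PostingsEnum pe = null;
                    BytesRef term;
                    while ((term = te.next()) != null) {
                        // A term vector is a tiny single-document "index", so the postings
                        // enum has exactly one doc; advance to it and read the frequency.
                        pe = te.postings(pe, PostingsEnum.FREQS);
                        pe.nextDoc();
                        tv.put(term.utf8ToString(), pe.freq());
                    }
                }
                System.out.println(tv); // prints something like {hello=2, world=1, term=1, vectors=1}
            }
        }
    }
}
```

Reading straight from the reader also removes the need to swallow IOExceptions inside the sub-phase, which is why process() and hitExecute() now declare throws IOException in the diff above.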
