From f3153aa44834e64c4ca79d193ef2256284cf8689 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 10 Sep 2020 14:05:26 +0100 Subject: [PATCH 01/10] WIP --- .../search/fetch/FetchContext.java | 86 +++++++++++++++++++ .../search/fetch/FetchPhase.java | 4 +- .../search/fetch/FetchSubPhase.java | 4 +- .../search/fetch/subphase/ExplainPhase.java | 3 +- .../fetch/subphase/FetchDocValuesPhase.java | 5 +- .../fetch/subphase/FetchSourcePhase.java | 9 +- .../fetch/subphase/MatchedQueriesPhase.java | 7 +- 7 files changed, 104 insertions(+), 14 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java new file mode 100644 index 0000000000000..bbdeb5e4e47dd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.fetch; + +import org.apache.lucene.search.Query; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.rescore.RescoreContext; + +import java.util.List; + +public class FetchContext { + + public FetchContext(SearchContext searchContext) { + this.searchContext = searchContext; + } + + public static FetchContext fromSearchContext(SearchContext context) { + return new FetchContext(context); + } + + private final SearchContext searchContext; + + public String getIndexName() { + return searchContext.indexShard().shardId().getIndexName(); + } + + public int getFetchSize() { + return searchContext.docIdsToLoadSize(); + } + + public ContextIndexSearcher searcher() { + return searchContext.searcher(); + } + + public Query query() { + return searchContext.query(); + } + + public ParsedQuery parsedQuery() { + return searchContext.parsedQuery(); + } + + public ParsedQuery parsedPostFilter() { + return searchContext.parsedPostFilter(); + } + + public boolean sourceRequested() { + return searchContext.sourceRequested(); + } + + public List rescore() { + return searchContext.rescore(); + } + + public FetchSourceContext fetchSourceContext() { + return searchContext.fetchSourceContext(); + } + + public boolean hasOnlySuggest() { + return searchContext.hasOnlySuggest(); + } + + public boolean explain() { + return searchContext.explain(); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 
d87e012aed157..7cb0d6bbe9c8f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -92,6 +92,8 @@ public void execute(SearchContext context) { Map> storedToRequestedFields = new HashMap<>(); FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); + FetchContext fetchContext = FetchContext.fromSearchContext(context); + try { DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; for (int index = 0; index < context.docIdsToLoadSize(); index++) { @@ -104,7 +106,7 @@ public void execute(SearchContext context) { List processors = new ArrayList<>(); for (FetchSubPhase fsp : fetchSubPhases) { - FetchSubPhaseProcessor processor = fsp.getProcessor(context); + FetchSubPhaseProcessor processor = fsp.getProcessor(fetchContext); if (processor != null) { processors.add(processor); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index 2a6c9bdf713df..9156b7fa04f94 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -95,8 +95,8 @@ public Map cache() { /** * Returns a {@link FetchSubPhaseProcessor} for this sub phase. * - * If nothing should be executed for the provided {@link SearchContext}, then the + * If nothing should be executed for the provided {@code FetchContext}, then the * implementation should return {@code null} */ - FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException; + FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index 69bde888854e1..e9800c6ca3469 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; @@ -34,7 +35,7 @@ public final class ExplainPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.explain() == false || context.hasOnlySuggest()) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index a3a5df6885a4e..a05e25c17e258 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import 
org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.internal.SearchContext; @@ -48,7 +49,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException { + public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field String name = context.collapse().getFieldName(); @@ -217,7 +218,7 @@ void setValues(int doc, DocumentField hitField) throws IOException { } - private static DocValueField buildField(SearchContext context, FieldAndFormat fieldAndFormat) { + private static DocValueField buildField(FetchContext context, FieldAndFormat fieldAndFormat) { MappedFieldType fieldType = context.mapperService().fieldType(fieldAndFormat.field); if (fieldType == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 9801f6d577827..4e083b313aec3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.internal.SearchContext; @@ -36,12 +37,12 @@ public final class FetchSourcePhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) { - if (searchContext.sourceRequested() == false) { + public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { + if (fetchContext.sourceRequested() == false) { return null; } - String index = searchContext.indexShard().shardId().getIndexName(); - FetchSourceContext fetchSourceContext = searchContext.fetchSourceContext(); + String index = fetchContext.getIndexName(); + FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext(); assert fetchSourceContext.fetchSource(); return new FetchSubPhaseProcessor() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 56c1561a7e09e..520d59b7e9d97 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.internal.SearchContext; @@ -38,10 +39,8 @@ public final class MatchedQueriesPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException { - if (context.docIdsToLoadSize() == 0 || - // in case the request has only suggest, parsed query is null - context.parsedQuery() == null) { + 
public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { + if (context.getFetchSize() == 0 || context.hasOnlySuggest()) { return null; } Map namedQueries = new HashMap<>(context.parsedQuery().namedFilters()); From 7966f1e4c2baedca0f5252745f5876cdf8fdfac5 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 10 Sep 2020 14:39:18 +0100 Subject: [PATCH 02/10] Rationalise fetch phase exceptions --- .../search/fetch/FetchPhase.java | 88 ++++---- .../search/fetch/subphase/ExplainPhase.java | 21 +- .../highlight/FastVectorHighlighter.java | 208 +++++++++--------- .../subphase/highlight/HighlightPhase.java | 3 +- .../fetch/subphase/highlight/Highlighter.java | 4 +- .../subphase/highlight/PlainHighlighter.java | 88 +++----- .../highlight/UnifiedHighlighter.java | 119 +++++----- 7 files changed, 249 insertions(+), 282 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index d87e012aed157..507b126bbf2d2 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; @@ -48,7 +47,6 @@ import org.elasticsearch.search.SearchContextSourcePrinter; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; @@ -92,31 +90,25 @@ public void execute(SearchContext context) { Map> storedToRequestedFields = new HashMap<>(); FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); - try { - DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; - for (int index = 0; index < context.docIdsToLoadSize(); index++) { - docs[index] = new DocIdToIndex(context.docIdsToLoad()[context.docIdsToLoadFrom() + index], index); - } - Arrays.sort(docs); + DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; + for (int index = 0; index < context.docIdsToLoadSize(); index++) { + docs[index] = new DocIdToIndex(context.docIdsToLoad()[context.docIdsToLoadFrom() + index], index); + } + Arrays.sort(docs); - SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; - Map sharedCache = new HashMap<>(); + SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; + Map sharedCache = new HashMap<>(); - List processors = new ArrayList<>(); - for (FetchSubPhase fsp : fetchSubPhases) { - FetchSubPhaseProcessor processor = fsp.getProcessor(context); - if (processor != null) { - processors.add(processor); - } - } + List processors = getProcessors(context); - int currentReaderIndex = -1; - LeafReaderContext currentReaderContext = null; - for (int index = 0; index < context.docIdsToLoadSize(); index++) { - if (context.isCancelled()) { - throw new TaskCancelledException("cancelled"); - } - int docId = docs[index].docId; + int currentReaderIndex = -1; + LeafReaderContext currentReaderContext = null; + for (int index = 0; index < 
context.docIdsToLoadSize(); index++) { + if (context.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } + int docId = docs[index].docId; + try { int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); if (currentReaderIndex != readerIndex) { currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); @@ -126,22 +118,37 @@ public void execute(SearchContext context) { } } assert currentReaderContext != null; - HitContext hit = prepareHitContext(context, fieldsVisitor, docId, storedToRequestedFields, currentReaderContext, sharedCache); for (FetchSubPhaseProcessor processor : processors) { processor.process(hit); } hits[docs[index].index] = hit.hit(); + } catch (IOException e) { + throw new FetchPhaseExecutionException(context.shardTarget(), "Error running fetch phase for doc [" + docId + "]", e); } - if (context.isCancelled()) { - throw new TaskCancelledException("cancelled"); - } + } + if (context.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } - TotalHits totalHits = context.queryResult().getTotalHits(); - context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore())); + TotalHits totalHits = context.queryResult().getTotalHits(); + context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore())); + + } + + List getProcessors(SearchContext context) { + try { + List processors = new ArrayList<>(); + for (FetchSubPhase fsp : fetchSubPhases) { + FetchSubPhaseProcessor processor = fsp.getProcessor(context); + if (processor != null) { + processors.add(processor); + } + } + return processors; } catch (IOException e) { - throw ExceptionsHelper.convertToElastic(e); + throw new FetchPhaseExecutionException(context.shardTarget(), "Error building fetch sub-phases", e); } } @@ -243,14 +250,14 @@ private HitContext prepareNonNestedHitContext(SearchContext context, int docId, Map> storedToRequestedFields, LeafReaderContext subReaderContext, - Map sharedCache) { + Map sharedCache) throws IOException { int subDocId = docId - subReaderContext.docBase; if (fieldsVisitor == null) { SearchHit hit = new SearchHit(docId, null, null, null); return new HitContext(hit, subReaderContext, subDocId, context.searcher(), sharedCache); } else { SearchHit hit; - loadStoredFields(context.shardTarget(), context.mapperService(), subReaderContext, fieldsVisitor, subDocId); + loadStoredFields(context.mapperService(), subReaderContext, fieldsVisitor, subDocId); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); Map metaFields = new HashMap<>(); @@ -306,7 +313,7 @@ private HitContext prepareNestedHitContext(SearchContext context, } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(context.shardTarget(), context.mapperService(), subReaderContext, rootFieldsVisitor, rootDocId); + loadStoredFields(context.mapperService(), subReaderContext, rootFieldsVisitor, rootDocId); rootId = rootFieldsVisitor.id(); if (needSource) { @@ -321,7 +328,7 @@ private HitContext prepareNestedHitContext(SearchContext context, Map metaFields = emptyMap(); if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(context.shardTarget(), context.mapperService(), subReaderContext, nestedFieldsVisitor, nestedDocId); + loadStoredFields(context.mapperService(), 
subReaderContext, nestedFieldsVisitor, nestedDocId); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -449,16 +456,11 @@ private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context return nestedIdentity; } - private void loadStoredFields(SearchShardTarget shardTarget, - MapperService mapperService, + private void loadStoredFields(MapperService mapperService, LeafReaderContext readerContext, - FieldsVisitor fieldVisitor, int docId) { + FieldsVisitor fieldVisitor, int docId) throws IOException { fieldVisitor.reset(); - try { - readerContext.reader().document(docId, fieldVisitor); - } catch (IOException e) { - throw new FetchPhaseExecutionException(shardTarget, "Failed to fetch doc id [" + docId + "]", e); - } + readerContext.reader().document(docId, fieldVisitor); fieldVisitor.postProcess(mapperService); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index 69bde888854e1..010b9a0e12894 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -20,7 +20,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; -import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.internal.SearchContext; @@ -45,21 +44,15 @@ public void setNextReader(LeafReaderContext readerContext) { } @Override - public void process(HitContext hitContext) { - try { - final int topLevelDocId = hitContext.hit().docId(); - Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); + public void process(HitContext hitContext) throws IOException { + final int topLevelDocId = hitContext.hit().docId(); + Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); - for (RescoreContext rescore : context.rescore()) { - explanation = rescore.rescorer().explain(topLevelDocId, context.searcher(), rescore, explanation); - } - // we use the top level doc id, since we work with the top level searcher - hitContext.hit().explanation(explanation); - } - catch (IOException e) { // TODO move this try-catch up into FetchPhase - throw new FetchPhaseExecutionException(context.shardTarget(), - "Failed to explain doc [" + hitContext.hit().getId() + "]", e); + for (RescoreContext rescore : context.rescore()) { + explanation = rescore.rescorer().explain(topLevelDocId, context.searcher(), rescore, explanation); } + // we use the top level doc id, since we work with the top level searcher + hitContext.hit().explanation(explanation); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 039837758b343..7e38d73de45d6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.vectorhighlight.BreakIteratorBoundaryScanner; import org.apache.lucene.search.vectorhighlight.CustomFieldQuery; import 
org.apache.lucene.search.vectorhighlight.FieldFragList; -import org.apache.lucene.search.vectorhighlight.FieldPhraseList.WeightedPhraseInfo; import org.apache.lucene.search.vectorhighlight.FieldQuery; import org.apache.lucene.search.vectorhighlight.FragListBuilder; import org.apache.lucene.search.vectorhighlight.FragmentsBuilder; @@ -39,11 +38,11 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; -import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.FieldOptions; +import java.io.IOException; import java.text.BreakIterator; import java.util.Collections; import java.util.HashMap; @@ -68,7 +67,7 @@ public FastVectorHighlighter(Settings settings) { } @Override - public HighlightField highlight(FieldHighlightContext fieldContext) { + public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException { SearchHighlightContext.Field field = fieldContext.field; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; MappedFieldType fieldType = fieldContext.fieldType; @@ -87,126 +86,119 @@ public HighlightField highlight(FieldHighlightContext fieldContext) { hitContext.cache().put(CACHE_KEY, new HighlighterEntry()); } HighlighterEntry cache = (HighlighterEntry) hitContext.cache().get(CACHE_KEY); - - try { - FieldHighlightEntry entry = cache.fields.get(fieldType); - if (entry == null) { - FragListBuilder fragListBuilder; - BaseFragmentsBuilder fragmentsBuilder; - - final BoundaryScanner boundaryScanner = getBoundaryScanner(field); - if (field.fieldOptions().numberOfFragments() == 0) { - fragListBuilder = new SingleFragListBuilder(); - + FieldHighlightEntry entry = cache.fields.get(fieldType); + if (entry == null) { + FragListBuilder fragListBuilder; + BaseFragmentsBuilder fragmentsBuilder; + + final BoundaryScanner boundaryScanner = getBoundaryScanner(field); + if (field.fieldOptions().numberOfFragments() == 0) { + fragListBuilder = new SingleFragListBuilder(); + + if (!forceSource && tsi.isStored()) { + fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); + } else { + fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(), + field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + } + } else { + fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? 
+ new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); + if (field.fieldOptions().scoreOrdered()) { if (!forceSource && tsi.isStored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), - field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(), - field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, hitContext.sourceLookup(), + field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } else { - fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? - new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); - if (field.fieldOptions().scoreOrdered()) { - if (!forceSource && tsi.isStored()) { - fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), - field.fieldOptions().postTags(), boundaryScanner); - } else { - fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, hitContext.sourceLookup(), - field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); - } + if (!forceSource && tsi.isStored()) { + fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } else { - if (!forceSource && tsi.isStored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), - field.fieldOptions().postTags(), boundaryScanner); - } else { - fragmentsBuilder = - new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(), - field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); - } + fragmentsBuilder = + new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(), + field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } - fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue); - entry = new FieldHighlightEntry(); - if (field.fieldOptions().requireFieldMatch()) { - /** - * we use top level reader to rewrite the query against all readers, - * with use caching it across hits (and across readers...) - */ - entry.fieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query, - hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); - } else { - /** - * we use top level reader to rewrite the query against all readers, - * with use caching it across hits (and across readers...) 
- */ - entry.noFieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query, - hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); - } - entry.fragListBuilder = fragListBuilder; - entry.fragmentsBuilder = fragmentsBuilder; - if (cache.fvh == null) { - // parameters to FVH are not requires since: - // first two booleans are not relevant since they are set on the CustomFieldQuery - // (phrase and fieldMatch) fragment builders are used explicitly - cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter(); - } - CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter()); - cache.fields.put(fieldType, entry); } - final FieldQuery fieldQuery; + fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue); + entry = new FieldHighlightEntry(); if (field.fieldOptions().requireFieldMatch()) { - fieldQuery = entry.fieldMatchFieldQuery; + /* + * we use the top level reader to rewrite the query against all readers, + * which lets us cache it across hits (and across readers...) + */ + entry.fieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query, + hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } else { - fieldQuery = entry.noFieldMatchFieldQuery; + /* + * we use the top level reader to rewrite the query against all readers, + * which lets us cache it across hits (and across readers...) + */ + entry.noFieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query, + hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } - cache.fvh.setPhraseLimit(field.fieldOptions().phraseLimit()); - - String[] fragments; - - // a HACK to make highlighter do highlighting, even though its using the single frag list builder - int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? - Integer.MAX_VALUE : field.fieldOptions().numberOfFragments(); - int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ? - Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize(); - // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible - // Only send matched fields if they were requested to save time. 
- if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), - fieldType.name(), field.fieldOptions().matchedFields(), fragmentCharSize, - numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), - field.fieldOptions().postTags(), encoder); - } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), - fieldType.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, - entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + entry.fragListBuilder = fragListBuilder; + entry.fragmentsBuilder = fragmentsBuilder; + if (cache.fvh == null) { + // parameters to FVH are not required since: + // the first two booleans (phrase and fieldMatch) are not relevant, as they are set on the CustomFieldQuery + // and the fragment builders are used explicitly + cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter(); } + CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter()); + cache.fields.put(fieldType, entry); + } + final FieldQuery fieldQuery; + if (field.fieldOptions().requireFieldMatch()) { + fieldQuery = entry.fieldMatchFieldQuery; + } else { + fieldQuery = entry.noFieldMatchFieldQuery; + } + cache.fvh.setPhraseLimit(field.fieldOptions().phraseLimit()); + + String[] fragments; + + // a HACK to make the highlighter do highlighting, even though it's using the single frag list builder + int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? + Integer.MAX_VALUE : field.fieldOptions().numberOfFragments(); + int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ? + Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize(); + // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible + // Only send matched fields if they were requested to save time. 
+ if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), + fieldType.name(), field.fieldOptions().matchedFields(), fragmentCharSize, + numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), encoder); + } else { + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), + fieldType.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, + entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + } + + if (CollectionUtils.isEmpty(fragments) == false) { + return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments)); + } + int noMatchSize = fieldContext.field.fieldOptions().noMatchSize(); + if (noMatchSize > 0) { + // Essentially we just request that a fragment is built from 0 to noMatchSize using + // the normal fragmentsBuilder + FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); + fieldFragList.add(0, noMatchSize, Collections.emptyList()); + fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), + fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), encoder); if (CollectionUtils.isEmpty(fragments) == false) { return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments)); } - - int noMatchSize = fieldContext.field.fieldOptions().noMatchSize(); - if (noMatchSize > 0) { - // Essentially we just request that a fragment is built from 0 to noMatchSize using - // the normal fragmentsBuilder - FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); - fieldFragList.add(0, noMatchSize, Collections.emptyList()); - fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), - fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(), - field.fieldOptions().postTags(), encoder); - if (CollectionUtils.isEmpty(fragments) == false) { - return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments)); - } - } - - return null; - - } catch (Exception e) { - throw new FetchPhaseExecutionException(fieldContext.shardTarget, - "Failed to highlight field [" + fieldContext.fieldName + "]", e); } + + return null; } @Override @@ -244,14 +236,14 @@ private static BoundaryScanner getBoundaryScanner(Field field) { } } - private class FieldHighlightEntry { + private static class FieldHighlightEntry { public FragListBuilder fragListBuilder; public FragmentsBuilder fragmentsBuilder; public FieldQuery noFieldMatchFieldQuery; public FieldQuery fieldMatchFieldQuery; } - private class HighlighterEntry { + private static class HighlighterEntry { public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh; public Map fields = new HashMap<>(); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index c97de89a65b16..4c169bc2d2167 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import 
org.elasticsearch.search.internal.SearchContext; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -65,7 +66,7 @@ public void setNextReader(LeafReaderContext readerContext) { } @Override - public void process(HitContext hitContext) { + public void process(HitContext hitContext) throws IOException { Map highlightFields = new HashMap<>(); for (String field : contextBuilders.keySet()) { FieldHighlightContext fieldContext = contextBuilders.get(field).apply(hitContext); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java index bcc964ae9aa12..0861865fdcdd9 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java @@ -20,12 +20,14 @@ import org.elasticsearch.index.mapper.MappedFieldType; +import java.io.IOException; + /** * Highlights a search result. */ public interface Highlighter { - HighlightField highlight(FieldHighlightContext fieldContext); + HighlightField highlight(FieldHighlightContext fieldContext) throws IOException; boolean canHighlight(MappedFieldType fieldType); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 7d42a53db652b..e78f3d9f9d3eb 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.Formatter; import org.apache.lucene.search.highlight.Fragmenter; +import org.apache.lucene.search.highlight.InvalidTokenOffsetsException; import org.apache.lucene.search.highlight.NullFragmenter; import org.apache.lucene.search.highlight.QueryScorer; import org.apache.lucene.search.highlight.SimpleFragmenter; @@ -33,18 +34,15 @@ import org.apache.lucene.search.highlight.TextFragment; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import java.io.IOException; import java.util.ArrayList; -import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -55,7 +53,7 @@ public class PlainHighlighter implements Highlighter { private static final String CACHE_KEY = "highlight-plain"; @Override - public HighlightField highlight(FieldHighlightContext fieldContext) { + public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException { SearchHighlightContext.Field field = fieldContext.field; QueryShardContext context = fieldContext.context; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; @@ -111,56 +109,42 @@ public HighlightField highlight(FieldHighlightContext fieldContext) { } final int maxAnalyzedOffset = 
context.getIndexSettings().getHighlightMaxAnalyzedOffset(); - try { - textsToHighlight = HighlightUtils.loadFieldValues(fieldType, hitContext, fieldContext.forceSource); + textsToHighlight = HighlightUtils.loadFieldValues(fieldType, hitContext, fieldContext.forceSource); - for (Object textToHighlight : textsToHighlight) { - String text = convertFieldValue(fieldType, textToHighlight); - int textLength = text.length(); - if (keywordIgnoreAbove != null && textLength > keywordIgnoreAbove) { - continue; // skip highlighting keyword terms that were ignored during indexing - } - if (textLength > maxAnalyzedOffset) { - throw new IllegalArgumentException( - "The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() + - "] doc of [" + context.index().getName() + "] index " + - "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " + - "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() + - "] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " + - "with unified or fvh highlighter is recommended!"); - } + for (Object textToHighlight : textsToHighlight) { + String text = convertFieldValue(fieldType, textToHighlight); + int textLength = text.length(); + if (keywordIgnoreAbove != null && textLength > keywordIgnoreAbove) { + continue; // skip highlighting keyword terms that were ignored during indexing + } + if (textLength > maxAnalyzedOffset) { + throw new IllegalArgumentException( + "The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() + + "] doc of [" + context.index().getName() + "] index " + + "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " + + "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() + + "] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " + + "with unified or fvh highlighter is recommended!"); + } - try (TokenStream tokenStream = analyzer.tokenStream(fieldType.name(), text)) { - if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { - // can't perform highlighting if the stream has no terms (binary token stream) or no offsets - continue; - } - TextFragment[] bestTextFragments = entry.getBestTextFragments(tokenStream, text, false, numberOfFragments); - for (TextFragment bestTextFragment : bestTextFragments) { - if (bestTextFragment != null && bestTextFragment.getScore() > 0) { - fragsList.add(bestTextFragment); - } + try (TokenStream tokenStream = analyzer.tokenStream(fieldType.name(), text)) { + if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { + // can't perform highlighting if the stream has no terms (binary token stream) or no offsets + continue; + } + TextFragment[] bestTextFragments = entry.getBestTextFragments(tokenStream, text, false, numberOfFragments); + for (TextFragment bestTextFragment : bestTextFragments) { + if (bestTextFragment != null && bestTextFragment.getScore() > 0) { + fragsList.add(bestTextFragment); } } - } - } catch (Exception e) { - if (ExceptionsHelper.unwrap(e, BytesRefHash.MaxBytesLengthExceededException.class) != null) { - // this can happen if for example a field is not_analyzed and ignore_above option is set. 
- // the field will be ignored when indexing but the huge term is still in the source and - // the plain highlighter will parse the source and try to analyze it. - return null; - } else { - throw new FetchPhaseExecutionException(fieldContext.shardTarget, - "Failed to highlight field [" + fieldContext.fieldName + "]", e); + } catch (InvalidTokenOffsetsException | BytesRefHash.MaxBytesLengthExceededException e) { + // ignore and continue to the next value } } + if (field.fieldOptions().scoreOrdered()) { - CollectionUtil.introSort(fragsList, new Comparator() { - @Override - public int compare(TextFragment o1, TextFragment o2) { - return Math.round(o2.getScore() - o1.getScore()); - } - }); + CollectionUtil.introSort(fragsList, (o1, o2) -> Math.round(o2.getScore() - o1.getScore())); } String[] fragments; // number_of_fragments is set to 0 but we have a multivalued field @@ -171,7 +155,7 @@ public int compare(TextFragment o1, TextFragment o2) { } } else { // refine numberOfFragments if needed - numberOfFragments = fragsList.size() < numberOfFragments ? fragsList.size() : numberOfFragments; + numberOfFragments = Math.min(fragsList.size(), numberOfFragments); fragments = new String[numberOfFragments]; for (int i = 0; i < fragments.length; i++) { fragments[i] = fragsList.get(i).toString(); @@ -186,13 +170,7 @@ public int compare(TextFragment o1, TextFragment o2) { if (noMatchSize > 0 && textsToHighlight.size() > 0) { // Pull an excerpt from the beginning of the string but make sure to split the string on a term boundary. String fieldContents = textsToHighlight.get(0).toString(); - int end; - try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); - } catch (Exception e) { - throw new FetchPhaseExecutionException(fieldContext.shardTarget, - "Failed to highlight field [" + fieldContext.fieldName + "]", e); - } + int end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); if (end > 0) { return new HighlightField(fieldContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index aa11898e3e80e..40907dea82544 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -61,66 +61,14 @@ public boolean canHighlight(MappedFieldType fieldType) { } @Override - public HighlightField highlight(FieldHighlightContext fieldContext) { + public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException { @SuppressWarnings("unchecked") Map cache = (Map) fieldContext.hitContext.cache() .computeIfAbsent(UnifiedHighlighter.class.getName(), k -> new HashMap<>()); - CustomUnifiedHighlighter highlighter = (CustomUnifiedHighlighter) cache.computeIfAbsent(fieldContext.fieldName, f -> { - Encoder encoder = fieldContext.field.fieldOptions().encoder().equals("html") - ? 
HighlightUtils.Encoders.HTML - : HighlightUtils.Encoders.DEFAULT; - int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset(); - int keywordIgnoreAbove = Integer.MAX_VALUE; - if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.getMapperService().documentMapper() - .mappers().getMapper(fieldContext.fieldName); - keywordIgnoreAbove = mapper.ignoreAbove(); - } - int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments(); - Analyzer analyzer = getAnalyzer(fieldContext.context.getMapperService().documentMapper()); - PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); - IndexSearcher searcher = fieldContext.context.searcher(); - OffsetSource offsetSource = getOffsetSource(fieldContext.fieldType); - BreakIterator breakIterator; - int higlighterNumberOfFragments; - if (numberOfFragments == 0 - // non-tokenized fields should not use any break iterator (ignore boundaryScannerType) - || fieldContext.fieldType.getTextSearchInfo().isTokenized() == false) { - /* - * We use a control char to separate values, which is the - * only char that the custom break iterator breaks the text - * on, so we don't lose the distinction between the different - * values of a field and we get back a snippet per value - */ - breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); - higlighterNumberOfFragments = numberOfFragments == 0 ? Integer.MAX_VALUE - 1 : numberOfFragments; - } else { - //using paragraph separator we make sure that each field value holds a discrete passage for highlighting - breakIterator = getBreakIterator(fieldContext.field); - higlighterNumberOfFragments = numberOfFragments; - } - try { - return new CustomUnifiedHighlighter( - searcher, - analyzer, - offsetSource, - passageFormatter, - fieldContext.field.fieldOptions().boundaryScannerLocale(), - breakIterator, - fieldContext.context.getFullyQualifiedIndex().getName(), - fieldContext.fieldName, - fieldContext.query, - fieldContext.field.fieldOptions().noMatchSize(), - higlighterNumberOfFragments, - fieldMatcher(fieldContext), - keywordIgnoreAbove, - maxAnalyzedOffset - ); - } catch (IOException e) { - throw new FetchPhaseExecutionException(fieldContext.shardTarget, - "Failed to highlight field [" + fieldContext.fieldName + "]", e); - } - }); + if (cache.containsKey(fieldContext.fieldName) == false) { + cache.put(fieldContext.fieldName, buildHighlighter(fieldContext)); + } + CustomUnifiedHighlighter highlighter = cache.get(fieldContext.fieldName); MappedFieldType fieldType = fieldContext.fieldType; SearchHighlightContext.Field field = fieldContext.field; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; @@ -166,10 +114,61 @@ public HighlightField highlight(FieldHighlightContext fieldContext) { return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments)); } + CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) throws IOException { + Encoder encoder = fieldContext.field.fieldOptions().encoder().equals("html") + ? 
HighlightUtils.Encoders.HTML + : HighlightUtils.Encoders.DEFAULT; + int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset(); + int keywordIgnoreAbove = Integer.MAX_VALUE; + if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) { + KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.getMapperService().documentMapper() + .mappers().getMapper(fieldContext.fieldName); + keywordIgnoreAbove = mapper.ignoreAbove(); + } + int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments(); + Analyzer analyzer = getAnalyzer(fieldContext.context.getMapperService().documentMapper()); + PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); + IndexSearcher searcher = fieldContext.context.searcher(); + OffsetSource offsetSource = getOffsetSource(fieldContext.fieldType); + BreakIterator breakIterator; + int higlighterNumberOfFragments; + if (numberOfFragments == 0 + // non-tokenized fields should not use any break iterator (ignore boundaryScannerType) + || fieldContext.fieldType.getTextSearchInfo().isTokenized() == false) { + /* + * We use a control char to separate values, which is the + * only char that the custom break iterator breaks the text + * on, so we don't lose the distinction between the different + * values of a field and we get back a snippet per value + */ + breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + higlighterNumberOfFragments = numberOfFragments == 0 ? Integer.MAX_VALUE - 1 : numberOfFragments; + } else { + //using paragraph separator we make sure that each field value holds a discrete passage for highlighting + breakIterator = getBreakIterator(fieldContext.field); + higlighterNumberOfFragments = numberOfFragments; + } + return new CustomUnifiedHighlighter( + searcher, + analyzer, + offsetSource, + passageFormatter, + fieldContext.field.fieldOptions().boundaryScannerLocale(), + breakIterator, + fieldContext.context.getFullyQualifiedIndex().getName(), + fieldContext.fieldName, + fieldContext.query, + fieldContext.field.fieldOptions().noMatchSize(), + higlighterNumberOfFragments, + fieldMatcher(fieldContext), + keywordIgnoreAbove, + maxAnalyzedOffset + ); + } + protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) { - CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], + return new CustomPassageFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder); - return passageFormatter; } @@ -244,7 +243,7 @@ protected OffsetSource getOffsetSource(MappedFieldType fieldType) { private Predicate fieldMatcher(FieldHighlightContext fieldContext) { if (fieldContext.field.fieldOptions().requireFieldMatch()) { String fieldName = fieldContext.fieldName; - return name -> fieldName.equals(name); + return fieldName::equals; } // ignore terms that targets the _id field since they use a different encoding // that is not compatible with utf8 From 66541df9770c8948285f419ba7e8d7931694dc1f Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Sat, 12 Sep 2020 16:46:38 +0100 Subject: [PATCH 03/10] Introduce FetchContext --- .../PercolatorHighlightSubFetchPhase.java | 17 ++-- .../PercolatorMatchedSlotSubFetchPhase.java | 6 +- ...PercolatorHighlightSubFetchPhaseTests.java | 18 ++--- ...rcolatorMatchedSlotSubFetchPhaseTests.java | 10 +-- .../search/fetch/FetchSubPhasePluginIT.java | 26 +++--- 
.../search/fetch/FetchContext.java | 79 +++++++++++++++++-- .../search/fetch/FetchSubPhase.java | 1 - .../fetch/subphase/FetchDocValuesPhase.java | 20 ++--- .../fetch/subphase/FetchFieldsPhase.java | 4 +- .../fetch/subphase/FetchScorePhase.java | 8 +- .../fetch/subphase/FetchSourcePhase.java | 5 +- .../fetch/subphase/FetchVersionPhase.java | 4 +- .../search/fetch/subphase/InnerHitsPhase.java | 4 +- .../fetch/subphase/MatchedQueriesPhase.java | 1 - .../fetch/subphase/ScriptFieldsPhase.java | 6 +- .../fetch/subphase/SeqNoPrimaryTermPhase.java | 4 +- .../highlight/FieldHighlightContext.java | 7 +- .../subphase/highlight/HighlightPhase.java | 28 +++---- .../subphase/highlight/PlainHighlighter.java | 10 +-- .../highlight/UnifiedHighlighter.java | 15 +--- .../fetch/subphase/FetchSourcePhaseTests.java | 37 ++------- 21 files changed, 160 insertions(+), 150 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 10fe3c0cf3c8e..53dd2ba8bce94 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -25,15 +25,14 @@ import org.apache.lucene.search.QueryVisitor; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -54,11 +53,11 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { } @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException { - if (searchContext.highlight() == null) { + public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { + if (fetchContext.highlight() == null) { return null; } - List percolateQueries = locatePercolatorQuery(searchContext.query()); + List percolateQueries = locatePercolatorQuery(fetchContext.query()); if (percolateQueries.isEmpty()) { return null; } @@ -67,7 +66,7 @@ public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws I LeafReaderContext ctx; @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { + public void setNextReader(LeafReaderContext readerContext) { this.ctx = readerContext; } @@ -99,10 +98,8 @@ public void process(HitContext hit) throws IOException { ); subContext.sourceLookup().setSource(document); // force source because MemoryIndex does not store fields - SearchHighlightContext highlight = new SearchHighlightContext(searchContext.highlight().fields(), true); - QueryShardContext shardContext = new QueryShardContext(searchContext.getQueryShardContext()); - FetchSubPhaseProcessor processor = highlightPhase.getProcessor(shardContext, 
searchContext.shardTarget(), - highlight, query); + SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true); + FetchSubPhaseProcessor processor = highlightPhase.getProcessor(fetchContext, highlight, query); processor.process(subContext); for (Map.Entry entry : subContext.hit().getHighlightFields().entrySet()) { if (percolateQuery.getDocuments().size() == 1) { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 56ba934486cec..ae1ccd872fa56 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -33,9 +33,9 @@ import org.apache.lucene.util.BitSetIterator; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -56,10 +56,10 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase { static final String FIELD_NAME_PREFIX = "_percolator_document_slot"; @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException { + public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException { List percolateContexts = new ArrayList<>(); - List percolateQueries = locatePercolatorQuery(searchContext.query()); + List percolateQueries = locatePercolatorQuery(fetchContext.query()); boolean singlePercolateQuery = percolateQueries.size() == 1; for (PercolateQuery pq : percolateQueries) { percolateContexts.add(new PercolateContext(pq, singlePercolateQuery)); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index 15d5682725dc1..656d850e50948 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -28,12 +28,11 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; @@ -41,20 +40,21 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { - public void testHitsExecutionNeeded() throws IOException { + public void testHitsExecutionNeeded() { PercolateQuery percolateQuery = new 
PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap()); - SearchContext searchContext = Mockito.mock(SearchContext.class); - Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList())); - Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery()); + FetchContext fetchContext = mock(FetchContext.class); + Mockito.when(fetchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList())); + Mockito.when(fetchContext.query()).thenReturn(new MatchAllDocsQuery()); - assertNull(subFetchPhase.getProcessor(searchContext)); - Mockito.when(searchContext.query()).thenReturn(percolateQuery); - assertNotNull(subFetchPhase.getProcessor(searchContext)); + assertNull(subFetchPhase.getProcessor(fetchContext)); + Mockito.when(fetchContext.query()).thenReturn(percolateQuery); + assertNotNull(subFetchPhase.getProcessor(fetchContext)); } public void testLocatePercolatorQuery() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index bf53b8393c461..3b886d0ee5e9d 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -26,7 +26,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.ScoreDoc; @@ -37,9 +36,9 @@ import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -62,7 +61,6 @@ public void testHitsExecute() throws Exception { PercolatorMatchedSlotSubFetchPhase phase = new PercolatorMatchedSlotSubFetchPhase(); try (DirectoryReader reader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = new IndexSearcher(reader); LeafReaderContext context = reader.leaves().get(0); // A match: { @@ -74,7 +72,7 @@ public void testHitsExecute() throws Exception { PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); - SearchContext sc = mock(SearchContext.class); + FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); FetchSubPhaseProcessor processor = phase.getProcessor(sc); @@ -95,7 +93,7 @@ public void testHitsExecute() throws Exception { PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); - SearchContext sc = 
mock(SearchContext.class); + FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); FetchSubPhaseProcessor processor = phase.getProcessor(sc); @@ -115,7 +113,7 @@ public void testHitsExecute() throws Exception { PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); - SearchContext sc = mock(SearchContext.class); + FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); FetchSubPhaseProcessor processor = phase.getProcessor(sc); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 9ed2a5397a12b..1bc5d03631fad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -19,26 +19,22 @@ package org.elasticsearch.search.fetch; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.termvectors.TermVectorsRequest; -import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -114,7 +110,7 @@ private static final class TermVectorsFetchSubPhase implements FetchSubPhase { private static final String NAME = "term_vectors_fetch"; @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) { + public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { return new FetchSubPhaseProcessor() { @Override public void setNextReader(LeafReaderContext readerContext) { @@ -122,13 +118,13 @@ public void setNextReader(LeafReaderContext readerContext) { } @Override - public void process(HitContext hitContext) { + public void process(HitContext hitContext) throws IOException { hitExecute(searchContext, hitContext); } }; } - private void hitExecute(SearchContext context, HitContext hitContext) { + private void hitExecute(FetchContext context, HitContext hitContext) throws IOException { TermVectorsFetchBuilder fetchSubPhaseBuilder = (TermVectorsFetchBuilder)context.getSearchExt(NAME); if (fetchSubPhaseBuilder == null) { return; @@ -139,19 +135,15 @@ private void hitExecute(SearchContext context, HitContext hitContext) { hitField = new DocumentField(NAME, new ArrayList<>(1)); 
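
A side benefit of the new class, visible in the rewritten tests above: because FetchContext is a small concrete accessor class, a unit test can stub just the methods a sub phase reads instead of faking the sprawling SearchContext. A minimal, self-contained sketch of the pattern (illustrative only, not part of the change set; the test class name is invented):

    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.test.ESTestCase;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class FetchContextStubTests extends ESTestCase {

        public void testStubbedFetchContext() {
            // Stub only the accessors the sub phase under test actually reads;
            // unstubbed methods on the mock return null/false/0 by default.
            FetchContext fetchContext = mock(FetchContext.class);
            when(fetchContext.query()).thenReturn(new MatchAllDocsQuery());
            when(fetchContext.getIndexName()).thenReturn("test-index");

            assertEquals("test-index", fetchContext.getIndexName());
        }
    }
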
hitContext.hit().setDocumentField(NAME, hitField); } - TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(), - hitContext.hit().getId()); - TermVectorsResponse termVector = TermVectorsService.getTermVectors(context.indexShard(), termVectorsRequest); - try { + Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field); + if (terms != null) { + TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); - TermsEnum terms = termVector.getFields().terms(field).iterator(); BytesRef term; - while ((term = terms.next()) != null) { - tv.put(term.utf8ToString(), terms.postings(null, PostingsEnum.ALL).freq()); + while ((term = te.next()) != null) { + tv.put(term.utf8ToString(), te.postings(null, PostingsEnum.ALL).freq()); } hitField.getValues().add(tv); - } catch (IOException e) { - LogManager.getLogger(FetchSubPhasePluginIT.class).info("Swallowed exception", e); } } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index bbdeb5e4e47dd..f4bcfde0816c1 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -20,8 +20,19 @@ package org.elasticsearch.search.fetch; import org.apache.lucene.search.Query; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.search.SearchExtBuilder; +import org.elasticsearch.search.collapse.CollapseContext; +import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; +import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescoreContext; @@ -48,10 +59,26 @@ public int getFetchSize() { return searchContext.docIdsToLoadSize(); } + public boolean hasOnlySuggest() { + return searchContext.hasOnlySuggest(); + } + public ContextIndexSearcher searcher() { return searchContext.searcher(); } + public MapperService mapperService() { + return searchContext.mapperService(); + } + + public IndexSettings getIndexSettings() { + return mapperService().getIndexSettings(); + } + + public IndexFieldData getForField(MappedFieldType fieldType) { + return searchContext.getForField(fieldType); + } + public Query query() { return searchContext.query(); } @@ -64,23 +91,59 @@ public ParsedQuery parsedPostFilter() { return searchContext.parsedPostFilter(); } - public boolean sourceRequested() { - return searchContext.sourceRequested(); + public FetchSourceContext fetchSourceContext() { + return searchContext.fetchSourceContext(); + } + + public boolean explain() { + return searchContext.explain(); } public List rescore() { return searchContext.rescore(); } - public FetchSourceContext fetchSourceContext() { - return searchContext.fetchSourceContext(); + public boolean seqNoAndPrimaryTerm() { + return 
searchContext.seqNoAndPrimaryTerm(); } - public boolean hasOnlySuggest() { - return searchContext.hasOnlySuggest(); + public CollapseContext collapse() { + return searchContext.collapse(); } - public boolean explain() { - return searchContext.explain(); + public FetchDocValuesContext docValuesContext() { + return searchContext.docValuesContext(); + } + + public SearchHighlightContext highlight() { + return searchContext.highlight(); + } + + public boolean fetchScores() { + return getFetchSize() > 0 && searchContext.sort() != null && searchContext.trackScores(); + } + + public InnerHitsContext innerHits() { + return searchContext.innerHits(); + } + + public boolean version() { + return searchContext.version(); + } + + public StoredFieldsContext storedFieldsContext() { + return searchContext.storedFieldsContext(); + } + + public FetchFieldsContext fetchFieldsContext() { + return searchContext.fetchFieldsContext(); + } + + public ScriptFieldsContext scriptFields() { + return searchContext.scriptFields(); + } + + public SearchExtBuilder getSearchExt(String name) { + return searchContext.getSearchExt(name); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index 9156b7fa04f94..9351037a8475e 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index a05e25c17e258..cb33737651580 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -31,7 +31,6 @@ import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -49,24 +48,27 @@ public final class FetchDocValuesPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { + + FetchDocValuesContext dvContext = context.docValuesContext(); + if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field String name = context.collapse().getFieldName(); - if (context.docValuesContext() == null) { - context.docValuesContext(new FetchDocValuesContext( - Collections.singletonList(new FieldAndFormat(name, null)))); + if (dvContext == null) { + dvContext = new FetchDocValuesContext( + Collections.singletonList(new FieldAndFormat(name, null))); } else if (context.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) { - context.docValuesContext().fields().add(new FieldAndFormat(name, null)); + dvContext.fields().add(new FieldAndFormat(name, null)); } } - if (context.docValuesContext() == null) { + if (dvContext == 
null) { return null; } List fields = new ArrayList<>(); - for (FieldAndFormat fieldAndFormat : context.docValuesContext().fields()) { + for (FieldAndFormat fieldAndFormat : dvContext.fields()) { DocValueField f = buildField(context, fieldAndFormat); if (f != null) { fields.add(f); @@ -75,7 +77,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOExcept return new FetchSubPhaseProcessor() { @Override - public void setNextReader(LeafReaderContext readerContext) throws IOException { + public void setNextReader(LeafReaderContext readerContext) { for (DocValueField f : fields) { f.setNextReader(readerContext); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index ad3827b2879c9..486e95d3698bf 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -23,9 +23,9 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.util.HashSet; @@ -39,7 +39,7 @@ public final class FetchFieldsPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) { + public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { FetchFieldsContext fetchFieldsContext = searchContext.fetchFieldsContext(); if (fetchFieldsContext == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java index 8f59a771da5d5..a30d80070d1fa 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java @@ -25,19 +25,17 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; public class FetchScorePhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException { - if (context.trackScores() == false || context.docIdsToLoadSize() == 0 || - // scores were already computed since they are needed on the coordinated node to merge top hits - context.sort() == null) { + public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { + if (context.fetchScores() == false) { return null; } final IndexSearcher searcher = context.searcher(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 4e083b313aec3..8dd99691ddcb6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -38,11 +37,11 @@ public final class FetchSourcePhase implements FetchSubPhase { @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { - if (fetchContext.sourceRequested() == false) { + FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext(); + if (fetchSourceContext == null || fetchSourceContext.fetchSource() == false) { return null; } String index = fetchContext.getIndexName(); - FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext(); assert fetchSourceContext.fetchSource(); return new FetchSubPhaseProcessor() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java index 394d534b56e3c..ba02491ee7c8f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java @@ -22,16 +22,16 @@ import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.mapper.VersionFieldMapper; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; public final class FetchVersionPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.version() == false || (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) { return null; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index 9d6efa92818da..c6d05a06fe7f2 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -25,11 +25,11 @@ import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -45,7 +45,7 @@ public InnerHitsPhase(FetchPhase fetchPhase) { } @Override - public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) { + public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { if (searchContext.innerHits() == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java 
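
The sub phases above all follow the same convention, which the new interface makes explicit: getProcessor() returns null when the request does not enable the feature, and the fetch phase simply skips null processors. A bare-bones sketch of that contract (hypothetical phase, not part of the change set):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.search.fetch.FetchSubPhase;
    import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;

    public final class ExampleGuardedPhase implements FetchSubPhase {

        @Override
        public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException {
            if (context.version() == false) {
                return null; // feature not requested: the whole phase is skipped
            }
            return new FetchSubPhaseProcessor() {
                @Override
                public void setNextReader(LeafReaderContext readerContext) {
                    // per-segment setup, e.g. re-acquire doc values for the new leaf
                }

                @Override
                public void process(HitContext hitContext) {
                    // per-hit work; hits arrive sorted by doc id within each segment
                }
            };
        }
    }
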
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 520d59b7e9d97..381dbf202296a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java index 4145b1d4e397e..bdfb1223d58df 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -22,9 +22,9 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.script.FieldScript; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -35,8 +35,8 @@ public final class ScriptFieldsPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) { - if (context.hasScriptFields() == false) { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { + if (context.scriptFields() == null) { return null; } List scriptFields = context.scriptFields().fields(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java index 5ed4a6ca9b670..90da5267443e1 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java @@ -22,16 +22,16 @@ import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; public final class SeqNoPrimaryTermPhase implements FetchSubPhase { @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.seqNoAndPrimaryTerm() == false) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java index 670a5ccfec1ff..4e3ef9af3a757 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java @@ -20,8 +20,7 @@ import org.apache.lucene.search.Query; import 
org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; public class FieldHighlightContext { @@ -29,7 +28,7 @@ public class FieldHighlightContext { public final String fieldName; public final SearchHighlightContext.Field field; public final MappedFieldType fieldType; - public final QueryShardContext context; + public final FetchContext context; public final FetchSubPhase.HitContext hitContext; public final Query query; public final boolean forceSource; @@ -37,7 +36,7 @@ public class FieldHighlightContext { public FieldHighlightContext(String fieldName, SearchHighlightContext.Field field, MappedFieldType fieldType, - QueryShardContext context, + FetchContext context, FetchSubPhase.HitContext hitContext, Query query, boolean forceSource) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 96bf2a0393506..3235e099137ef 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -26,11 +26,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -49,16 +47,16 @@ public HighlightPhase(Map highlighters) { } @Override - public FetchSubPhaseProcessor getProcessor(SearchContext context) { + public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.highlight() == null) { return null; } - return getProcessor(context.getQueryShardContext(), context.shardTarget(), context.highlight(), context.parsedQuery().query()); + return getProcessor(context, context.highlight(), context.parsedQuery().query()); } - public FetchSubPhaseProcessor getProcessor(QueryShardContext qsc, SearchShardTarget target, SearchHighlightContext hc, Query query) { - Map> contextBuilders = contextBuilders(qsc, target, hc, query); + public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchHighlightContext highlightContext, Query query) { + Map> contextBuilders = contextBuilders(context, highlightContext, query); return new FetchSubPhaseProcessor() { @Override public void setNextReader(LeafReaderContext readerContext) { @@ -98,21 +96,21 @@ private Highlighter getHighlighter(SearchHighlightContext.Field field) { return highlighter; } - private Map> contextBuilders(QueryShardContext context, - SearchHighlightContext highlight, + private Map> contextBuilders(FetchContext context, + SearchHighlightContext highlightContext, Query query) { Map> builders = new LinkedHashMap<>(); - for (SearchHighlightContext.Field field : highlight.fields()) { + for (SearchHighlightContext.Field field : highlightContext.fields()) { Highlighter highlighter = getHighlighter(field); Collection fieldNamesToHighlight; if 
(Regex.isSimpleMatchPattern(field.field())) { - fieldNamesToHighlight = context.getMapperService().simpleMatchToFullName(field.field()); + fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field()); } else { fieldNamesToHighlight = Collections.singletonList(field.field()); } - if (highlight.forceSource(field)) { - SourceFieldMapper sourceFieldMapper = context.getMapperService().documentMapper().sourceMapper(); + if (highlightContext.forceSource(field)) { + SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper().sourceMapper(); if (sourceFieldMapper.enabled() == false) { throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but _source is disabled"); @@ -121,7 +119,7 @@ private Map> contextBuilders boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { - MappedFieldType fieldType = context.getMapperService().fieldType(fieldName); + MappedFieldType fieldType = context.mapperService().fieldType(fieldName); if (fieldType == null) { continue; } @@ -146,7 +144,7 @@ private Map> contextBuilders Query highlightQuery = field.fieldOptions().highlightQuery(); - boolean forceSource = highlight.forceSource(field); + boolean forceSource = highlightContext.forceSource(field); builders.put(fieldName, hc -> new FieldHighlightContext(fieldType.name(), field, fieldType, context, hc, highlightQuery == null ? query : highlightQuery, forceSource)); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index e78f3d9f9d3eb..9a584a36b97c7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -38,7 +38,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import java.io.IOException; @@ -55,7 +55,7 @@ public class PlainHighlighter implements Highlighter { @Override public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException { SearchHighlightContext.Field field = fieldContext.field; - QueryShardContext context = fieldContext.context; + FetchContext context = fieldContext.context; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; MappedFieldType fieldType = fieldContext.fieldType; @@ -100,10 +100,10 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 
1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = context.getMapperService().documentMapper().mappers().indexAnalyzer(); + Analyzer analyzer = context.mapperService().documentMapper().mappers().indexAnalyzer(); Integer keywordIgnoreAbove = null; if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper mapper = (KeywordFieldMapper) context.getMapperService().documentMapper() + KeywordFieldMapper mapper = (KeywordFieldMapper) context.mapperService().documentMapper() .mappers().getMapper(fieldContext.fieldName); keywordIgnoreAbove = mapper.ignoreAbove(); } @@ -120,7 +120,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc if (textLength > maxAnalyzedOffset) { throw new IllegalArgumentException( "The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() + - "] doc of [" + context.index().getName() + "] index " + + "] doc of [" + context.getIndexName() + "] index " + "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " + "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() + "] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " + diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 40907dea82544..6c6b0e7c0ec4a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; -import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; @@ -80,13 +79,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc } return mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); }; - Snippet[] fieldSnippets; - try { - fieldSnippets = highlighter.highlightField(hitContext.reader(), hitContext.docId(), loadFieldValues); - } catch (IOException e) { - throw new FetchPhaseExecutionException(fieldContext.shardTarget, - "Failed to highlight field [" + fieldContext.fieldName + "]", e); - } + Snippet[] fieldSnippets = highlighter.highlightField(hitContext.reader(), hitContext.docId(), loadFieldValues); if (fieldSnippets == null || fieldSnippets.length == 0) { return null; @@ -121,12 +114,12 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset(); int keywordIgnoreAbove = Integer.MAX_VALUE; if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.getMapperService().documentMapper() + KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.mapperService().documentMapper() .mappers().getMapper(fieldContext.fieldName); keywordIgnoreAbove = mapper.ignoreAbove(); } int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments(); - Analyzer analyzer = 
getAnalyzer(fieldContext.context.getMapperService().documentMapper()); + Analyzer analyzer = getAnalyzer(fieldContext.context.mapperService().documentMapper()); PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); IndexSearcher searcher = fieldContext.context.searcher(); OffsetSource offsetSource = getOffsetSource(fieldContext.fieldType); @@ -155,7 +148,7 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th passageFormatter, fieldContext.field.fieldOptions().boundaryScannerLocale(), breakIterator, - fieldContext.context.getFullyQualifiedIndex().getName(), + fieldContext.context.getIndexName(), fieldContext.fieldName, fieldContext.query, fieldContext.field.fieldOptions().noMatchSize(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index c4fcfd87f1e62..cb9a1c03beddd 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -25,14 +25,11 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.Collections; @@ -152,7 +149,9 @@ private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSourc private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes, SearchHit.NestedIdentity nestedIdentity) throws IOException { FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes); - SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext); + FetchContext fetchContext = mock(FetchContext.class); + when(fetchContext.fetchSourceContext()).thenReturn(fetchSourceContext); + when(fetchContext.getIndexName()).thenReturn("index"); final SearchHit searchHit = new SearchHit(1, null, nestedIdentity, null, null); @@ -163,7 +162,7 @@ private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSourc hitContext.sourceLookup().setSource(source == null ? 
null : BytesReference.bytes(source)); FetchSourcePhase phase = new FetchSourcePhase(); - FetchSubPhaseProcessor processor = phase.getProcessor(searchContext); + FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext); if (fetchSource == false) { assertNull(processor); } else { @@ -173,30 +172,4 @@ private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSourc return hitContext; } - private static class FetchSourcePhaseTestSearchContext extends TestSearchContext { - final FetchSourceContext context; - final IndexShard indexShard; - - FetchSourcePhaseTestSearchContext(FetchSourceContext context) { - super(null); - this.context = context; - this.indexShard = mock(IndexShard.class); - when(indexShard.shardId()).thenReturn(new ShardId("index", "index", 1)); - } - - @Override - public boolean sourceRequested() { - return context != null && context.fetchSource(); - } - - @Override - public FetchSourceContext fetchSourceContext() { - return context; - } - - @Override - public IndexShard indexShard() { - return indexShard; - } - } } From 8df15359f34d4094567b879e4be2b00ab44856d0 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Sun, 13 Sep 2020 15:56:19 +0100 Subject: [PATCH 04/10] reduce our surface area a little --- .../search/fetch/FetchContext.java | 37 +++++++++---------- .../search/fetch/FetchPhase.java | 15 ++++++-- .../search/fetch/subphase/ExplainPhase.java | 2 +- .../fetch/subphase/FetchDocValuesContext.java | 2 +- .../fetch/subphase/FetchDocValuesPhase.java | 12 ------ .../fetch/subphase/FetchVersionPhase.java | 3 +- .../fetch/subphase/MatchedQueriesPhase.java | 3 -- 7 files changed, 32 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index f4bcfde0816c1..1b33f2c0c5edd 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; @@ -37,6 +38,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescoreContext; +import java.util.Collections; import java.util.List; public class FetchContext { @@ -55,14 +57,6 @@ public String getIndexName() { return searchContext.indexShard().shardId().getIndexName(); } - public int getFetchSize() { - return searchContext.docIdsToLoadSize(); - } - - public boolean hasOnlySuggest() { - return searchContext.hasOnlySuggest(); - } - public ContextIndexSearcher searcher() { return searchContext.searcher(); } @@ -107,12 +101,18 @@ public boolean seqNoAndPrimaryTerm() { return searchContext.seqNoAndPrimaryTerm(); } - public CollapseContext collapse() { - return searchContext.collapse(); - } - public FetchDocValuesContext docValuesContext() { - return searchContext.docValuesContext(); + FetchDocValuesContext dvContext = searchContext.docValuesContext(); + if (searchContext.collapse() != null) { + // retrieve the `doc_value` associated with the collapse field + String 
name = searchContext.collapse().getFieldName(); + if (dvContext == null) { + return new FetchDocValuesContext(Collections.singletonList(new FieldAndFormat(name, null))); + } else if (searchContext.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) { + dvContext.fields().add(new FieldAndFormat(name, null)); + } + } + return dvContext; } public SearchHighlightContext highlight() { @@ -120,7 +120,7 @@ public SearchHighlightContext highlight() { } public boolean fetchScores() { - return getFetchSize() > 0 && searchContext.sort() != null && searchContext.trackScores(); + return searchContext.sort() != null && searchContext.trackScores(); } public InnerHitsContext innerHits() { @@ -128,11 +128,10 @@ public InnerHitsContext innerHits() { } public boolean version() { - return searchContext.version(); - } - - public StoredFieldsContext storedFieldsContext() { - return searchContext.storedFieldsContext(); + // TODO version is loaded from docvalues, not stored fields, so why are we checking + // stored fields here? + return searchContext.version() && + (searchContext.storedFieldsContext() == null || searchContext.storedFieldsContext().fetchFields()); } public FetchFieldsContext fetchFieldsContext() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 90a2d5709743b..5654a6618c236 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -88,10 +88,12 @@ public void execute(SearchContext context) { LOGGER.trace("{}", new SearchContextSourcePrinter(context)); } - Map> storedToRequestedFields = new HashMap<>(); - FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); - - FetchContext fetchContext = FetchContext.fromSearchContext(context); + if (context.docIdsToLoadSize() == 0) { + // no individual hits to process, so we shortcut + context.fetchResult().hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), + context.queryResult().getMaxScore())); + return; + } DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; for (int index = 0; index < context.docIdsToLoadSize(); index++) { @@ -99,6 +101,11 @@ public void execute(SearchContext context) { } Arrays.sort(docs); + Map> storedToRequestedFields = new HashMap<>(); + FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); + + FetchContext fetchContext = FetchContext.fromSearchContext(context); + SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; Map sharedCache = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index 6310288dbf781..760a32d7e02c6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -34,7 +34,7 @@ public final class ExplainPhase implements FetchSubPhase { @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { - if (context.explain() == false || context.hasOnlySuggest()) { + if (context.explain() == false) { return null; } return new FetchSubPhaseProcessor() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java 
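
Folding the collapse handling into FetchContext.docValuesContext() (above) turns it into a simple invariant: by the time FetchDocValuesPhase sees the context, the collapse field is always present among the doc-values fields, whether or not the request listed it. A sketch of that invariant as a standalone check (assumes a search collapsing on a hypothetical "user_id" field; not part of the change set):

    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;

    public class CollapseDocValuesInvariant {

        // Given a FetchContext whose search collapses on "user_id", the merged
        // doc-values context must mention that field.
        static boolean collapseFieldPresent(FetchContext fetchContext) {
            FetchDocValuesContext dvContext = fetchContext.docValuesContext();
            return dvContext != null
                && dvContext.fields().stream().map(ff -> ff.field).anyMatch("user_id"::equals);
        }
    }
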
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java index 3ab3003a27f6b..e207976240d92 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesContext.java @@ -52,7 +52,7 @@ public static FetchDocValuesContext create(MapperService mapperService, return new FetchDocValuesContext(fields); } - FetchDocValuesContext(List fields) { + public FetchDocValuesContext(List fields) { this.fields = fields; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index cb33737651580..404a54f1c9010 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -51,18 +51,6 @@ public final class FetchDocValuesPhase implements FetchSubPhase { public FetchSubPhaseProcessor getProcessor(FetchContext context) { FetchDocValuesContext dvContext = context.docValuesContext(); - - if (context.collapse() != null) { - // retrieve the `doc_value` associated with the collapse field - String name = context.collapse().getFieldName(); - if (dvContext == null) { - dvContext = new FetchDocValuesContext( - Collections.singletonList(new FieldAndFormat(name, null))); - } else if (context.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) { - dvContext.fields().add(new FieldAndFormat(name, null)); - } - } - if (dvContext == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java index ba02491ee7c8f..76b887cf03384 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java @@ -32,8 +32,7 @@ public final class FetchVersionPhase implements FetchSubPhase { @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { - if (context.version() == false || - (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) { + if (context.version() == false) { return null; } return new FetchSubPhaseProcessor() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 381dbf202296a..81a6c6ebfbce7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -39,9 +39,6 @@ public final class MatchedQueriesPhase implements FetchSubPhase { @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { - if (context.getFetchSize() == 0 || context.hasOnlySuggest()) { - return null; - } Map namedQueries = new HashMap<>(context.parsedQuery().namedFilters()); if (context.parsedPostFilter() != null) { namedQueries.putAll(context.parsedPostFilter().namedFilters()); From e66798aee28445ef9039c4a238c94b88e999c8c5 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 14 Sep 2020 16:15:55 +0100 Subject: [PATCH 05/10] null check on matchedqueries --- 
.../search/fetch/subphase/MatchedQueriesPhase.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 81a6c6ebfbce7..7ac796c30be23 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -39,7 +39,10 @@ public final class MatchedQueriesPhase implements FetchSubPhase { @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { - Map namedQueries = new HashMap<>(context.parsedQuery().namedFilters()); + Map namedQueries = new HashMap<>(); + if (context.parsedQuery() != null) { + namedQueries.putAll(context.parsedQuery().namedFilters()); + } if (context.parsedPostFilter() != null) { namedQueries.putAll(context.parsedPostFilter().namedFilters()); } From d292e4ff61b6d68e20c9cb2ed954f418d27351a6 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 15 Sep 2020 09:48:01 +0100 Subject: [PATCH 06/10] javadocs --- .../search/fetch/FetchContext.java | 69 ++++++++++++++++++- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index 1b33f2c0c5edd..2ebb9629e9914 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.SearchExtBuilder; -import org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext; import org.elasticsearch.search.fetch.subphase.FetchFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; @@ -41,66 +40,111 @@ import java.util.Collections; import java.util.List; +/** + * Encapsulates state required to execute fetch phases + */ public class FetchContext { - public FetchContext(SearchContext searchContext) { + private FetchContext(SearchContext searchContext) { this.searchContext = searchContext; } + /** + * Create a FetchContext based on a SearchContext + */ public static FetchContext fromSearchContext(SearchContext context) { return new FetchContext(context); } private final SearchContext searchContext; + /** + * The name of the index that documents are being fetched from + */ public String getIndexName() { return searchContext.indexShard().shardId().getIndexName(); } + /** + * The point-in-time searcher the original query was executed against + */ public ContextIndexSearcher searcher() { return searchContext.searcher(); } + /** + * The mapper service for the index we are fetching documents from + */ public MapperService mapperService() { return searchContext.mapperService(); } + /** + * The index settings for the index we are fetching documents from + */ public IndexSettings getIndexSettings() { return mapperService().getIndexSettings(); } + /** + * Gets index field data for a specific fieldtype + */ public IndexFieldData getForField(MappedFieldType fieldType) { return searchContext.getForField(fieldType); } + /** + * The original query + */ public Query query() { return searchContext.query(); } + /** + * The original query with 
additional filters and named queries + */ public ParsedQuery parsedQuery() { return searchContext.parsedQuery(); } + /** + * Any post-filters run as part of the search + */ public ParsedQuery parsedPostFilter() { return searchContext.parsedPostFilter(); } + /** + * Configuration for fetching _source + */ public FetchSourceContext fetchSourceContext() { return searchContext.fetchSourceContext(); } + /** + * Should the response include `explain` output + */ public boolean explain() { return searchContext.explain(); } + /** + * The rescorers included in the original search, used for explain output + */ public List rescore() { return searchContext.rescore(); } + /** + * Should the response include sequence number and primary term metadata + */ public boolean seqNoAndPrimaryTerm() { return searchContext.seqNoAndPrimaryTerm(); } + /** + * Configuration for fetching docValues fields + */ public FetchDocValuesContext docValuesContext() { FetchDocValuesContext dvContext = searchContext.docValuesContext(); if (searchContext.collapse() != null) { @@ -115,18 +159,30 @@ public FetchDocValuesContext docValuesContext() { return dvContext; } + /** + * Configuration for highlighting + */ public SearchHighlightContext highlight() { return searchContext.highlight(); } + /** + * Should the response include scores, even if scores were not calculated in the original query + */ public boolean fetchScores() { return searchContext.sort() != null && searchContext.trackScores(); } + /** + * Configuration for returning inner hits + */ public InnerHitsContext innerHits() { return searchContext.innerHits(); } + /** + * Should the response include version metadata + */ public boolean version() { // TODO version is loaded from docvalues, not stored fields, so why are we checking // stored fields here? 
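
The fetchScores() accessor documented above captures the one case where scores must be recomputed at fetch time: hits were sorted on a field, so the query phase never scored them, but track_scores asked for scores anyway. The mechanics FetchScorePhase builds on are the standard Lucene ones — re-create the Weight and advance a fresh Scorer to each hit. A sketch in isolation (not the exact Elasticsearch implementation; note the doc id here is segment-local):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    public class SingleDocScoreSketch {

        static float scoreOf(IndexSearcher searcher, Query query, LeafReaderContext leaf, int segmentDocId) throws IOException {
            Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1f);
            Scorer scorer = weight.scorer(leaf);          // null if nothing matches in this segment
            if (scorer != null && scorer.iterator().advance(segmentDocId) == segmentDocId) {
                return scorer.score();                    // the hit matches: return its score
            }
            return Float.NaN;                             // the hit does not match the query
        }
    }
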
@@ -134,14 +190,23 @@ public boolean version() { (searchContext.storedFieldsContext() == null || searchContext.storedFieldsContext().fetchFields()); } + /** + * Configuration for the 'fields' response + */ public FetchFieldsContext fetchFieldsContext() { return searchContext.fetchFieldsContext(); } + /** + * Configuration for script fields + */ public ScriptFieldsContext scriptFields() { return searchContext.scriptFields(); } + /** + * Configuration for external fetch phase plugins + */ public SearchExtBuilder getSearchExt(String name) { return searchContext.getSearchExt(name); } From 762f3a89146a93ba149efacb3878f4d47a097c10 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 15 Sep 2020 10:06:04 +0100 Subject: [PATCH 07/10] precommit --- .../main/java/org/elasticsearch/search/fetch/FetchPhase.java | 2 +- .../search/fetch/subphase/FetchDocValuesPhase.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 6c97674a788ea..34260a1ed4816 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -157,7 +157,7 @@ List getProcessors(SearchShardTarget target, FetchContex } } return processors; - } catch (IOException e) { + } catch (Exception e) { throw new FetchPhaseExecutionException(target, "Error building fetch sub-phases", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index 404a54f1c9010..ccf4ce8de04d0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -34,7 +34,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import static org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; From fa61e5b51a564b40f801f5b6a78528a31b963d40 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 15 Sep 2020 11:11:11 +0100 Subject: [PATCH 08/10] check for null query in explain --- .../main/java/org/elasticsearch/search/fetch/FetchContext.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java index 2ebb9629e9914..f5a083eb3c10b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java @@ -125,7 +125,7 @@ public FetchSourceContext fetchSourceContext() { * Should the response include `explain` output */ public boolean explain() { - return searchContext.explain(); + return searchContext.explain() && searchContext.query() != null; } /** From 9f76bd7502b7ec4c078246d420919d638678b0a1 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 15 Sep 2020 12:02:54 +0100 Subject: [PATCH 09/10] test plugin failure --- .../elasticsearch/search/fetch/FetchSubPhasePluginIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 
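
Patch 08's tweak (explain() now also requires a non-null query) guards the core call that explain output is built from. Stripped of the Elasticsearch wrapping, that call is Lucene's IndexSearcher.explain(), which has nothing sensible to do with a null query — a quick sketch (not part of the change set):

    import java.io.IOException;

    import org.apache.lucene.search.Explanation;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;

    public class ExplainCallSketch {

        // Re-runs the query for one (index-wide) doc id and returns the score
        // breakdown; the new explain() guard rules out a null query up front.
        static Explanation explainHit(IndexSearcher searcher, Query query, int docId) throws IOException {
            return searcher.explain(query, docId);
        }
    }
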
From 9f76bd7502b7ec4c078246d420919d638678b0a1 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Tue, 15 Sep 2020 12:02:54 +0100
Subject: [PATCH 09/10] test plugin failure

---
 .../elasticsearch/search/fetch/FetchSubPhasePluginIT.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
index 1bc5d03631fad..74014516262bd 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
@@ -140,8 +140,11 @@ private void hitExecute(FetchContext context, HitContext hitContext) throws IOEx
         TermsEnum te = terms.iterator();
         Map<String, Integer> tv = new HashMap<>();
         BytesRef term;
+        PostingsEnum pe = null;
         while ((term = te.next()) != null) {
-            tv.put(term.utf8ToString(), te.postings(null, PostingsEnum.ALL).freq());
+            pe = te.postings(pe, PostingsEnum.FREQS);
+            pe.nextDoc();
+            tv.put(term.utf8ToString(), pe.freq());
         }
         hitField.getValues().add(tv);
     }
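The test fix in patch 09 is also a correctness fix for the sample plugin: te.postings(null, PostingsEnum.ALL).freq() read a frequency from an enum that had not yet been positioned on a document, and allocated a fresh enum for every term. The corrected idiom, shown standalone below (the TermFrequencies class is ours; the Lucene calls are the real API), reuses one PostingsEnum and advances it with nextDoc() before calling freq():

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.index.PostingsEnum;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    // Standalone sketch of the corrected term-vector iteration.
    final class TermFrequencies {
        static Map<String, Integer> perTermFreqs(Terms terms) throws IOException {
            Map<String, Integer> freqs = new HashMap<>();
            TermsEnum te = terms.iterator();
            PostingsEnum pe = null; // passed back into postings() so Lucene can reuse the instance
            BytesRef term;
            while ((term = te.next()) != null) {
                pe = te.postings(pe, PostingsEnum.FREQS); // FREQS: only frequencies are needed, not ALL
                pe.nextDoc();                             // position on the (single) document before freq()
                freqs.put(term.utf8ToString(), pe.freq());
            }
            return freqs;
        }
    }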
From 5b429a82dea54cd2c2fb6103475df0dec626d8d5 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Wed, 16 Sep 2020 15:25:11 +0100
Subject: [PATCH 10/10] feedback

---
 .../rest-api-spec/test/search/330_fetch_fields.yml    | 4 ++++
 .../org/elasticsearch/search/fetch/FetchContext.java  | 10 +++-------
 .../org/elasticsearch/search/fetch/FetchPhase.java    | 2 +-
 .../search/fetch/subphase/FetchFieldsPhase.java       | 8 ++++----
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml
index 377c5e0e08ba4..6233fce7040b3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_fetch_fields.yml
@@ -115,6 +115,10 @@ setup:
         body:
           keyword: [ "a" ]

+  - do:
+      indices.refresh:
+        index: [ test ]
+
   - do:
       catch: bad_request
       search:

diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java
index ecb4482df2820..90831f15229a6 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java
@@ -43,19 +43,15 @@
  */
 public class FetchContext {

-    private FetchContext(SearchContext searchContext) {
-        this.searchContext = searchContext;
-    }
+    private final SearchContext searchContext;

     /**
      * Create a FetchContext based on a SearchContext
      */
-    public static FetchContext fromSearchContext(SearchContext context) {
-        return new FetchContext(context);
+    public FetchContext(SearchContext searchContext) {
+        this.searchContext = searchContext;
     }

-    private final SearchContext searchContext;
-
     /**
      * The name of the index that documents are being fetched from
      */

diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index a5c2426734af1..670a784852a31 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -105,7 +105,7 @@ public void execute(SearchContext context) {
         Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
         FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);

-        FetchContext fetchContext = FetchContext.fromSearchContext(context);
+        FetchContext fetchContext = new FetchContext(context);

         SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
         Map<String, Object> sharedCache = new HashMap<>();

diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java
index 659594eb3f3bd..2ea7ec045afb3 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java
@@ -41,14 +41,14 @@ public final class FetchFieldsPhase implements FetchSubPhase {

     @Override
-    public FetchSubPhaseProcessor getProcessor(FetchContext searchContext, SearchLookup lookup) {
-        FetchFieldsContext fetchFieldsContext = searchContext.fetchFieldsContext();
+    public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) {
+        FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext();
         if (fetchFieldsContext == null) {
             return null;
         }

         FieldValueRetriever retriever = fetchFieldsContext.fieldValueRetriever(
-            searchContext.getIndexName(),
-            searchContext.mapperService(),
+            fetchContext.getIndexName(),
+            fetchContext.mapperService(),
             lookup
         );
         return new FetchSubPhaseProcessor() {