diff --git a/docs/reference/indices/apis/reload-analyzers.asciidoc b/docs/reference/indices/apis/reload-analyzers.asciidoc index eba55ff615bdc..63af37ae32a9e 100644 --- a/docs/reference/indices/apis/reload-analyzers.asciidoc +++ b/docs/reference/indices/apis/reload-analyzers.asciidoc @@ -13,9 +13,17 @@ stream's backing indices. [source,console] -------------------------------------------------- POST /my-index-000001/_reload_search_analyzers +POST /my-index-000001/_cache/clear?request=true -------------------------------------------------- // TEST[setup:my_index] +IMPORTANT: After reloading the search analyzers you should clear the request + cache to make sure it doesn't contain responses derived from the + previous versions of the analyzer. +// the need for this is tracked in https://github.com/elastic/elasticsearch/issues/66722 + + + [discrete] [[indices-reload-analyzers-api-request]] === {api-request-title} diff --git a/docs/reference/modules/indices/request_cache.asciidoc b/docs/reference/modules/indices/request_cache.asciidoc index 6208f09bf0832..4504f1f4f368b 100644 --- a/docs/reference/modules/indices/request_cache.asciidoc +++ b/docs/reference/modules/indices/request_cache.asciidoc @@ -32,14 +32,14 @@ Scripted queries that use the API calls which are non-deterministic, such as The cache is smart -- it keeps the same _near real-time_ promise as uncached search. -Cached results are invalidated automatically whenever the shard refreshes, but -only if the data in the shard has actually changed. In other words, you will -always get the same results from the cache as you would for an uncached search -request. +Cached results are invalidated automatically whenever the shard refreshes to +pick up changes to the documents or when you update the mapping. In other +words you will always get the same results from the cache as you would for an +uncached search request. The longer the refresh interval, the longer that cached entries will remain -valid. If the cache is full, the least recently used cache keys will be -evicted. +valid even if there are changes to the documents. If the cache is full, the +least recently used cache keys will be evicted. The cache can be expired manually with the <>: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml index e0183f0c54f66..72b9eba61f76e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml @@ -272,3 +272,72 @@ setup: the_filter: filters: filters: [] + + +--- +"cache": + - skip: + version: " - 7.99.99" + reason: cache fixed in 8.0.0 to be backported to 7.11.0 + + - do: + bulk: + refresh: true + body: + - index: + _index: test_1 + _id: 100 + - int_field: 1 + double_field: 1.0 + string_field: foo bar + + - do: + search: + index: test_1 + body: + size: 0 + aggs: + f: + filters: + filters: + foo: + match: + string_field: foo + foo_bar: + match: + string_field: foo bar + - match: { hits.total.value: 5 } + - length: { aggregations.f.buckets: 2 } + - match: { aggregations.f.buckets.foo.doc_count: 4 } + - match: { aggregations.f.buckets.foo_bar.doc_count: 1 } + + # Modify the mapping configuration that generates queries. This should bust the cache. 
+ - do: + indices.put_mapping: + index: test_1 + body: + properties: + string_field: + type: keyword + split_queries_on_whitespace: true + + # This should be entirely fresh because updating the mapping busted the cache. + - do: + search: + index: test_1 + body: + size: 0 + aggs: + f: + filters: + filters: + foo: + match: + string_field: foo + foo_bar: + match: + string_field: foo bar + - match: { hits.total.value: 5 } + - length: { aggregations.f.buckets: 2 } + - match: { aggregations.f.buckets.foo.doc_count: 4 } + - match: { aggregations.f.buckets.foo_bar.doc_count: 4 } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 09ae7e2c64470..23c605f7c4409 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -612,6 +612,7 @@ public QueryShardContext newQueryShardContext( indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(), + mapperService().mappingLookup(), similarityService(), scriptService, xContentRegistry, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 9e31860a70f28..4d6c252b6044f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -115,7 +115,7 @@ private DocumentMapper(IndexSettings indexSettings, this.documentParser = documentParser; this.indexSettings = indexSettings; this.indexAnalyzers = indexAnalyzers; - this.fieldMappers = MappingLookup.fromMapping(this.mapping); + this.fieldMappers = MappingLookup.fromMapping(mapping, this::parse); try { mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index aeb7f75001954..22a904daf12b5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.regex.Regex; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -43,11 +44,16 @@ final class FieldTypeLookup { * For convenience, the set of copied fields includes the field itself. */ private final Map> fieldToCopiedFields = new HashMap<>(); + private final String type; private final DynamicKeyFieldTypeLookup dynamicKeyLookup; - FieldTypeLookup(Collection fieldMappers, - Collection fieldAliasMappers, - Collection runtimeFieldTypes) { + FieldTypeLookup( + String type, + Collection fieldMappers, + Collection fieldAliasMappers, + Collection runtimeFieldTypes + ) { + this.type = type; Map dynamicKeyMappers = new HashMap<>(); for (FieldMapper fieldMapper : fieldMappers) { @@ -89,6 +95,10 @@ final class FieldTypeLookup { * Returns the mapped field type for the given field name. 
*/ MappedFieldType get(String field) { + if (field.equals(TypeFieldType.NAME)) { + return new TypeFieldType(type); + } + MappedFieldType fieldType = fullNameToFieldType.get(field); if (fieldType != null) { return fieldType; @@ -103,6 +113,10 @@ MappedFieldType get(String field) { * Returns a list of the full names of a simple match regex like pattern against full name and index name. */ Set simpleMatchToFullName(String pattern) { + if (Regex.isSimpleMatchPattern(pattern) == false) { + // no wildcards + return Collections.singleton(pattern); + } Set fields = new HashSet<>(); for (String field : fullNameToFieldType.keySet()) { if (Regex.simpleMatch(pattern, field)) { @@ -125,6 +139,9 @@ Set simpleMatchToFullName(String pattern) { * @return A set of paths in the _source that contain the field's values. */ Set sourcePaths(String field) { + if (fullNameToFieldType.isEmpty()) { + return Set.of(); + } String resolvedField = field; int lastDotIndex = field.lastIndexOf('.'); if (lastDotIndex > 0) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index f4fec07069845..96282c9ffad28 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -132,7 +131,7 @@ public MapperService(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, } public boolean hasNested() { - return this.mapper != null && this.mapper.hasNestedObjects(); + return mappingLookup().hasNested(); } public IndexAnalyzers getIndexAnalyzers() { @@ -399,10 +398,7 @@ public DocumentMapperForType documentMapperWithAutoCreate() { * Given the full name of a field, returns its {@link MappedFieldType}. */ public MappedFieldType fieldType(String fullName) { - if (fullName.equals(TypeFieldType.NAME)) { - return new TypeFieldType(this.mapper == null ? "_doc" : this.mapper.type()); - } - return this.mapper == null ? null : this.mapper.mappers().fieldTypes().get(fullName); + return mappingLookup().fieldTypes().get(fullName); } /** @@ -410,19 +406,15 @@ public MappedFieldType fieldType(String fullName) { * then the fields will be returned with a type prefix. */ public Set simpleMatchToFullName(String pattern) { - if (Regex.isSimpleMatchPattern(pattern) == false) { - // no wildcards - return Collections.singleton(pattern); - } - return this.mapper == null ? Collections.emptySet() : this.mapper.mappers().fieldTypes().simpleMatchToFullName(pattern); + return mappingLookup().simpleMatchToFullName(pattern); } /** - * Given a field name, returns its possible paths in the _source. For example, - * the 'source path' for a multi-field is the path to its parent field. + * {@code volatile} read of a (mostly) immutable snapshot of the current mapping. */ - public Set sourcePath(String fullName) { - return this.mapper == null ? Collections.emptySet() : this.mapper.mappers().fieldTypes().sourcePaths(fullName); + public MappingLookup mappingLookup() { + DocumentMapper mapper = this.mapper; + return mapper == null ?
MappingLookup.EMPTY : mapper.mappers(); } /** @@ -444,18 +436,7 @@ public ObjectMapper getObjectMapper(String name) { * directly associated index-time analyzer */ public NamedAnalyzer indexAnalyzer(String field, Function unindexedFieldAnalyzer) { - if (this.mapper == null) { - return unindexedFieldAnalyzer.apply(field); - } - return this.mapper.mappers().indexAnalyzer(field, unindexedFieldAnalyzer); - } - - public boolean containsBrokenAnalysis(String field) { - NamedAnalyzer a = indexAnalyzer(field, f -> null); - if (a == null) { - return false; - } - return a.containsBrokenAnalysis(); + return mappingLookup().indexAnalyzer(field, unindexedFieldAnalyzer); } @Override @@ -504,6 +485,7 @@ public synchronized List reloadSearchAnalyzers(AnalysisRegistry registry reloadedAnalyzers.add(analyzerName); } } + // TODO this should bust the cache somehow. Tracked in https://github.com/elastic/elasticsearch/issues/66722 return reloadedAnalyzers; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index ad68865ba101d..83b44963a7516 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -28,10 +28,38 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Stream; -public final class MappingLookup { +/** + * A (mostly) immutable snapshot of the current mapping of an index with + * access to everything we need for the search phase. + */ +public class MappingLookup { + /** + * Key for the lookup to be used in caches. + */ + public static class CacheKey { + private CacheKey() {} + } + + /** + * A lookup representing an empty mapping. 
+ */ + public static final MappingLookup EMPTY = new MappingLookup( + "_doc", + List.of(), + List.of(), + List.of(), + List.of(), + 0, + sourceToParse -> null, + false + ); + + private final CacheKey cacheKey = new CacheKey(); + /** Full field name to mapper */ private final Map fieldMappers; private final Map objectMappers; @@ -39,8 +67,10 @@ public final class MappingLookup { private final FieldTypeLookup fieldTypeLookup; private final int metadataFieldCount; private final Map indexAnalyzers = new HashMap<>(); + private final Function documentParser; + private final boolean sourceEnabled; - public static MappingLookup fromMapping(Mapping mapping) { + public static MappingLookup fromMapping(Mapping mapping, Function documentParser) { List newObjectMappers = new ArrayList<>(); List newFieldMappers = new ArrayList<>(); List newFieldAliasMappers = new ArrayList<>(); @@ -52,8 +82,16 @@ public static MappingLookup fromMapping(Mapping mapping) { for (Mapper child : mapping.root) { collect(child, newObjectMappers, newFieldMappers, newFieldAliasMappers); } - return new MappingLookup(newFieldMappers, newObjectMappers, newFieldAliasMappers, - mapping.root.runtimeFieldTypes(), mapping.metadataMappers.length); + return new MappingLookup( + mapping.root().name(), + newFieldMappers, + newObjectMappers, + newFieldAliasMappers, + mapping.root.runtimeFieldTypes(), + mapping.metadataMappers.length, + documentParser, + mapping.metadataMapper(SourceFieldMapper.class).enabled() + ); } private static void collect(Mapper mapper, Collection objectMappers, @@ -74,11 +112,16 @@ private static void collect(Mapper mapper, Collection objectMapper } } - public MappingLookup(Collection mappers, + public MappingLookup(String type, + Collection mappers, Collection objectMappers, Collection aliasMappers, Collection runtimeFieldTypes, - int metadataFieldCount) { + int metadataFieldCount, + Function documentParser, + boolean sourceEnabled) { + this.documentParser = documentParser; + this.sourceEnabled = sourceEnabled; Map fieldMappers = new HashMap<>(); Map objects = new HashMap<>(); @@ -113,7 +156,7 @@ public MappingLookup(Collection mappers, } } - this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFieldTypes); + this.fieldTypeLookup = new FieldTypeLookup(type, mappers, aliasMappers, runtimeFieldTypes); this.fieldMappers = Collections.unmodifiableMap(fieldMappers); this.objectMappers = Collections.unmodifiableMap(objects); @@ -147,7 +190,7 @@ public Iterable fieldMappers() { return fieldMappers.values(); } - public void checkLimits(IndexSettings settings) { + void checkLimits(IndexSettings settings) { checkFieldLimit(settings.getMappingTotalFieldsLimit()); checkObjectDepthLimit(settings.getMappingDepthLimit()); checkFieldNameLengthLimit(settings.getMappingFieldNameLengthLimit()); @@ -234,4 +277,50 @@ private static String parentObject(String field) { } return field.substring(0, lastDot); } + + public Set simpleMatchToFullName(String pattern) { + return fieldTypes().simpleMatchToFullName(pattern); + } + + /** + * Returns the mapped field type for the given field name. + */ + public MappedFieldType getFieldType(String field) { + return fieldTypes().get(field); + } + + /** + * Given a concrete field name, return its paths in the _source. + * + * For most fields, the source path is the same as the field itself. However + * there are cases where a field's values are found elsewhere in the _source: + * - For a multi-field, the source path is the parent field.
+ * - One field's content could have been copied to another through copy_to. + * + * @param field The field for which to look up the _source path. Note that the field + * should be a concrete field and *not* an alias. + * @return A set of paths in the _source that contain the field's values. + */ + public Set sourcePaths(String field) { + return fieldTypes().sourcePaths(field); + } + + public ParsedDocument parseDocument(SourceToParse source) { + return documentParser.apply(source); + } + + public boolean hasMappings() { + return this != EMPTY; + } + + public boolean isSourceEnabled() { + return sourceEnabled; + } + + /** + * Key for the lookup to be used in caches. + */ + public CacheKey cacheKey() { + return cacheKey; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 2df7b86f36dce..5932baf574177 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.RuntimeFieldType; @@ -90,6 +91,7 @@ public class QueryShardContext extends QueryRewriteContext { private final IndexSettings indexSettings; private final BigArrays bigArrays; private final MapperService mapperService; + private final MappingLookup mappingLookup; private final SimilarityService similarityService; private final BitsetFilterCache bitsetFilterCache; private final TriFunction, IndexFieldData> indexFieldDataService; @@ -121,6 +123,7 @@ public QueryShardContext( BitsetFilterCache bitsetFilterCache, TriFunction, IndexFieldData> indexFieldDataLookup, MapperService mapperService, + MappingLookup mappingLookup, SimilarityService similarityService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, @@ -142,6 +145,7 @@ public QueryShardContext( bitsetFilterCache, indexFieldDataLookup, mapperService, + mappingLookup, similarityService, scriptService, xContentRegistry, @@ -169,6 +173,7 @@ public QueryShardContext(QueryShardContext source) { source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, + source.mappingLookup, source.similarityService, source.scriptService, source.getXContentRegistry(), @@ -190,6 +195,7 @@ private QueryShardContext(int shardId, BitsetFilterCache bitsetFilterCache, TriFunction, IndexFieldData> indexFieldDataLookup, MapperService mapperService, + MappingLookup mappingLookup, SimilarityService similarityService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, @@ -207,6 +213,7 @@ private QueryShardContext(int shardId, this.shardRequestIndex = shardRequestIndex; this.similarityService = similarityService; this.mapperService = mapperService; + this.mappingLookup = mappingLookup; this.bigArrays = bigArrays; this.bitsetFilterCache = bitsetFilterCache; this.indexFieldDataService = indexFieldDataLookup; @@ -230,7 +237,7 @@ private void reset() { } public Similarity getSearchSimilarity() { - return similarityService != null ? similarityService.similarity(mapperService::fieldType) : null; + return similarityService != null ? 
similarityService.similarity(this::fieldType) : null; } public List defaultFields() { @@ -274,16 +281,19 @@ public Map copyNamedQueries() { return Map.copyOf(namedQueries); } + /** + * Parse a document with current mapping. + */ public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException { - return mapperService.documentMapper() == null ? null : mapperService.documentMapper().parse(source); + return mappingLookup.parseDocument(source); } public boolean hasNested() { - return mapperService.hasNested(); + return mappingLookup.hasNested(); } public boolean hasMappings() { - return mapperService.documentMapper() != null; + return mappingLookup.hasMappings(); } /** @@ -292,13 +302,13 @@ public boolean hasMappings() { */ public Set simpleMatchToIndexNames(String pattern) { if (runtimeMappings.isEmpty()) { - return mapperService.simpleMatchToFullName(pattern); + return mappingLookup.simpleMatchToFullName(pattern); } if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return Collections.singleton(pattern); } - Set matches = new HashSet<>(mapperService.simpleMatchToFullName(pattern)); + Set matches = new HashSet<>(mappingLookup.simpleMatchToFullName(pattern)); for (String name : runtimeMappings.keySet()) { if (Regex.simpleMatch(pattern, name)) { matches.add(name); @@ -329,11 +339,11 @@ public boolean isFieldMapped(String name) { private MappedFieldType fieldType(String name) { MappedFieldType fieldType = runtimeMappings.get(name); - return fieldType == null ? mapperService.fieldType(name) : fieldType; + return fieldType == null ? mappingLookup.getFieldType(name) : fieldType; } public ObjectMapper getObjectMapper(String name) { - return mapperService.getObjectMapper(name); + return mappingLookup.objectMappers().get(name); } public boolean isMetadataField(String field) { @@ -341,11 +351,11 @@ public boolean isMetadataField(String field) { } public Set sourcePath(String fullName) { - return mapperService.sourcePath(fullName); + return mappingLookup.sourcePaths(fullName); } public boolean isSourceEnabled() { - return mapperService.documentMapper().sourceMapper().enabled(); + return mappingLookup.isSourceEnabled(); } /** @@ -378,7 +388,7 @@ public Analyzer getIndexAnalyzer(Function unindexedFieldA return new DelegatingAnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { - return mapperService.indexAnalyzer(fieldName, unindexedFieldAnalyzer); + return mappingLookup.indexAnalyzer(fieldName, unindexedFieldAnalyzer); } }; } @@ -399,8 +409,7 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap if (fieldMapping != null || allowUnmappedFields) { return fieldMapping; } else if (mapUnmappedFieldAsString) { - TextFieldMapper.Builder builder - = new TextFieldMapper.Builder(name, mapperService.getIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers()); return builder.build(new ContentPath(1)).fieldType(); } else { throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name); @@ -412,7 +421,8 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap * backwards offsets in term vectors */ public boolean containsBrokenAnalysis(String field) { - return mapperService.containsBrokenAnalysis(field); + NamedAnalyzer a = mappingLookup.indexAnalyzer(field, f -> null); + return a == null ? 
false : a.containsBrokenAnalysis(); } private SearchLookup lookup = null; @@ -603,10 +613,7 @@ public BigArrays bigArrays() { // TODO this is only used in agg land, maybe rem return bigArrays; } - private static Map parseRuntimeMappings( - Map runtimeMappings, - MapperService mapperService - ) { + private static Map parseRuntimeMappings(Map runtimeMappings, MapperService mapperService) { Map runtimeFieldTypes = new HashMap<>(); if (runtimeMappings.isEmpty() == false) { RuntimeFieldType.parseRuntimeFields(new HashMap<>(runtimeMappings), mapperService.parserContext(), @@ -614,4 +621,11 @@ private static Map parseRuntimeMappings( } return Collections.unmodifiableMap(runtimeFieldTypes); } + + /** + * Cache key for current mapping. + */ + public MappingLookup.CacheKey mappingCacheKey() { + return mappingLookup.cacheKey(); + } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 973595b084b7e..0e0a2fcd0e6dd 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.index.mapper.MappingLookup; import java.io.Closeable; import java.io.IOException; @@ -113,9 +114,9 @@ public void onRemoval(RemovalNotification notification) { } BytesReference getOrCompute(CacheEntity cacheEntity, CheckedSupplier loader, - DirectoryReader reader, BytesReference cacheKey) throws Exception { + MappingLookup.CacheKey mappingCacheKey, DirectoryReader reader, BytesReference cacheKey) throws Exception { assert reader.getReaderCacheHelper() != null; - final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey); + final Key key = new Key(cacheEntity, mappingCacheKey, reader.getReaderCacheHelper().getKey(), cacheKey); Loader cacheLoader = new Loader(cacheEntity, loader); BytesReference value = cache.computeIfAbsent(key, cacheLoader); if (cacheLoader.isLoaded()) { @@ -128,6 +129,15 @@ BytesReference getOrCompute(CacheEntity cacheEntity, CheckedSupplier { @@ -211,11 +221,13 @@ static class Key implements Accountable { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); public final CacheEntity entity; // use as identity equality + public final MappingLookup.CacheKey mappingCacheKey; public final IndexReader.CacheKey readerCacheKey; public final BytesReference value; - Key(CacheEntity entity, IndexReader.CacheKey readerCacheKey, BytesReference value) { + Key(CacheEntity entity, MappingLookup.CacheKey mappingCacheKey, IndexReader.CacheKey readerCacheKey, BytesReference value) { this.entity = entity; + this.mappingCacheKey = Objects.requireNonNull(mappingCacheKey); this.readerCacheKey = Objects.requireNonNull(readerCacheKey); this.value = value; } @@ -236,7 +248,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; - if (Objects.equals(readerCacheKey, key.readerCacheKey) == false) return false; + if (mappingCacheKey.equals(key.mappingCacheKey) == false) return false; + if (readerCacheKey.equals(key.readerCacheKey) == false) return false; if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false; if 
(!value.equals(key.value)) return false; return true; @@ -245,6 +258,7 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = entity.getCacheIdentity().hashCode(); + result = 31 * result + mappingCacheKey.hashCode(); result = 31 * result + readerCacheKey.hashCode(); result = 31 * result + value.hashCode(); return result; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index d9a19a37ff0d2..dbc1f0eb1edad 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -101,6 +101,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.query.CoordinatorRewriteContextProvider; import org.elasticsearch.index.query.QueryBuilder; @@ -1408,12 +1409,18 @@ public void loadIntoContext(ShardSearchRequest request, SearchContext context, Q final DirectoryReader directoryReader = context.searcher().getDirectoryReader(); boolean[] loadedFromCache = new boolean[] { true }; - BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(), + BytesReference cacheKey = request.cacheKey(); + BytesReference bytesReference = cacheShardLevelResult( + context.indexShard(), + context.getQueryShardContext().mappingCacheKey(), + directoryReader, + cacheKey, out -> { - queryPhase.execute(context); - context.queryResult().writeToNoId(out); - loadedFromCache[0] = false; - }); + queryPhase.execute(context); + context.queryResult().writeToNoId(out); + loadedFromCache[0] = false; + } + ); if (loadedFromCache[0]) { // restore the cached query result into the context @@ -1429,7 +1436,12 @@ public void loadIntoContext(ShardSearchRequest request, SearchContext context, Q // key invalidate the result in the thread that caused the timeout. This will end up to be simpler and eventually correct since // running a search that times out concurrently will likely timeout again if it's run while we have this `stale` result in the // cache. One other option is to not cache requests with a timeout at all... 
- indicesRequestCache.invalidate(new IndexShardCacheEntity(context.indexShard()), directoryReader, request.cacheKey()); + indicesRequestCache.invalidate( + new IndexShardCacheEntity(context.indexShard()), + context.getQueryShardContext().mappingCacheKey(), + directoryReader, + cacheKey + ); if (logger.isTraceEnabled()) { logger.trace("Query timed out, invalidating cache entry for request on shard [{}]:\n {}", request.shardId(), request.source()); @@ -1449,8 +1461,13 @@ public ByteSizeValue getTotalIndexingBufferBytes() { * @param loader loads the data into the cache if needed * @return the contents of the cache or the result of calling the loader */ - private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey, - CheckedConsumer loader) throws Exception { + private BytesReference cacheShardLevelResult( + IndexShard shard, + MappingLookup.CacheKey mappingCacheKey, + DirectoryReader reader, + BytesReference cacheKey, + CheckedConsumer loader + ) throws Exception { IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard); CheckedSupplier supplier = () -> { /* BytesStreamOutput allows to pass the expected size but by default uses @@ -1468,7 +1485,7 @@ private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader r return out.bytes(); } }; - return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey); + return indicesRequestCache.getOrCompute(cacheEntity, supplier, mappingCacheKey, reader, cacheKey); } static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 131e00076b76b..82aa2ea4449e8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -562,8 +562,16 @@ protected String contentType() { return null; } }; - MappingLookup mappingLookup = - new MappingLookup(List.of(mockedTimestampField, dateFieldMapper), List.of(), List.of(), List.of(), 0); + MappingLookup mappingLookup = new MappingLookup( + "_doc", + List.of(mockedTimestampField, dateFieldMapper), + List.of(), + List.of(), + List.of(), + 0, + null, + false + ); ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool); Environment env = mock(Environment.class); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 376de20cabda2..06873cb179685 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -125,7 +125,7 @@ public void setupCreateIndexRequestAndAliasValidator() { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build(); queryShardContext = new QueryShardContext(0, 0, new IndexSettings(IndexMetadata.builder("test").settings(indexSettings).build(), indexSettings), - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, 
xContentRegistry(), writableRegistry(), null, null, () -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 06d42b41af0ad..4cf2c7f65b7fa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -167,7 +167,7 @@ public void testTermQuery() { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build(); QueryShardContext context = new QueryShardContext(0, 0, new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings), - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()); MappedFieldType ft = new DateFieldType("field"); String date = "2015-10-12T14:10:55"; @@ -189,7 +189,7 @@ public void testRangeQuery() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build(); QueryShardContext context = new QueryShardContext(0, 0, new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings), - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()); MappedFieldType ft = new DateFieldType("field"); String date1 = "2015-10-12T14:10:55"; @@ -233,7 +233,7 @@ public void testRangeQueryWithIndexSort() { IndexSettings indexSettings = new IndexSettings(indexMetadata, settings); QueryShardContext context = new QueryShardContext(0, 0, indexSettings, - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> 0L, null, null, () -> true, null, emptyMap()); MappedFieldType ft = new DateFieldType("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java deleted file mode 100644 index d20ffbd784590..0000000000000 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.index.analysis.AnalyzerScope; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.io.StringReader; -import java.util.Arrays; -import java.util.Collections; - -public class DocumentFieldMapperTests extends LuceneTestCase { - - private static class FakeAnalyzer extends Analyzer { - - private final String output; - - FakeAnalyzer(String output) { - this.output = output; - } - - @Override - protected TokenStreamComponents createComponents(String fieldName) { - Tokenizer tokenizer = new Tokenizer() { - boolean incremented = false; - final CharTermAttribute term = addAttribute(CharTermAttribute.class); - - @Override - public boolean incrementToken() { - if (incremented) { - return false; - } - term.setLength(0).append(output); - incremented = true; - return true; - } - }; - return new TokenStreamComponents(tokenizer); - } - - } - - static class FakeFieldType extends TermBasedFieldType { - - private FakeFieldType(String name) { - super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); - } - - @Override - public ValueFetcher valueFetcher(QueryShardContext context, String format) { - throw new UnsupportedOperationException(); - } - - @Override - public String typeName() { - return "fake"; - } - } - - static class FakeFieldMapper extends FieldMapper { - - final String indexedValue; - - FakeFieldMapper(FakeFieldType fieldType, String indexedValue) { - super(fieldType.name(), fieldType, - new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue)), - MultiFields.empty(), CopyTo.empty()); - this.indexedValue = indexedValue; - } - - @Override - protected void parseCreateField(ParseContext context) { - } - - @Override - protected String contentType() { - return null; - } - - @Override - public Builder getMergeBuilder() { - return null; - } - } - - public void testAnalyzers() throws IOException { - FakeFieldType fieldType1 = new FakeFieldType("field1"); - FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1"); - - FakeFieldType fieldType2 = new FakeFieldType("field2"); - FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2"); - - MappingLookup mappingLookup = new MappingLookup( - Arrays.asList(fieldMapper1, fieldMapper2), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - 0); - - assertAnalyzes(mappingLookup.indexAnalyzer("field1", f -> null), "field1", "index1"); - assertAnalyzes(mappingLookup.indexAnalyzer("field2", f -> null), "field2", "index2"); - expectThrows(IllegalArgumentException.class, - () -> mappingLookup.indexAnalyzer("field3", f -> { - throw new IllegalArgumentException(); - }).tokenStream("field3", "blah")); - } - - private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException { - try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) { - CharTermAttribute term = tok.addAttribute(CharTermAttribute.class); - assertTrue(tok.incrementToken()); - assertEquals(output, term.toString()); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 475d9b252fbad..487860282749a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -36,13 +36,19 @@ public void testDuplicateFieldAliasAndObject() { ObjectMapper objectMapper = createObjectMapper("some.path"); FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field"); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> - new MappingLookup( + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> new MappingLookup( + "_doc", Collections.emptyList(), singletonList(objectMapper), singletonList(aliasMapper), emptyList(), - 0)); + 0, + null, + false + ) + ); assertEquals("Alias [some.path] is defined both as an object and an alias", e.getMessage()); } @@ -51,14 +57,19 @@ public void testDuplicateFieldAliasAndConcreteField() { FieldMapper invalidField = new MockFieldMapper("invalid"); FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field"); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> - new MappingLookup( + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> new MappingLookup( + "_doc", Arrays.asList(field, invalidField), emptyList(), singletonList(invalidAlias), emptyList(), - 0)); - + 0, + null, + false + ) + ); assertEquals("Alias [invalid] is defined both as an alias and a concrete field", e.getMessage()); } @@ -68,11 +79,15 @@ public void testAliasThatRefersToAlias() { FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "alias"); MappingLookup mappers = new MappingLookup( + "_doc", singletonList(field), emptyList(), Arrays.asList(alias, invalidAlias), emptyList(), - 0); + 0, + null, + false + ); alias.validate(mappers); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { @@ -88,11 +103,15 @@ public void testAliasThatRefersToItself() { MapperParsingException e = expectThrows(MapperParsingException.class, () -> { MappingLookup mappers = new MappingLookup( + "_doc", emptyList(), emptyList(), singletonList(invalidAlias), emptyList(), - 0); + 0, + null, + false + ); invalidAlias.validate(mappers); }); @@ -105,11 +124,15 @@ public void testAliasWithNonExistentPath() { MapperParsingException e = expectThrows(MapperParsingException.class, () -> { MappingLookup mappers = new MappingLookup( + "_doc", emptyList(), emptyList(), singletonList(invalidAlias), emptyList(), - 0); + 0, + null, + false + ); invalidAlias.validate(mappers); }); @@ -122,11 +145,15 @@ public void testFieldAliasWithNestedScope() { FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested.alias", "nested.field"); MappingLookup mappers = new MappingLookup( + "_doc", singletonList(createFieldMapper("nested", "field")), singletonList(objectMapper), singletonList(aliasMapper), emptyList(), - 0); + 0, + null, + false + ); aliasMapper.validate(mappers); } @@ -135,11 +162,15 @@ public void testFieldAliasWithDifferentObjectScopes() { FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "object2.alias", "object1.field"); MappingLookup mappers = new MappingLookup( + "_doc", List.of(createFieldMapper("object1", "field")), List.of(createObjectMapper("object1"), createObjectMapper("object2")), singletonList(aliasMapper), emptyList(), - 0); + 0, + null, + false + ); 
aliasMapper.validate(mappers); } @@ -149,11 +180,15 @@ public void testFieldAliasWithNestedTarget() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { MappingLookup mappers = new MappingLookup( + "_doc", singletonList(createFieldMapper("nested", "field")), Collections.singletonList(objectMapper), singletonList(aliasMapper), emptyList(), - 0); + 0, + null, + false + ); aliasMapper.validate(mappers); }); @@ -168,11 +203,15 @@ public void testFieldAliasWithDifferentNestedScopes() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { MappingLookup mappers = new MappingLookup( + "_doc", singletonList(createFieldMapper("nested1", "field")), List.of(createNestedObjectMapper("nested1"), createNestedObjectMapper("nested2")), singletonList(aliasMapper), emptyList(), - 0); + 0, + null, + false + ); aliasMapper.validate(mappers); }); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index c78346f469439..90f021f9b5869 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -29,11 +29,7 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.test.ESTestCase; -import java.util.Collections; - import static java.util.Collections.emptyMap; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class FieldNamesFieldTypeTests extends ESTestCase { @@ -45,13 +41,10 @@ public void testTermQuery() { Settings settings = settings(Version.CURRENT).build(); IndexSettings indexSettings = new IndexSettings( new IndexMetadata.Builder("foo").settings(settings).numberOfShards(1).numberOfReplicas(0).build(), settings); - MapperService mapperService = mock(MapperService.class); - when(mapperService.fieldType("_field_names")).thenReturn(fieldNamesFieldType); - when(mapperService.fieldType("field_name")).thenReturn(fieldType); - when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singleton("field_name")); + MappingLookup mappingLookup = MappingLookupUtils.fromTypes(fieldNamesFieldType, fieldType); QueryShardContext queryShardContext = new QueryShardContext(0, 0, - indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, + indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, mappingLookup, null, null, null, null, null, null, () -> 0L, null, null, () -> true, null, emptyMap()); Query termQuery = fieldNamesFieldType.termQuery("field_name", queryShardContext); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index f2b63c145c0f9..7f4e05f3a398b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -30,15 +30,16 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.equalTo; public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { - FieldTypeLookup lookup = new FieldTypeLookup(Collections.emptyList(), 
Collections.emptyList(), Collections.emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertNull(lookup.get("foo")); Collection names = lookup.simpleMatchToFullName("foo"); assertNotNull(names); - assertTrue(names.isEmpty()); + assertThat(names, equalTo(Set.of("foo"))); assertEquals(0, size(lookup.filter(ft -> true))); } @@ -47,7 +48,7 @@ public void testFilter() { Collection fieldAliases = singletonList(new FieldAliasMapper("alias", "alias", "test")); Collection runtimeFields = List.of( new TestRuntimeField("runtime", "type"), new TestRuntimeField("field", "type")); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(fieldMappers, fieldAliases, runtimeFields); + FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", fieldMappers, fieldAliases, runtimeFields); assertEquals(3, size(fieldTypeLookup.filter(ft -> true))); for (MappedFieldType fieldType : fieldTypeLookup.filter(ft -> true)) { if (fieldType.name().equals("test")) { @@ -76,7 +77,7 @@ public void testFilter() { public void testAddNewField() { MockFieldMapper f = new MockFieldMapper("foo"); - FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f), emptyList(), Collections.emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.singletonList(f), emptyList(), Collections.emptyList()); assertNull(lookup.get("bar")); assertEquals(f.fieldType(), lookup.get("foo")); assertEquals(1, size(lookup.filter(ft -> true))); @@ -86,7 +87,7 @@ public void testAddFieldAlias() { MockFieldMapper field = new MockFieldMapper("foo"); FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); - FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(field), Collections.singletonList(alias), + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.singletonList(field), Collections.singletonList(alias), Collections.emptyList()); MappedFieldType aliasType = lookup.get("alias"); @@ -100,7 +101,7 @@ public void testSimpleMatchToFullName() { FieldAliasMapper alias1 = new FieldAliasMapper("food", "food", "foo"); FieldAliasMapper alias2 = new FieldAliasMapper("barometer", "barometer", "bar"); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field1, field2), Arrays.asList(alias1, alias2), Collections.emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", List.of(field1, field2), List.of(alias1, alias2), List.of()); Collection names = lookup.simpleMatchToFullName("b*"); @@ -117,7 +118,7 @@ public void testSourcePathWithMultiFields() { .addMultiField(new MockFieldMapper.Builder("field.subfield2")) .build(new ContentPath()); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), emptyList(), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(field), emptyList(), emptyList()); assertEquals(Set.of("field"), lookup.sourcePaths("field")); assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield1")); @@ -133,17 +134,25 @@ public void testSourcePathsWithCopyTo() { .copyTo("field") .build(new ContentPath()); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field, otherField), emptyList(), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(field, otherField), emptyList(), emptyList()); assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field")); assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field.subfield1")); } + public void 
testTypeLookup() { + String type = randomAlphaOfLength(4); + assertThat( + ((TypeFieldType) new FieldTypeLookup(type, List.of(), List.of(), List.of()).get(TypeFieldType.NAME)).getType(), + equalTo(type) + ); + } + public void testRuntimeFieldsLookup() { MockFieldMapper concrete = new MockFieldMapper("concrete"); TestRuntimeField runtime = new TestRuntimeField("runtime", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(concrete), emptyList(), List.of(runtime)); + FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(concrete), emptyList(), List.of(runtime)); assertThat(fieldTypeLookup.get("concrete"), instanceOf(MockFieldMapper.FakeFieldType.class)); assertThat(fieldTypeLookup.get("runtime"), instanceOf(TestRuntimeField.class)); assertEquals(2, size(fieldTypeLookup.filter(ft -> true))); @@ -157,7 +166,7 @@ public void testRuntimeFieldOverrides() { TestRuntimeField subfieldOverride = new TestRuntimeField("object.subfield", "type"); TestRuntimeField runtime = new TestRuntimeField("runtime", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field, concrete, subfield), emptyList(), + FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field, concrete, subfield), emptyList(), List.of(fieldOverride, runtime, subfieldOverride)); assertThat(fieldTypeLookup.get("field"), instanceOf(TestRuntimeField.class)); assertThat(fieldTypeLookup.get("object.subfield"), instanceOf(TestRuntimeField.class)); @@ -172,7 +181,7 @@ public void testRuntimeFieldsSimpleMatchToFullName() { TestRuntimeField field2 = new TestRuntimeField("field2", "type"); TestRuntimeField subfield = new TestRuntimeField("object.subfield", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield)); + FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field1, concrete), emptyList(), List.of(field2, subfield)); { Set matches = fieldTypeLookup.simpleMatchToFullName("fie*"); assertEquals(2, matches.size()); @@ -194,7 +203,7 @@ public void testRuntimeFieldsSourcePaths() { TestRuntimeField field2 = new TestRuntimeField("field2", "type"); TestRuntimeField subfield = new TestRuntimeField("object.subfield", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield)); + FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field1, concrete), emptyList(), List.of(field2, subfield)); { Set sourcePaths = fieldTypeLookup.sourcePaths("field1"); assertEquals(1, sourcePaths.size()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index cd24c8bb48d11..d84821852b5bc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -69,7 +69,7 @@ private QueryShardContext createContext() { IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); Predicate indexNameMatcher = pattern -> Regex.simpleMatch(pattern, "index"); - return new QueryShardContext(0, 0, indexSettings, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), + return new QueryShardContext(0, 0, indexSettings, null, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null, indexNameMatcher, () -> true, 
null, emptyMap()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 8c415cdbdbba6..cc6fe07e6e92b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -33,6 +33,7 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -46,6 +47,17 @@ public void testPreflightUpdateDoesNotChangeMapping() throws Throwable { assertThat("field was not created by mapping update", mapperService.fieldType("field0"), notNullValue()); } + public void testMappingLookup() throws IOException { + MapperService service = createMapperService(mapping(b -> {})); + MappingLookup oldLookup = service.mappingLookup(); + assertThat(oldLookup.fieldTypes().get("cat"), nullValue()); + + merge(service, mapping(b -> b.startObject("cat").field("type", "keyword").endObject())); + MappingLookup newLookup = service.mappingLookup(); + assertThat(newLookup.fieldTypes().get("cat"), not(nullValue())); + assertThat(oldLookup.fieldTypes().get("cat"), nullValue()); + } + /** * Test that we can have at least the number of fields in new mappings that are defined by "index.mapping.total_fields.limit". * Any additional field should trigger an IllegalArgumentException. diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index b918819bcd29b..e709dbf842dd7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -19,10 +19,20 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; +import org.elasticsearch.index.analysis.AnalyzerScope; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; +import java.io.StringReader; +import java.util.Arrays; import java.util.Collections; import static org.hamcrest.CoreMatchers.instanceOf; @@ -30,8 +40,8 @@ public class MappingLookupTests extends ESTestCase { public void testOnlyRuntimeField() { - MappingLookup mappingLookup = new MappingLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.singletonList(new TestRuntimeField("test", "type")), 0); + MappingLookup mappingLookup = new MappingLookup("_doc", Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), + Collections.singletonList(new TestRuntimeField("test", "type")), 0, null, false); assertEquals(0, size(mappingLookup.fieldMappers())); assertEquals(0, mappingLookup.objectMappers().size()); assertNull(mappingLookup.getMapper("test")); @@ -40,8 +50,8 @@ public void testOnlyRuntimeField() { public void testRuntimeFieldLeafOverride() { MockFieldMapper fieldMapper = new MockFieldMapper("test"); - MappingLookup mappingLookup = new 
MappingLookup(Collections.singletonList(fieldMapper), Collections.emptyList(), - Collections.emptyList(), Collections.singletonList(new TestRuntimeField("test", "type")), 0); + MappingLookup mappingLookup = new MappingLookup("_doc", Collections.singletonList(fieldMapper), Collections.emptyList(), + Collections.emptyList(), Collections.singletonList(new TestRuntimeField("test", "type")), 0, null, false); assertThat(mappingLookup.getMapper("test"), instanceOf(MockFieldMapper.class)); assertEquals(1, size(mappingLookup.fieldMappers())); assertEquals(0, mappingLookup.objectMappers().size()); @@ -53,8 +63,16 @@ public void testSubfieldOverride() { MockFieldMapper fieldMapper = new MockFieldMapper("object.subfield"); ObjectMapper objectMapper = new ObjectMapper("object", "object", new Explicit<>(true, true), ObjectMapper.Nested.NO, ObjectMapper.Dynamic.TRUE, Collections.singletonMap("object.subfield", fieldMapper), Version.CURRENT); - MappingLookup mappingLookup = new MappingLookup(Collections.singletonList(fieldMapper), Collections.singletonList(objectMapper), - Collections.emptyList(), Collections.singletonList(new TestRuntimeField("object.subfield", "type")), 0); + MappingLookup mappingLookup = new MappingLookup( + "_doc", + Collections.singletonList(fieldMapper), + Collections.singletonList(objectMapper), + Collections.emptyList(), + Collections.singletonList(new TestRuntimeField("object.subfield", "type")), + 0, + null, + false + ); assertThat(mappingLookup.getMapper("object.subfield"), instanceOf(MockFieldMapper.class)); assertEquals(1, size(mappingLookup.fieldMappers())); assertEquals(1, mappingLookup.objectMappers().size()); @@ -62,6 +80,41 @@ public void testSubfieldOverride() { assertEquals(1, size(mappingLookup.fieldTypes().filter(ft -> true))); } + + public void testAnalyzers() throws IOException { + FakeFieldType fieldType1 = new FakeFieldType("field1"); + FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1"); + + FakeFieldType fieldType2 = new FakeFieldType("field2"); + FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2"); + + MappingLookup mappingLookup = new MappingLookup( + "_doc", + Arrays.asList(fieldMapper1, fieldMapper2), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + 0, + null, + false + ); + + assertAnalyzes(mappingLookup.indexAnalyzer("field1", f -> null), "field1", "index1"); + assertAnalyzes(mappingLookup.indexAnalyzer("field2", f -> null), "field2", "index2"); + expectThrows(IllegalArgumentException.class, + () -> mappingLookup.indexAnalyzer("field3", f -> { + throw new IllegalArgumentException(); + }).tokenStream("field3", "blah")); + } + + private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException { + try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) { + CharTermAttribute term = tok.addAttribute(CharTermAttribute.class); + assertTrue(tok.incrementToken()); + assertEquals(output, term.toString()); + } + } + private static int size(Iterable iterable) { int count = 0; for (Object obj : iterable) { @@ -69,4 +122,76 @@ private static int size(Iterable iterable) { } return count; } + + private static class FakeAnalyzer extends Analyzer { + + private final String output; + + FakeAnalyzer(String output) { + this.output = output; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer tokenizer = new Tokenizer() { + boolean incremented = false; + final CharTermAttribute term = 
addAttribute(CharTermAttribute.class); + + @Override + public boolean incrementToken() { + if (incremented) { + return false; + } + term.setLength(0).append(output); + incremented = true; + return true; + } + }; + return new TokenStreamComponents(tokenizer); + } + + } + + static class FakeFieldType extends TermBasedFieldType { + + private FakeFieldType(String name) { + super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + } + + @Override + public ValueFetcher valueFetcher(QueryShardContext context, String format) { + throw new UnsupportedOperationException(); + } + + @Override + public String typeName() { + return "fake"; + } + } + + static class FakeFieldMapper extends FieldMapper { + + final String indexedValue; + + FakeFieldMapper(FakeFieldType fieldType, String indexedValue) { + super(fieldType.name(), fieldType, + new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue)), + MultiFields.empty(), CopyTo.empty()); + this.indexedValue = indexedValue; + } + + @Override + protected void parseCreateField(ParseContext context) { + } + + @Override + protected String contentType() { + return null; + } + + @Override + public Builder getMergeBuilder() { + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 397823d8347a7..ba4515aab4bb3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -474,7 +474,7 @@ public void doTestIndexSortRangeQueries(NumberType type, Supplier valueS IndexSearcher searcher = newSearcher(reader); QueryShardContext context = new QueryShardContext(0, 0, indexSettings, - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> 0L, null, null, () -> true, null, emptyMap()); final int iters = 10; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 7c4465c03c1d1..93ed185ac5c4f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -213,7 +213,7 @@ private QueryShardContext createContext() { Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); - return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, + return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()); } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index 9ac8c8e0cbae0..58b034bb802a2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ 
-36,10 +36,8 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -56,12 +54,15 @@ import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingLookupUtils; import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.RuntimeFieldType; import org.elasticsearch.index.mapper.TestRuntimeField; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.plugins.MapperPlugin; @@ -78,19 +79,22 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class QueryShardContextTests extends ESTestCase { @@ -204,6 +208,7 @@ public void testIndexSortedOnField() { null, null, null, + null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), null, @@ -222,100 +227,101 @@ public void testIndexSortedOnField() { } public void testFielddataLookupSelfReference() { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - //simulate a runtime field that depends on itself e.g. field: doc['field'] - return leafLookup.doc().get(field).toString(); - })); + QueryShardContext queryShardContext = createQueryShardContext( + // simulate a runtime field that depends on itself e.g. 
field: doc['field'] + runtimeField("field", leafLookup -> leafLookup.doc().get("field").toString()) + ); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field", queryShardContext)); assertEquals("Cyclic dependency detected while resolving runtime fields: field -> field", iae.getMessage()); } public void testFielddataLookupLooseLoop() { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - //simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['1'] - if (field.equals("4")) { - return leafLookup.doc().get("1").toString(); - } - return leafLookup.doc().get(Integer.toString(Integer.parseInt(field) + 1)).toString(); - })); + QueryShardContext queryShardContext = createQueryShardContext( + // simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['1'] + runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()), + runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()), + runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()), + runtimeField("4", leafLookup -> leafLookup.doc().get("1").get(0).toString()) + ); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext)); assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 1", iae.getMessage()); } public void testFielddataLookupTerminatesInLoop() { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - //simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4'] - if (field.equals("4")) { - return leafLookup.doc().get("4").toString(); - } - return leafLookup.doc().get(Integer.toString(Integer.parseInt(field) + 1)).toString(); - })); + QueryShardContext queryShardContext = createQueryShardContext( + // simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4'] + runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()), + runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()), + runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()), + runtimeField("4", leafLookup -> leafLookup.doc().get("4").get(0).toString()) + ); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext)); assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage()); } public void testFielddataLookupSometimesLoop() throws IOException { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - if (docId == 0) { - return field + "_" + docId; - } else { - assert docId == 1; - if (field.equals("field4")) { - return leafLookup.doc().get("field1").toString(); + QueryShardContext queryShardContext = createQueryShardContext( + // simulate a runtime field cycle in the second doc: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4'] + runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()), + runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()), + runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()), + runtimeField("4", (leafLookup, docId) -> { + if (docId == 0) { + return "escape!"; } - int i = Integer.parseInt(field.substring(field.length() - 1)); - return 
leafLookup.doc().get("field" + (i + 1)).toString(); - } - })); - List values = collect("field1", queryShardContext, new TermQuery(new Term("indexed_field", "first"))); - assertEquals(List.of("field1_0"), values); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field1", queryShardContext)); - assertEquals("Cyclic dependency detected while resolving runtime fields: field1 -> field2 -> field3 -> field4 -> field1", - iae.getMessage()); + return leafLookup.doc().get("4").get(0).toString(); + }) + ); + List values = collect("1", queryShardContext, new TermQuery(new Term("indexed_field", "first"))); + assertEquals(List.of("escape!"), values); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext)); + assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage()); } public void testFielddataLookupBeyondMaxDepth() { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - int i = Integer.parseInt(field); - return leafLookup.doc().get(Integer.toString(i + 1)).toString(); - })); + QueryShardContext queryShardContext = createQueryShardContext( + runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()), + runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()), + runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()), + runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()), + runtimeField("5", leafLookup -> leafLookup.doc().get("6").get(0).toString()), + runtimeField("6", leafLookup -> "cat") + ); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext)); assertEquals("Field requires resolving too many dependent fields: 1 -> 2 -> 3 -> 4 -> 5 -> 6", iae.getMessage()); } public void testFielddataLookupReferencesBelowMaxDepth() throws IOException { - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - int i = Integer.parseInt(field.substring(field.length() - 1)); - if (i == 5) { - return "test"; - } else { - ScriptDocValues scriptDocValues = leafLookup.doc().get("field" + (i + 1)); - return scriptDocValues.get(0).toString() + docId; - } - })); - assertEquals(List.of("test0000", "test1111"), collect("field1", queryShardContext)); + QueryShardContext queryShardContext = createQueryShardContext( + runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()), + runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()), + runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()), + runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()), + runtimeField("5", (leafLookup, docId) -> "cat on doc " + docId) + ); + assertEquals(List.of("cat on doc 0", "cat on doc 1"), collect("1", queryShardContext)); } public void testFielddataLookupOneFieldManyReferences() throws IOException { int numFields = randomIntBetween(5, 20); - QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> { - if (field.equals("field")) { - StringBuilder value = new StringBuilder(); - for (int i = 0; i < numFields; i++) { - value.append(leafLookup.doc().get("field" + i).get(0)); - } - return value.toString(); - } else { - return "test" + docId; + List fields = new 
ArrayList<>(numFields + 1); + fields.add(runtimeField("root", leafLookup -> { + StringBuilder value = new StringBuilder(); + for (int i = 0; i < numFields; i++) { + value.append(leafLookup.doc().get(i).get(0)); } + return value.toString(); })); - StringBuilder expectedFirstDoc = new StringBuilder(); - StringBuilder expectedSecondDoc = new StringBuilder(); + StringBuilder expected = new StringBuilder(); for (int i = 0; i < numFields; i++) { - expectedFirstDoc.append("test0"); - expectedSecondDoc.append("test1"); + String fieldValue = Integer.toString(i); + fields.add(runtimeField(Integer.toString(i), leafLookup -> fieldValue)); + expected.append(i); } - assertEquals(List.of(expectedFirstDoc.toString(), expectedSecondDoc.toString()), collect("field", queryShardContext)); + assertEquals( + List.of(expected.toString(), expected.toString()), + collect("root", createQueryShardContext("uuid", null, MappingLookupUtils.fromTypes(List.of(), fields), Map.of(), List.of())) + ); } public void testSearchRequestRuntimeFields() { @@ -331,7 +337,7 @@ public void testSearchRequestRuntimeFields() { QueryShardContext qsc = createQueryShardContext( "uuid", null, - Map.of("pig", new MockFieldMapper.FakeFieldType("pig"), "cat", new MockFieldMapper.FakeFieldType("cat")), + MappingLookupUtils.fromTypes(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), runtimeMappings, Collections.singletonList(new TestRuntimeField.Plugin())); assertTrue(qsc.isFieldMapped("cat")); @@ -347,43 +353,26 @@ public void testSearchRequestRuntimeFields() { } public static QueryShardContext createQueryShardContext(String indexUuid, String clusterAlias) { - return createQueryShardContext(indexUuid, clusterAlias, name -> { - throw new UnsupportedOperationException(); - }); + return createQueryShardContext(indexUuid, clusterAlias, MappingLookup.EMPTY, Map.of(), List.of()); } - private static QueryShardContext createQueryShardContext( - String indexUuid, - String clusterAlias, - Function fieldTypeLookup - ) { - return createQueryShardContext(indexUuid, clusterAlias, new HashMap<>() { - @Override - public MappedFieldType get(Object key) { - return fieldTypeLookup.apply(key.toString()); - } - }, Collections.emptyMap(), Collections.emptyList()); + private static QueryShardContext createQueryShardContext(RuntimeFieldType... 
fieldTypes) { + return createQueryShardContext( + "uuid", + null, + MappingLookupUtils.fromTypes(List.of(), List.of(fieldTypes)), + Collections.emptyMap(), + Collections.emptyList() + ); } private static QueryShardContext createQueryShardContext( String indexUuid, String clusterAlias, - Map fieldTypeLookup, + MappingLookup mappingLookup, Map runtimeMappings, List mapperPlugins ) { - MapperService mapperService = createMapperService(indexUuid, fieldTypeLookup, mapperPlugins); - final long nowInMillis = randomNonNegativeLong(); - return new QueryShardContext( - 0, 0, mapperService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null, - (mappedFieldType, idxName, searchLookup) -> mappedFieldType.fielddataBuilder(idxName, searchLookup).build(null, null), - mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), - null, null, () -> nowInMillis, clusterAlias, null, () -> true, null, runtimeMappings); - } - - private static MapperService createMapperService(String indexUuid, - Map fieldTypeLookup, - List mapperPlugins) { IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder("index"); indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT) .put("index.number_of_shards", 1) @@ -391,36 +380,69 @@ private static MapperService createMapperService(String indexUuid, .put(IndexMetadata.SETTING_INDEX_UUID, indexUuid) ); IndexMetadata indexMetadata = indexMetadataBuilder.build(); + IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); + MapperService mapperService = createMapperService(indexSettings, mapperPlugins); + final long nowInMillis = randomNonNegativeLong(); + return new QueryShardContext( + 0, + 0, + indexSettings, + BigArrays.NON_RECYCLING_INSTANCE, + null, + (mappedFieldType, idxName, searchLookup) -> mappedFieldType.fielddataBuilder(idxName, searchLookup).build(null, null), + mapperService, + mappingLookup, + null, + null, + NamedXContentRegistry.EMPTY, + new NamedWriteableRegistry(Collections.emptyList()), + null, + null, + () -> nowInMillis, + clusterAlias, + null, + () -> true, + null, + runtimeMappings + ); + } + + private static MapperService createMapperService( + IndexSettings indexSettings, + List mapperPlugins + ) { IndexAnalyzers indexAnalyzers = new IndexAnalyzers( - Collections.singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, null)), - Collections.emptyMap(), Collections.emptyMap() + singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, null)), + emptyMap(), + emptyMap() ); - IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); IndicesModule indicesModule = new IndicesModule(mapperPlugins); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - SimilarityService similarityService = new SimilarityService(indexSettings, null, Collections.emptyMap()); - return new MapperService(indexSettings, indexAnalyzers, NamedXContentRegistry.EMPTY, similarityService, - mapperRegistry, () -> { - throw new UnsupportedOperationException(); - }, () -> true, null) { - @Override - public MappedFieldType fieldType(String name) { - return fieldTypeLookup.get(name); - } + Supplier queryShardContextSupplier = () -> { throw new UnsupportedOperationException(); }; + MapperService mapperService = mock(MapperService.class); + when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers); + when(mapperService.parserContext()).thenReturn(new Mapper.TypeParser.ParserContext( + null, + 
mapperRegistry.getMapperParsers()::get, + mapperRegistry.getRuntimeFieldTypeParsers()::get, + indexSettings.getIndexVersionCreated(), + queryShardContextSupplier, + null, + null, + indexAnalyzers, + indexSettings, + () -> true, + false + )); + return mapperService; + } - @Override - public Set simpleMatchToFullName(String pattern) { - if (Regex.isMatchAllPattern(pattern)) { - return Collections.unmodifiableSet(fieldTypeLookup.keySet()); - } - throw new UnsupportedOperationException(); - } - }; + private static RuntimeFieldType runtimeField(String name, Function runtimeDocValues) { + return runtimeField(name, (leafLookup, docId) -> runtimeDocValues.apply(leafLookup)); } - private static Function fieldTypeLookup( - TriFunction runtimeDocValues) { - return name -> new TestRuntimeField(name, null) { + private static RuntimeFieldType runtimeField(String name, BiFunction runtimeDocValues) { + return new TestRuntimeField(name, null) { @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { @@ -460,7 +482,7 @@ public void setNextDocId(int docId) { LeafSearchLookup leafLookup = searchLookup.get() .getLeafSearchLookup(context); leafLookup.setDocument(docId); - value = runtimeDocValues.apply(name, leafLookup, docId); + value = runtimeDocValues.apply(leafLookup, docId); } }; } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index d7f6cb3a6c61d..82a626213bdc0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -41,9 +41,28 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { public void testRewriteMissingField() throws Exception { IndexService indexService = createIndex("test"); IndexReader reader = new MultiReader(); - QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, - null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null, null, () -> true, null, emptyMap()); + QueryRewriteContext context = new QueryShardContext( + 0, + 0, + indexService.getIndexSettings(), + BigArrays.NON_RECYCLING_INSTANCE, + null, + null, + indexService.mapperService(), + indexService.mapperService().mappingLookup(), + null, + null, + xContentRegistry(), + writableRegistry(), + null, + new IndexSearcher(reader), + null, + null, + null, + () -> true, + null, + emptyMap() + ); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); } @@ -60,7 +79,7 @@ public void testRewriteMissingReader() throws Exception { indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), null, null, null, - indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), + indexService.mapperService(), indexService.mapperService().mappingLookup(), null, null, xContentRegistry(), writableRegistry(), null, null, null, null, null, () -> true, null, emptyMap()); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must return INTERSECT @@ -79,9 +98,28 @@ public void testRewriteEmptyReader() throws Exception { 
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); IndexReader reader = new MultiReader(); - QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, - null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null, null, () -> true, null, emptyMap()); + QueryRewriteContext context = new QueryShardContext( + 0, + 0, + indexService.getIndexSettings(), + BigArrays.NON_RECYCLING_INSTANCE, + null, + null, + indexService.mapperService(), + indexService.mapperService().mappingLookup(), + null, + null, + xContentRegistry(), + writableRegistry(), + null, + new IndexSearcher(reader), + null, + null, + null, + () -> true, + null, + emptyMap() + ); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT assertEquals(Relation.DISJOINT, range.getRelation(context)); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index e4eb70c3df03a..3262b925966be 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -43,12 +43,16 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.cache.request.ShardRequestCache; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingLookupUtils; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; public class IndicesRequestCacheTests extends ESTestCase { @@ -62,6 +66,7 @@ public void testBasicOperationsCache() throws Exception { writer.addDocument(newDoc(0, "foo")); DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); + MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey(); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false); AtomicBoolean indexShard = new AtomicBoolean(true); @@ -69,7 +74,7 @@ public void testBasicOperationsCache() throws Exception { // initial cache TestEntity entity = new TestEntity(requestCacheStats, indexShard); Loader loader = new Loader(reader, 0); - BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); @@ -80,14 +85,14 @@ public void testBasicOperationsCache() throws Exception { // cache hit entity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(reader, 0); - value = cache.getOrCompute(entity, loader, reader, termBytes); + value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, 
requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertTrue(loader.loadedFromCache); assertEquals(1, cache.count()); - assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length()); assertEquals(1, cache.numRegisteredCloseListeners()); // Closing the cache doesn't modify an already returned CacheEntity @@ -103,7 +108,7 @@ public void testBasicOperationsCache() throws Exception { assertEquals(0, requestCacheStats.stats().getEvictions()); assertTrue(loader.loadedFromCache); assertEquals(0, cache.count()); - assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt()); + assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes()); IOUtils.close(reader, writer, dir, cache); assertEquals(0, cache.numRegisteredCloseListeners()); @@ -111,6 +116,7 @@ public void testBasicOperationsCache() throws Exception { public void testCacheDifferentReaders() throws Exception { IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); + MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey(); AtomicBoolean indexShard = new AtomicBoolean(true); ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); @@ -131,33 +137,33 @@ public void testCacheDifferentReaders() throws Exception { // initial cache TestEntity entity = new TestEntity(requestCacheStats, indexShard); Loader loader = new Loader(reader, 0); - BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertFalse(loader.loadedFromCache); assertEquals(1, cache.count()); - assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length()); - final int cacheSize = requestCacheStats.stats().getMemorySize().bytesAsInt(); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length()); + final long cacheSize = requestCacheStats.stats().getMemorySize().getBytes(); assertEquals(1, cache.numRegisteredCloseListeners()); // cache the second TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(secondReader, 0); - value = cache.getOrCompute(entity, loader, secondReader, termBytes); + value = cache.getOrCompute(entity, loader, mappingKey, secondReader, termBytes); assertEquals("bar", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertFalse(loader.loadedFromCache); assertEquals(2, cache.count()); - assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > cacheSize + value.length()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > cacheSize + value.length()); assertEquals(2, cache.numRegisteredCloseListeners()); secondEntity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(secondReader, 0); - value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes); + value = cache.getOrCompute(secondEntity, loader, mappingKey, secondReader, termBytes); assertEquals("bar", 
value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -167,7 +173,7 @@ public void testCacheDifferentReaders() throws Exception { entity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(reader, 0); - value = cache.getOrCompute(entity, loader, reader, termBytes); + value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -182,10 +188,9 @@ public void testCacheDifferentReaders() throws Exception { assertEquals(0, requestCacheStats.stats().getEvictions()); assertTrue(loader.loadedFromCache); assertEquals(1, cache.count()); - assertEquals(cacheSize, requestCacheStats.stats().getMemorySize().bytesAsInt()); + assertEquals(cacheSize, requestCacheStats.stats().getMemorySize().getBytes()); assertEquals(1, cache.numRegisteredCloseListeners()); - // release if (randomBoolean()) { secondReader.close(); @@ -198,13 +203,93 @@ public void testCacheDifferentReaders() throws Exception { assertEquals(0, requestCacheStats.stats().getEvictions()); assertTrue(loader.loadedFromCache); assertEquals(0, cache.count()); - assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt()); + assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes()); IOUtils.close(secondReader, writer, dir, cache); assertEquals(0, cache.numRegisteredCloseListeners()); } + public void testCacheDifferentMapping() throws Exception { + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); + MappingLookup.CacheKey mappingKey1 = MappingLookupUtils.fromTypes().cacheKey(); + MappingLookup.CacheKey mappingKey2 = MappingLookupUtils.fromTypes().cacheKey(); + AtomicBoolean indexShard = new AtomicBoolean(true); + ShardRequestCache requestCacheStats = new ShardRequestCache(); + Directory dir = newDirectory(); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); + writer.addDocument(newDoc(0, "foo")); + writer.addDocument(newDoc(1, "bar")); + DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); + TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); + BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false); + + // initial cache + TestEntity entity = new TestEntity(requestCacheStats, indexShard); + Loader loader = new Loader(reader, 0); + BytesReference value = cache.getOrCompute(entity, loader, mappingKey1, reader, termBytes); + assertEquals("foo", value.streamInput().readString()); + assertEquals(0, requestCacheStats.stats().getHitCount()); + assertEquals(1, requestCacheStats.stats().getMissCount()); + assertEquals(0, requestCacheStats.stats().getEvictions()); + assertFalse(loader.loadedFromCache); + assertEquals(1, cache.count()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length()); + final long cacheSize = requestCacheStats.stats().getMemorySize().getBytes(); + assertEquals(1, cache.numRegisteredCloseListeners()); + + // cache the second + TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + loader = new Loader(reader, 1); + value = cache.getOrCompute(entity, loader, mappingKey2, reader, termBytes); + assertEquals("bar", value.streamInput().readString()); + assertEquals(0, requestCacheStats.stats().getHitCount()); + 
assertEquals(2, requestCacheStats.stats().getMissCount()); + assertEquals(0, requestCacheStats.stats().getEvictions()); + assertFalse(loader.loadedFromCache); + assertEquals(2, cache.count()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > cacheSize + value.length()); + assertEquals(1, cache.numRegisteredCloseListeners()); + + secondEntity = new TestEntity(requestCacheStats, indexShard); + loader = new Loader(reader, 1); + value = cache.getOrCompute(secondEntity, loader, mappingKey2, reader, termBytes); + assertEquals("bar", value.streamInput().readString()); + assertEquals(1, requestCacheStats.stats().getHitCount()); + assertEquals(2, requestCacheStats.stats().getMissCount()); + assertEquals(0, requestCacheStats.stats().getEvictions()); + assertTrue(loader.loadedFromCache); + assertEquals(2, cache.count()); + + entity = new TestEntity(requestCacheStats, indexShard); + loader = new Loader(reader, 0); + value = cache.getOrCompute(entity, loader, mappingKey1, reader, termBytes); + assertEquals("foo", value.streamInput().readString()); + assertEquals(2, requestCacheStats.stats().getHitCount()); + assertEquals(2, requestCacheStats.stats().getMissCount()); + assertEquals(0, requestCacheStats.stats().getEvictions()); + assertTrue(loader.loadedFromCache); + assertEquals(2, cache.count()); + + // Closing the cache doesn't change returned entities + if (randomBoolean()) { + reader.close(); + } else { + indexShard.set(false); // closed shard but reader is still open + cache.clear(secondEntity); + } + cache.cleanCache(); + assertEquals(2, requestCacheStats.stats().getMissCount()); + assertEquals(0, requestCacheStats.stats().getEvictions()); + assertTrue(loader.loadedFromCache); + assertEquals(0, cache.count()); + assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes()); + + IOUtils.close(reader, writer, dir, cache); + assertEquals(0, cache.numRegisteredCloseListeners()); + } + public void testEviction() throws Exception { + MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey(); final ByteSizeValue size; { IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); @@ -227,9 +312,9 @@ public void testEviction() throws Exception { TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); Loader secondLoader = new Loader(secondReader, 0); - BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); - BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); + BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, mappingKey, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); size = requestCacheStats.stats().getMemorySize(); IOUtils.close(reader, secondReader, writer, dir, cache); @@ -262,12 +347,12 @@ public void testEviction() throws Exception { TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard); Loader thirdLoader = new Loader(thirdReader, 0); - BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); - BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); + BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, mappingKey, 
secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); - BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); + BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, mappingKey, thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(2, cache.count()); assertEquals(1, requestCacheStats.stats().getEvictions()); @@ -285,6 +370,7 @@ public void testClearAllEntityIdentity() throws Exception { writer.addDocument(newDoc(0, "foo")); DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); + MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey(); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false); TestEntity entity = new TestEntity(requestCacheStats, indexShard); @@ -293,31 +379,33 @@ public void testClearAllEntityIdentity() throws Exception { writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); + MappingLookup.CacheKey secondMappingKey = MappingLookupUtils.fromTypes().cacheKey(); TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); + MappingLookup.CacheKey thirdMappingKey = MappingLookupUtils.fromTypes().cacheKey(); AtomicBoolean differentIdentity = new AtomicBoolean(true); - TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity); + TestEntity thirdEntity = new TestEntity(requestCacheStats, differentIdentity); Loader thirdLoader = new Loader(thirdReader, 0); - BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); - BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); + BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondMappingKey, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); - BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); + BytesReference value3 = cache.getOrCompute(thirdEntity, thirdLoader, thirdMappingKey, thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(3, cache.count()); final long hitCount = requestCacheStats.stats().getHitCount(); - // clear all for the indexShard Idendity even though is't still open + // clear all for the indexShard entity even though is't still open cache.clear(randomFrom(entity, secondEntity)); cache.cleanCache(); assertEquals(1, cache.count()); // third has not been validated since it's a different identity - value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); + value3 = cache.getOrCompute(thirdEntity, thirdLoader, thirdMappingKey, thirdReader, termBytes); assertEquals(hitCount + 1, 
requestCacheStats.stats().getHitCount()); assertEquals("baz", value3.streamInput().readString()); @@ -367,6 +455,7 @@ public void testInvalidate() throws Exception { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.addDocument(newDoc(0, "foo")); + MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey(); DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); @@ -376,7 +465,7 @@ public void testInvalidate() throws Exception { // initial cache TestEntity entity = new TestEntity(requestCacheStats, indexShard); Loader loader = new Loader(reader, 0); - BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); + BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); @@ -387,28 +476,28 @@ public void testInvalidate() throws Exception { // cache hit entity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(reader, 0); - value = cache.getOrCompute(entity, loader, reader, termBytes); + value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertTrue(loader.loadedFromCache); assertEquals(1, cache.count()); - assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length()); assertEquals(1, cache.numRegisteredCloseListeners()); // load again after invalidate entity = new TestEntity(requestCacheStats, indexShard); loader = new Loader(reader, 0); - cache.invalidate(entity, reader, termBytes); - value = cache.getOrCompute(entity, loader, reader, termBytes); + cache.invalidate(entity, mappingKey, reader, termBytes); + value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes); assertEquals("foo", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertFalse(loader.loadedFromCache); assertEquals(1, cache.count()); - assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length()); + assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length()); assertEquals(1, cache.numRegisteredCloseListeners()); // release @@ -423,15 +512,17 @@ public void testInvalidate() throws Exception { assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); assertEquals(0, cache.count()); - assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt()); + assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes()); IOUtils.close(reader, writer, dir, cache); assertEquals(0, cache.numRegisteredCloseListeners()); } - public void testEqualsKey() throws IOException { + public void testKeyEqualsAndHashCode() throws IOException { AtomicBoolean trueBoolean = new AtomicBoolean(true); AtomicBoolean falseBoolean = new AtomicBoolean(false); + MappingLookup.CacheKey mKey1 = 
MappingLookupUtils.fromTypes().cacheKey(); + MappingLookup.CacheKey mKey2 = MappingLookupUtils.fromTypes().cacheKey(); Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(); IndexWriter writer = new IndexWriter(dir, config); @@ -441,19 +532,51 @@ public void testEqualsKey() throws IOException { IndexReader reader2 = DirectoryReader.open(writer); IndexReader.CacheKey rKey2 = reader2.getReaderCacheHelper().getKey(); IOUtils.close(reader1, reader2, writer, dir); - IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey2, new TestBytesReference(1)); - IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(2)); - String s = "Some other random object"; - assertEquals(key1, key1); + List keys = new ArrayList<>(); + for (AtomicBoolean bool : new AtomicBoolean[] { trueBoolean, falseBoolean }) { + for (MappingLookup.CacheKey mKey : new MappingLookup.CacheKey[] { mKey1, mKey2 }) { + for (IndexReader.CacheKey rKey : new IndexReader.CacheKey[] { rKey1, rKey2 }) { + for (BytesReference requestKey : new BytesReference[] { new TestBytesReference(1), new TestBytesReference(2) }) { + keys.add(new IndicesRequestCache.Key(new TestEntity(null, bool), mKey, rKey, requestKey)); + } + } + } + } + for (IndicesRequestCache.Key key : keys) { + assertNotEquals(key, null); + assertNotEquals(key, "Some other random object"); + } + for (IndicesRequestCache.Key key1 : keys) { + assertNotEquals(key1, null); + for (IndicesRequestCache.Key key2 : keys) { + if (key1 == key2) { + assertEquals(key1, key2); + assertEquals(key1.hashCode(), key2.hashCode()); + } else { + assertNotEquals(key1, key2); + assertNotEquals(key1.hashCode(), key2.hashCode()); + /* + * If we made random keys it'd be possible for us to have + * hash collisions and for the assertion above to fail. + * But we don't use random keys for this test. 
+ */ + } + } + } + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key( + new TestEntity(null, trueBoolean), + mKey1, + rKey1, + new TestBytesReference(1) + ); + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key( + new TestEntity(null, trueBoolean), + mKey1, + rKey1, + new TestBytesReference(1) + ); assertEquals(key1, key2); - assertNotEquals(key1, null); - assertNotEquals(key1, s); - assertNotEquals(key1, key3); - assertNotEquals(key1, key4); - assertNotEquals(key1, key5); + assertEquals(key1.hashCode(), key2.hashCode()); } private class TestBytesReference extends AbstractBytesReference { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java index ad8b3b0d54984..23736d3c36c84 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceCloseTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesRequestCache.Key; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -287,7 +288,8 @@ public void onMiss() {} @Override public void onRemoval(RemovalNotification notification) {} }; - cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), searcher.getDirectoryReader(), new BytesArray("foo")); + MappingLookup.CacheKey mappingCacheKey = indexService.mapperService().mappingLookup().cacheKey(); + cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), mappingCacheKey, searcher.getDirectoryReader(), new BytesArray("foo")); assertEquals(1L, cache.count()); searcher.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index cb4f50eba72d6..864e056d8b784 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -37,15 +37,18 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -878,4 +881,20 @@ private List generateBook(String id, String[] authors, int[] numPages) return documents; } + @Override + protected List objectMappers() { + return MOCK_OBJECT_MAPPERS; + } + + static final List MOCK_OBJECT_MAPPERS = List.of( + 
nestedObject(NESTED_OBJECT), + nestedObject(NESTED_OBJECT + "." + NESTED_OBJECT2), + nestedObject("nested_reseller"), + nestedObject("nested_chapters"), + nestedObject("nested_field") + ); + + public static ObjectMapper nestedObject(String path) { + return new ObjectMapper.Builder(path, Version.CURRENT).nested(ObjectMapper.Nested.newNested()).build(new ContentPath()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 52e25aee12d8f..dd0ffdcfcea60 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -250,4 +251,8 @@ public void testNestedUnderTerms() throws IOException { }, NestedAggregatorTests.resellersMappedFields()); } + @Override + protected List objectMappers() { + return NestedAggregatorTests.MOCK_OBJECT_MAPPERS; + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index c11926c6b969d..fb37c4947a01a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.SeqNoFieldMapper; @@ -64,6 +65,7 @@ import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests; import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; @@ -601,4 +603,10 @@ public void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsu * buckets we should have left after each reduction. 
*/ } + + @Override + protected List objectMappers() { + return List.of(NestedAggregatorTests.nestedObject("nested_object")); + } + } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java index 8f60825732757..305aa1ec3d06c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java @@ -35,7 +35,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.mapper.BinaryFieldMapper; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -49,6 +51,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Map; import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantText; @@ -346,4 +349,9 @@ public void testSignificanceOnTextArrays() throws IOException { } } } + + @Override + protected FieldMapper buildMockFieldMapper(MappedFieldType ft) { + return new MockFieldMapper(ft, Map.of(ft.name(), ft.getTextSearchInfo().getSearchAnalyzer())); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index c119cc1e48458..755327ad2af06 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.SeqNoFieldMapper; @@ -84,6 +85,7 @@ import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests; import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; @@ -1500,4 +1502,8 @@ private T reduce(Aggregator agg, BigArrays bigAr return result; } + @Override + protected List objectMappers() { + return List.of(NestedAggregatorTests.nestedObject("nested_object")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index 
b882f57b17aa0..0c8107e665f37 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -737,7 +737,27 @@ private static QueryShardContext newQueryShardContext(MapperService mapperServic .put(IndexMetadata.SETTING_INDEX_UUID, "uuid").build(); IndexMetadata indexMetadata = new IndexMetadata.Builder("index").settings(settings).build(); IndexSettings indexSettings = new IndexSettings(indexMetadata, settings); - return new QueryShardContext(0, 0, indexSettings, null, null, null, mapperService, null, null, null, null, null, null, null, null, - null, null, null, emptyMap()); + return new QueryShardContext( + 0, + 0, + indexSettings, + null, + null, + null, + mapperService, + mapperService.mappingLookup(), + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + emptyMap() + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 6bf9fb5ee116f..c1180e23e76b4 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -279,7 +279,7 @@ public void testBuildSearchContextHighlight() throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, + null, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, null, null, System::currentTimeMillis, null, null, () -> true, null, emptyMap()) { @Override public MappedFieldType getFieldType(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 6e04efbd5d170..29f3aee382b7e 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -143,7 +143,7 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, null, null, null, + null, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()) { @Override public MappedFieldType getFieldType(String name) { @@ -187,7 +187,7 @@ public void testRewritingKeepsSettings() throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new 
QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, null, null, null, + null, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()) { @Override public MappedFieldType getFieldType(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index e9cb1ce7aff37..ef6a474ce4886 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -202,7 +202,7 @@ protected final QueryShardContext createMockShardContext(IndexSearcher searcher) return builder.build(new IndexFieldDataCache.None(), null); }; return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup, - null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher, + null, null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher, () -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap()) { @Override diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 3ac6396e580d0..4832dcd0abe22 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -39,6 +39,8 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingLookupUtils; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.query.QueryShardContext; @@ -163,10 +165,8 @@ public void testBuild() throws IOException { Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"), indexSettings); - MapperService mapperService = mock(MapperService.class); ScriptService scriptService = mock(ScriptService.class); MappedFieldType fieldType = mockFieldType(suggestionBuilder.field()); - when(mapperService.fieldType(any(String.class))).thenReturn(fieldType); IndexAnalyzers indexAnalyzers = new IndexAnalyzers( new HashMap<>() { @Override @@ -176,11 +176,13 @@ public NamedAnalyzer get(Object key) { }, Collections.emptyMap(), Collections.emptyMap()); + MapperService mapperService = mock(MapperService.class); when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers); + MappingLookup lookup = MappingLookupUtils.fromTypes(fieldType); when(scriptService.compile(any(Script.class), any())).then(invocation -> new TestTemplateService.MockTemplateScript.Factory( ((Script) invocation.getArguments()[0]).getIdOrCode())); QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, - null, mapperService, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null, + null, mapperService, lookup, null, 
scriptService, xContentRegistry(), namedWriteableRegistry, null, null, System::currentTimeMillis, null, null, () -> true, null, emptyMap()); SuggestionContext suggestionContext = suggestionBuilder.build(mockShardContext); @@ -214,13 +216,9 @@ public void testBuildWithUnmappedField() { Settings indexSettings = builder.build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"), indexSettings); - MapperService mapperService = mock(MapperService.class); - ScriptService scriptService = mock(ScriptService.class); - when(mapperService.getNamedAnalyzer(any(String.class))).then( - invocation -> new NamedAnalyzer((String) invocation.getArguments()[0], AnalyzerScope.INDEX, new SimpleAnalyzer())); QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, - null, mapperService, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null, + null, mock(MapperService.class), MappingLookup.EMPTY, null, null, xContentRegistry(), namedWriteableRegistry, null, null, System::currentTimeMillis, null, null, () -> true, null, emptyMap()); if (randomBoolean()) { mockShardContext.setAllowUnmappedFields(randomBoolean()); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MappingLookupUtils.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MappingLookupUtils.java new file mode 100644 index 0000000000000..56d866f849805 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MappingLookupUtils.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import java.util.Arrays; +import java.util.List; + +import static java.util.stream.Collectors.toList; + +public class MappingLookupUtils { + public static MappingLookup fromTypes(MappedFieldType... 
types) { + return fromTypes(Arrays.asList(types), List.of()); + } + + public static MappingLookup fromTypes(List<MappedFieldType> concreteFields, List<RuntimeFieldType> runtimeFields) { + List<FieldMapper> mappers = concreteFields.stream().map(MockFieldMapper::new).collect(toList()); + return new MappingLookup("_doc", mappers, List.of(), List.of(), runtimeFields, 0, sourceToParse -> null, true); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java index 4072083ab59c5..f46cac153f925 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java @@ -19,10 +19,12 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.QueryShardContext; import java.util.Collections; import java.util.List; +import java.util.Map; // this sucks how much must be overridden just to get a dummy field mapper... public class MockFieldMapper extends FieldMapper { @@ -32,7 +34,11 @@ public MockFieldMapper(String fullName) { } public MockFieldMapper(MappedFieldType fieldType) { - super(findSimpleName(fieldType.name()), fieldType, + this(fieldType, Map.of()); + } + + public MockFieldMapper(MappedFieldType fieldType, Map<String, NamedAnalyzer> indexAnalyzers) { + super(findSimpleName(fieldType.name()), fieldType, indexAnalyzers, MultiFields.empty(), new CopyTo.Builder().build()); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 5131b311d6b26..9a0ab844cead8 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; +import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; @@ -55,7 +56,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; @@ -71,7 +71,6 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener; import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -87,9 +86,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; -import 
org.elasticsearch.index.mapper.ObjectMapper.Nested; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.TextFieldMapper; @@ -142,11 +142,10 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; +import static java.util.stream.Collectors.toList; import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -156,7 +155,6 @@ * {@link AggregationBuilder} instance. */ public abstract class AggregatorTestCase extends ESTestCase { - private static final String NESTEDFIELD_PREFIX = "nested_"; private List releasables = new ArrayList<>(); protected ValuesSourceRegistry valuesSourceRegistry; @@ -227,38 +225,41 @@ protected AggregationContext createAggregationContext(IndexSearcher indexSearche */ BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), breakerService).withCircuitBreaking(); - // TODO: now just needed for top_hits, this will need to be revised for other agg unit tests: - MapperService mapperService = mock(MapperService.class); - when(mapperService.getIndexSettings()).thenReturn(indexSettings); - when(mapperService.hasNested()).thenReturn(false); - when(mapperService.indexAnalyzer(anyString(), any())).thenReturn(Lucene.STANDARD_ANALYZER); // for significant text - for (MappedFieldType type : fieldTypes) { - String name = type.name(); - when(mapperService.fieldType(name)).thenReturn(type); - // Alias each field to -alias so everyone can test aliases - when(mapperService.fieldType(name + "-alias")).thenReturn(type); - } - when(mapperService.getObjectMapper(anyString())).thenAnswer(invocation -> { - String fieldName = (String) invocation.getArguments()[0]; - if (fieldName.startsWith(NESTEDFIELD_PREFIX)) { - return new ObjectMapper.Builder(fieldName, Version.CURRENT).nested(Nested.newNested()).build(new ContentPath()); - } - return null; - }); + MappingLookup mappingLookup = new MappingLookup( + "_doc", + Arrays.stream(fieldTypes).map(this::buildMockFieldMapper).collect(toList()), + objectMappers(), + // Alias all fields to -alias to test aliases + Arrays.stream(fieldTypes) + .map(ft -> new FieldAliasMapper(ft.name() + "-alias", ft.name() + "-alias", ft.name())) + .collect(toList()), + List.of(), + 0, + sourceToParse -> null, + true + ); TriFunction, IndexFieldData> fieldDataBuilder = ( fieldType, s, - searchLookup) -> fieldType.fielddataBuilder(mapperService.getIndexSettings().getIndex().getName(), searchLookup) + searchLookup) -> fieldType.fielddataBuilder(indexSettings.getIndex().getName(), searchLookup) .build(new IndexFieldDataCache.None(), breakerService); + BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onRemoval(ShardId shardId, Accountable accountable) {} + + @Override + public void onCache(ShardId shardId, Accountable accountable) {} + }); QueryShardContext queryShardContext = new QueryShardContext( 0, -1, indexSettings, bigArrays, - null, + bitsetFilterCache, fieldDataBuilder, - mapperService, + null, + mappingLookup, null, getMockScriptService(), xContentRegistry(), @@ -274,13 +275,12 @@ protected AggregationContext 
createAggregationContext(IndexSearcher indexSearche ); MultiBucketConsumer consumer = new MultiBucketConsumer(maxBucket, breakerService.getBreaker(CircuitBreaker.REQUEST)); - BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, mock(Listener.class)); return new ProductionAggregationContext( queryShardContext, query, null, consumer, - () -> buildSubSearchContext(mapperService, queryShardContext, bitsetFilterCache), + () -> buildSubSearchContext(indexSettings, queryShardContext, bitsetFilterCache), releasables::add, bitsetFilterCache, randomInt(), @@ -289,16 +289,32 @@ protected AggregationContext createAggregationContext(IndexSearche ); } + /** + * Build a {@link FieldMapper} to create the {@link MappingLookup} used for the aggs. + * {@code protected} so subclasses can override it. + */ + protected FieldMapper buildMockFieldMapper(MappedFieldType ft) { + return new MockFieldMapper(ft); + } + + /** + * {@link ObjectMapper}s to add to the lookup. By default we don't need + * any {@link ObjectMapper}s, but testing nested objects will require adding some. + */ + protected List<ObjectMapper> objectMappers() { + return List.of(); + } + /** * Build a {@link SubSearchContext} to power {@code top_hits}. */ private SubSearchContext buildSubSearchContext( - MapperService mapperService, + IndexSettings indexSettings, QueryShardContext queryShardContext, BitsetFilterCache bitsetFilterCache ) { SearchContext ctx = mock(SearchContext.class); - QueryCache queryCache = new DisabledQueryCache(mapperService.getIndexSettings()); + QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override public void onUse(Query query) { @@ -325,11 +341,13 @@ public boolean shouldCache(Query query) { } when(ctx.fetchPhase()).thenReturn(new FetchPhase(Arrays.asList(new FetchSourcePhase(), new FetchDocValuesPhase()))); when(ctx.getQueryShardContext()).thenReturn(queryShardContext); - NestedDocuments nestedDocuments = new NestedDocuments(mapperService, bitsetFilterCache::getBitSetProducer); - when(ctx.getNestedDocuments()).thenReturn(nestedDocuments); IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0)); when(ctx.indexShard()).thenReturn(indexShard); + MapperService mapperService = mock(MapperService.class); + when(mapperService.hasNested()).thenReturn(false); + NestedDocuments nested = new NestedDocuments(mapperService, bitsetFilterCache::getBitSetProducer); + when(ctx.getNestedDocuments()).thenReturn(nested); return new SubSearchContext(ctx); } @@ -610,7 +628,6 @@ protected List unsupportedMappedFieldTypes() { */ public void testSupportedFieldTypes() throws IOException { MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); - Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build(); String fieldName = "typeTestFieldName"; List supportedVSTypes = getSupportedValuesSourceTypes(); List unsupportedMappedFieldTypes = unsupportedMappedFieldTypes(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index b66156c28cc76..8cba816a5e4ff 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -411,9 +411,28 @@ public void close() throws IOException 
{ } QueryShardContext createShardContext(IndexSearcher searcher) { - return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, - indexFieldDataService::getForField, mapperService, similarityService, scriptService, xContentRegistry, - namedWriteableRegistry, this.client, searcher, () -> nowInMillis, null, indexNameMatcher(), () -> true, null, emptyMap()); + return new QueryShardContext( + 0, + 0, + idxSettings, + BigArrays.NON_RECYCLING_INSTANCE, + bitsetFilterCache, + indexFieldDataService::getForField, + mapperService, + mapperService.mappingLookup(), + similarityService, + scriptService, + xContentRegistry, + namedWriteableRegistry, + this.client, + searcher, + () -> nowInMillis, + null, + indexNameMatcher(), + () -> true, + null, + emptyMap() + ); } ScriptModule createScriptModule(List scriptPlugins) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index 14d87dcb6a803..181b5cec3bd08 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -33,7 +33,9 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingLookupUtils; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; @@ -74,7 +76,6 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase { - private static final String MISSING_FIELD_NAME = "does-not-exist"; private static final int FIELD_COUNT = 10; private ExecutorService singleThreadExecutor; @@ -106,7 +107,7 @@ public void testSameBitSetIsReturnedForIdenticalQuery() throws Exception { public void testNullBitSetIsReturnedForNonMatchingQuery() throws Exception { final DocumentSubsetBitsetCache cache = newCache(Settings.EMPTY); runTestOnIndex((shardContext, leafContext) -> { - final Query query = QueryBuilders.termQuery(MISSING_FIELD_NAME, "any-value").rewrite(shardContext).toQuery(shardContext); + final Query query = QueryBuilders.termQuery("not-mapped", "any-value").rewrite(shardContext).toQuery(shardContext); final BitSet bitSet = cache.getBitSet(query, leafContext); assertThat(bitSet, nullValue()); }); @@ -536,7 +537,7 @@ public void close() throws IOException { } } - private TestIndexContext testIndex(MapperService mapperService, Client client) throws IOException { + private TestIndexContext testIndex(MappingLookup mappingLookup, Client client) throws IOException { TestIndexContext context = null; final long nowInMillis = randomNonNegativeLong(); @@ -564,7 +565,7 @@ private TestIndexContext testIndex(MapperService mapperService, Client client) t final LeafReaderContext leaf = directoryReader.leaves().get(0); final QueryShardContext shardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, mapperService, null, null, 
xContentRegistry(), writableRegistry(), + null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(), client, new IndexSearcher(directoryReader), () -> nowInMillis, null, null, () -> true, null, emptyMap()); context = new TestIndexContext(directory, iw, directoryReader, shardContext, leaf); @@ -585,15 +586,14 @@ null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), } private void runTestOnIndices(int numberIndices, CheckedConsumer, Exception> body) throws Exception { - final MapperService mapperService = mock(MapperService.class); - when(mapperService.fieldType(Mockito.anyString())).thenAnswer(invocation -> { - final String fieldName = (String) invocation.getArguments()[0]; - if (fieldName.equals(MISSING_FIELD_NAME)) { - return null; - } else { - return new KeywordFieldMapper.KeywordFieldType(fieldName); - } - }); + List types = new ArrayList<>(); + for (int i = 0; i < 11; i++) { // the tests use fields 1 to 10. + // This field has a value. + types.add(new KeywordFieldMapper.KeywordFieldType("field-" + i)); + // This field never has a value + types.add(new KeywordFieldMapper.KeywordFieldType("dne-" + i)); + } + MappingLookup mappingLookup = MappingLookupUtils.fromTypes(types, List.of()); final Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); @@ -601,7 +601,7 @@ private void runTestOnIndices(int numberIndices, CheckedConsumer context = new ArrayList<>(numberIndices); try { for (int i = 0; i < numberIndices; i++) { - context.add(testIndex(mapperService, client)); + context.add(testIndex(mappingLookup, client)); } body.accept(context); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 774c0d9694448..f297aa4ad359b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -29,15 +29,15 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingLookupUtils; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.XPackLicenseState.Feature; -import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.AbstractBuilderTestCase; @@ -49,7 +49,6 @@ import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.user.User; -import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Executors; 
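For readers skimming the refactoring above: instead of stubbing MapperService.fieldType(...) with Mockito, these tests now build a real MappingLookup from the MappedFieldTypes they care about via the new MappingLookupUtils test helper and pass it straight into QueryShardContext. A minimal, illustrative sketch of that pattern follows; the class and field names are invented for the example and are not part of the change set.

[source,java]
----
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;

public class MappingLookupSketch {
    /**
     * Build a lookup over two keyword fields. The helper wraps each
     * MappedFieldType in a MockFieldMapper, so no Mockito stubbing is needed.
     */
    static MappingLookup twoKeywordFields() {
        return MappingLookupUtils.fromTypes(
            new KeywordFieldType("some-field"),   // field the test actually queries
            new KeywordFieldType("other-field")   // mapped, but never given a value
        );
    }
}
----

The resulting lookup is handed to the QueryShardContext constructor argument that previously received a mocked MapperService, as the tests in this diff do, so field resolution during query building no longer depends on Mockito answers.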
@@ -59,7 +58,6 @@ import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -68,15 +66,8 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT public void testDLS() throws Exception { ShardId shardId = new ShardId("_index", "_na_", 0); - MapperService mapperService = mock(MapperService.class); - ScriptService scriptService = mock(ScriptService.class); - when(mapperService.documentMapper()).thenReturn(null); - when(mapperService.simpleMatchToFullName(anyString())) - .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0])); - when(mapperService.fieldType(Mockito.anyString())).then(invocation -> { - final String fieldName = (String) invocation.getArguments()[0]; - return new KeywordFieldMapper.KeywordFieldType(fieldName); - }); + MappingLookup mappingLookup = MappingLookupUtils.fromTypes(new KeywordFieldType("field")); + ScriptService scriptService = mock(ScriptService.class); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); @@ -91,7 +82,7 @@ public void testDLS() throws Exception { when(client.settings()).thenReturn(Settings.EMPTY); final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), + null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); @@ -182,15 +173,12 @@ protected IndicesAccessControl getIndicesAccessControl() { public void testDLSWithLimitedPermissions() throws Exception { ShardId shardId = new ShardId("_index", "_na_", 0); - MapperService mapperService = mock(MapperService.class); - ScriptService scriptService = mock(ScriptService.class); - when(mapperService.documentMapper()).thenReturn(null); - when(mapperService.simpleMatchToFullName(anyString())) - .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0])); - when(mapperService.fieldType(Mockito.anyString())).then(invocation -> { - final String fieldName = (String) invocation.getArguments()[0]; - return new KeywordFieldMapper.KeywordFieldType(fieldName); - }); + MappingLookup mappingLookup = MappingLookupUtils.fromTypes( + new KeywordFieldType("field"), + new KeywordFieldType("f1"), + new KeywordFieldType("f2") + ); + ScriptService scriptService = mock(ScriptService.class); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); @@ -223,7 +211,7 @@ public void testDLSWithLimitedPermissions() throws Exception { when(client.settings()).thenReturn(Settings.EMPTY); final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, - null, null, 
mapperService, null, null, xContentRegistry(), writableRegistry(), + null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); diff --git a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlattenedFieldLookupTests.java b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlattenedFieldLookupTests.java index e31ea7868f791..bd7fa9fcb9968 100644 --- a/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlattenedFieldLookupTests.java +++ b/x-pack/plugin/mapper-flattened/src/test/java/org/elasticsearch/index/mapper/FlattenedFieldLookupTests.java @@ -39,7 +39,7 @@ public void testFieldTypeLookup() { String fieldName = "object1.object2.field"; FlattenedFieldMapper mapper = createFlattenedMapper(fieldName); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), emptyList(), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(mapper), emptyList(), emptyList()); assertEquals(mapper.fieldType(), lookup.get(fieldName)); String objectKey = "key1.key2"; @@ -60,7 +60,7 @@ public void testFieldTypeLookupWithAlias() { String aliasName = "alias"; FieldAliasMapper alias = new FieldAliasMapper(aliasName, aliasName, fieldName); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), singletonList(alias), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(mapper), singletonList(alias), emptyList()); assertEquals(mapper.fieldType(), lookup.get(aliasName)); String objectKey = "key1.key2"; @@ -83,11 +83,11 @@ public void testFieldTypeLookupWithMultipleFields() { FlattenedFieldMapper mapper2 = createFlattenedMapper(field2); FlattenedFieldMapper mapper3 = createFlattenedMapper(field3); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2), emptyList(), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper1, mapper2), emptyList(), emptyList()); assertNotNull(lookup.get(field1 + ".some.key")); assertNotNull(lookup.get(field2 + ".some.key")); - lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList()); + lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList()); assertNotNull(lookup.get(field1 + ".some.key")); assertNotNull(lookup.get(field2 + ".some.key")); assertNotNull(lookup.get(field3 + ".some.key")); @@ -124,7 +124,7 @@ public void testFieldLookupIterator() { MockFieldMapper mapper = new MockFieldMapper("foo"); FlattenedFieldMapper flattenedMapper = createFlattenedMapper("object1.object2.field"); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(mapper, flattenedMapper), emptyList(), emptyList()); + FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper, flattenedMapper), emptyList(), emptyList()); Set fieldNames = new HashSet<>(); lookup.filter(ft -> true).forEach(ft -> fieldNames.add(ft.name())); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index e713defbc3756..4f0e64d2f9871 100644 --- 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -89,7 +89,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { private void setup() { settings = createIndexSettings(); queryShardContext = new QueryShardContext(0, 0, settings, - BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, + BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, null, null, null, null, () -> 0L, null, null, () -> true, null, emptyMap()); } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/runtime_fields/10_keyword.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/runtime_fields/10_keyword.yml index 19567a3928c68..bd5c45823aaae 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/runtime_fields/10_keyword.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/runtime_fields/10_keyword.yml @@ -128,6 +128,7 @@ setup: search: index: sensor body: + size: 0 aggs: dow: terms: @@ -138,6 +139,31 @@ setup: - match: {aggregations.dow.buckets.1.key: Monday} - match: {aggregations.dow.buckets.1.doc_count: 1} + # Update the mapping and make sure the cache doesn't still have the old results + - do: + indices.put_mapping: + index: sensor + body: + runtime: + day_of_week: + type: keyword + script: | + emit(doc['timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.SHORT, Locale.ROOT)); + - do: + search: + index: sensor + body: + size: 0 + aggs: + dow: + terms: + field: day_of_week + - match: {hits.total.value: 6} + - match: {aggregations.dow.buckets.0.key: Fri} + - match: {aggregations.dow.buckets.0.doc_count: 1} + - match: {aggregations.dow.buckets.1.key: Mon} + - match: {aggregations.dow.buckets.1.doc_count: 1} + --- "term query": - do: diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index b34b0689eddeb..e72e59a946646 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -898,7 +898,7 @@ protected final QueryShardContext createMockShardContext() { return builder.build(new IndexFieldDataCache.None(), null); }; return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup, - null, null, null, xContentRegistry(), null, null, null, + null, null, null, null, xContentRegistry(), null, null, null, () -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap()) { @Override