
Speed up some more logic around resolving shards+indices in searches #111129


Closed
@@ -215,10 +215,7 @@ private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
indicesAndAliases
);
String[] finalIndices = Strings.EMPTY_ARRAY;
if (aliases == null
|| aliases.length == 0
|| indicesAndAliases.contains(index)
|| hasDataStreamRef(clusterState, indicesAndAliases, index)) {
if (aliases == null || indicesAndAliases.contains(index) || hasDataStreamRef(clusterState, indicesAndAliases, index)) {
finalIndices = new String[] { index };
}
if (aliases != null) {
@@ -238,10 +235,15 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set<String> i
}

Map<String, AliasFilter> buildIndexAliasFilters(ClusterState clusterState, Set<String> indicesAndAliases, Index[] concreteIndices) {
var blocks = clusterState.blocks();
boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false;
final Map<String, AliasFilter> aliasFilterMap = new HashMap<>();
for (Index index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName());
AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, index.getName(), indicesAndAliases);
final String indexName = index.getName();
if (hasBlocks) {
blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, indexName);
}
AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, indexName, indicesAndAliases);
assert aliasFilter != null;
aliasFilterMap.put(index.getUUID(), aliasFilter);
}
@@ -822,35 +822,40 @@ public String[] indexAliases(
IndexAbstraction ia = state.metadata().getIndicesLookup().get(index);
DataStream dataStream = ia.getParentDataStream();
if (dataStream != null) {
if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) {
final String dsName = dataStream.getName();
if (skipIdentity == false && resolvedExpressions.contains(dsName)) {
// skip the filters when the request targets the data stream name
return null;
}
Map<String, DataStreamAlias> dataStreamAliases = state.metadata().dataStreamAliases();
List<DataStreamAlias> aliasesForDataStream;
List<String> requiredAliases = null;
if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) {
aliasesForDataStream = dataStreamAliases.values()
.stream()
.filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName()))
.filter(dataStreamAlias -> dataStreamAlias.getDataStreams().contains(dataStream.getName()))
.toList();
for (DataStreamAlias dataStreamAlias : dataStreamAliases.values()) {
final String dsAliasName = dataStreamAlias.getName();
if (resolvedExpressions.contains(dsAliasName) && dataStreamAlias.getDataStreams().contains(dsName)) {
if (requiredDataStreamAlias.test(dataStreamAlias) == false) {
// we have a non-required alias for this data stream so no need to check further
return null;
}
if (requiredAliases == null) {
requiredAliases = new ArrayList<>();
}
requiredAliases.add(dsAliasName);
}
}
} else {
aliasesForDataStream = resolvedExpressions.stream()
.map(dataStreamAliases::get)
.filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName()))
.toList();
}

List<String> requiredAliases = null;
for (DataStreamAlias dataStreamAlias : aliasesForDataStream) {
if (requiredDataStreamAlias.test(dataStreamAlias)) {
if (requiredAliases == null) {
requiredAliases = new ArrayList<>(aliasesForDataStream.size());
for (String resolvedExpression : resolvedExpressions) {
DataStreamAlias dataStreamAlias = dataStreamAliases.get(resolvedExpression);
if (dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dsName)) {
if (requiredDataStreamAlias.test(dataStreamAlias) == false) {
// we have a non-required alias for this data stream so no need to check further
return null;
}
if (requiredAliases == null) {
requiredAliases = new ArrayList<>();
}
requiredAliases.add(dataStreamAlias.getName());
}
requiredAliases.add(dataStreamAlias.getName());
} else {
// we have a non-required alias for this data stream so no need to check further
return null;
}
}
if (requiredAliases == null) {
@@ -1029,7 +1034,7 @@ public static boolean isAllIndices(Collection<String> aliasesOrIndices) {
* @return true if the provided array explicitly maps to all indices, false otherwise
*/
static boolean isExplicitAllPattern(Collection<String> aliasesOrIndices) {
return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next());
return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && aliasesOrIndices.contains(Metadata.ALL);
}

public SystemIndexAccessLevel getSystemIndexAccessLevel() {
@@ -1798,10 +1803,11 @@ public String get() {
*/
public ExpressionList(Context context, List<String> expressionStrings) {
List<Expression> expressionsList = new ArrayList<>(expressionStrings.size());
final boolean expandWildCardExpressions = context.getOptions().expandWildcardExpressions();
boolean wildcardSeen = false;
for (String expressionString : expressionStrings) {
boolean isExclusion = expressionString.startsWith("-") && wildcardSeen;
if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) {
boolean isExclusion = wildcardSeen && expressionString.startsWith("-");
if (expandWildCardExpressions && isWildcard(expressionString)) {
wildcardSeen = true;
expressionsList.add(new Expression(expressionString, true, isExclusion));
} else {
@@ -23,7 +23,6 @@
import org.elasticsearch.node.ResponseCollectorService;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -247,22 +246,18 @@ private ShardIterator preferenceActiveShardIterator(
}
if (preference.charAt(0) == '_') {
preferenceType = Preference.parse(preference);
switch (preferenceType) {
case PREFER_NODES:
final Set<String> nodesIds = Arrays.stream(
preference.substring(Preference.PREFER_NODES.type().length() + 1).split(",")
).collect(Collectors.toSet());
return indexShard.preferNodeActiveInitializingShardsIt(nodesIds);
case LOCAL:
return indexShard.preferNodeActiveInitializingShardsIt(Collections.singleton(localNodeId));
case ONLY_LOCAL:
return indexShard.onlyNodeActiveInitializingShardsIt(localNodeId);
case ONLY_NODES:
String nodeAttributes = preference.substring(Preference.ONLY_NODES.type().length() + 1);
return indexShard.onlyNodeSelectorActiveInitializingShardsIt(nodeAttributes.split(","), nodes);
default:
throw new IllegalArgumentException("unknown preference [" + preferenceType + "]");
}
return switch (preferenceType) {
original-brownbear (Member, Author) commented on Jul 20, 2024:
This is just cosmetic for the most part but also does save a bit of byte code size.

case PREFER_NODES -> indexShard.preferNodeActiveInitializingShardsIt(
Sets.newHashSet(preference.substring(Preference.PREFER_NODES.type().length() + 1).split(","))
);
case LOCAL -> indexShard.preferNodeActiveInitializingShardsIt(Collections.singleton(localNodeId));
case ONLY_LOCAL -> indexShard.onlyNodeActiveInitializingShardsIt(localNodeId);
case ONLY_NODES -> indexShard.onlyNodeSelectorActiveInitializingShardsIt(
preference.substring(Preference.ONLY_NODES.type().length() + 1).split(","),
nodes
);
case SHARDS -> throw new IllegalArgumentException("unexpected preference [" + Preference.SHARDS + "]");
};
}
}
// if not, then use it as the index
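The inline review comment above says the switch-expression rewrite is mostly cosmetic but also trims a little bytecode. As a minimal, hedged illustration of that pattern only (a stand-in enum and plain strings, not the PR's actual Preference handling or shard-iterator calls), the sketch below contrasts the old statement form, where invalid values fall to a throwing default, with the arrow-form switch expression, where the compiler enforces exhaustiveness over the enum and the one invalid case throws directly.

// Illustrative sketch only: hypothetical stand-ins, not Elasticsearch code.
public class SwitchExpressionSketch {

    enum Preference { PREFER_NODES, LOCAL, ONLY_LOCAL, ONLY_NODES, SHARDS }

    // Old style: each branch returns; values that are invalid in this context
    // fall through to a default branch that throws.
    static String describeOld(Preference p) {
        switch (p) {
            case PREFER_NODES:
                return "prefer specific nodes";
            case LOCAL:
                return "prefer the local node";
            case ONLY_LOCAL:
                return "only the local node";
            case ONLY_NODES:
                return "only nodes matching a selector";
            default:
                throw new IllegalArgumentException("unknown preference [" + p + "]");
        }
    }

    // Switch expression: exhaustiveness is checked by the compiler, each arm is a
    // single expression, and the invalid case throws directly with no default needed.
    static String describeNew(Preference p) {
        return switch (p) {
            case PREFER_NODES -> "prefer specific nodes";
            case LOCAL -> "prefer the local node";
            case ONLY_LOCAL -> "only the local node";
            case ONLY_NODES -> "only nodes matching a selector";
            case SHARDS -> throw new IllegalArgumentException("unexpected preference [" + p + "]");
        };
    }

    public static void main(String[] args) {
        System.out.println(describeOld(Preference.LOCAL));
        System.out.println(describeNew(Preference.ONLY_NODES));
    }
}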
29 changes: 14 additions & 15 deletions server/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -80,6 +80,7 @@
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.gateway.MetaStateService;
import org.elasticsearch.gateway.MetadataStateFormat;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.CloseUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexMode;
@@ -394,7 +395,7 @@ protected void doStop() {
ExecutorService indicesStopExecutor = Executors.newFixedThreadPool(5, daemonThreadFactory(settings, "indices_shutdown"));

// Copy indices because we modify it asynchronously in the body of the loop
final Set<Index> indices = this.indices.values().stream().map(s -> s.index()).collect(Collectors.toSet());
final Set<Index> indices = this.indices.values().stream().map(AbstractIndexComponent::index).collect(Collectors.toSet());
final CountDownLatch latch = new CountDownLatch(indices.size());
for (final Index index : indices) {
indicesStopExecutor.execute(
@@ -1703,15 +1704,6 @@ interface IndexDeletionAllowedPredicate {
private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true;

public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String> resolvedExpressions) {
/* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch
* of dependencies we pass in a function that can perform the parsing. */
CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser = bytes -> {
try (
XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, bytes, XContentHelper.xContentType(bytes))
) {
return parseTopLevelQuery(parser);
}
};
String[] aliases = indexNameExpressionResolver.filteringAliases(state, index, resolvedExpressions);
if (aliases == null) {
return AliasFilter.EMPTY;
@@ -1721,13 +1713,14 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String
IndexAbstraction ia = state.metadata().getIndicesLookup().get(index);
DataStream dataStream = ia.getParentDataStream();
if (dataStream != null) {
final var dsAliases = metadata.dataStreamAliases();
String dataStreamName = dataStream.getName();
List<QueryBuilder> filters = Arrays.stream(aliases)
.map(name -> metadata.dataStreamAliases().get(name))
.filter(dataStreamAlias -> dataStreamAlias.getFilter(dataStreamName) != null)
.map(dataStreamAlias -> {
.map(name -> dsAliases.get(name).getFilter(dataStreamName))
.filter(Objects::nonNull)
.map(filter -> {
try {
return filterParser.apply(dataStreamAlias.getFilter(dataStreamName).uncompressed());
return parseFilter(filter.uncompressed());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@@ -1748,7 +1741,13 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String
}
} else {
IndexMetadata indexMetadata = metadata.index(index);
return AliasFilter.of(ShardSearchRequest.parseAliasFilter(filterParser, indexMetadata, aliases), aliases);
return AliasFilter.of(ShardSearchRequest.parseAliasFilter(this::parseFilter, indexMetadata, aliases), aliases);
}
}

private QueryBuilder parseFilter(BytesReference bytes) throws IOException {
try (XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, bytes, XContentHelper.xContentType(bytes))) {
return parseTopLevelQuery(parser);
}
}

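The IndicesService change above replaces a parsing lambda built inside buildAliasFilter (whose removed comment explained that the static parseAliasFilter helper has no access to the parsing machinery and therefore receives a function) with a private parseFilter method passed as a method reference. Below is a minimal, hedged sketch of that shape only, using hypothetical stand-in types rather than BytesReference, QueryBuilder, or the real ShardSearchRequest API.

// Illustrative sketch only: hypothetical stand-in types, not the Elasticsearch API.
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ParserFunctionSketch {

    // Stand-in for CheckedFunction<BytesReference, QueryBuilder, IOException>.
    @FunctionalInterface
    interface CheckedFunction<T, R, E extends Exception> {
        R apply(T t) throws E;
    }

    // Stand-in for the static helper (in the spirit of ShardSearchRequest.parseAliasFilter):
    // being static, it cannot reach the parsing machinery itself, so a parser function is passed in.
    static String parseAliasFilter(CheckedFunction<byte[], String, IOException> parser, byte[] source) throws IOException {
        return parser.apply(source);
    }

    // Extracted helper, analogous to the new private parseFilter(BytesReference) method:
    // the parsing logic is declared once instead of as a lambda at each call site.
    private String parseFilter(byte[] bytes) throws IOException {
        return new String(bytes, StandardCharsets.UTF_8);
    }

    String buildAliasFilter(byte[] source) throws IOException {
        // Pass a method reference rather than rebuilding the same lambda here.
        return parseAliasFilter(this::parseFilter, source);
    }

    public static void main(String[] args) throws IOException {
        byte[] source = "{\"term\":{\"field\":\"value\"}}".getBytes(StandardCharsets.UTF_8);
        System.out.println(new ParserFunctionSketch().buildAliasFilter(source));
    }
}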