22 | 22 | import com.carrotsearch.hppc.ObjectHashSet;
23 | 23 | import com.carrotsearch.hppc.cursors.ObjectCursor;
24 | 24 | import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
25 |    | -
26 | 25 | import org.apache.logging.log4j.Logger;
27 | 26 | import org.apache.lucene.util.CollectionUtil;
28 | 27 | import org.elasticsearch.cluster.Diff;

33 | 32 | import org.elasticsearch.cluster.block.ClusterBlock;
34 | 33 | import org.elasticsearch.cluster.block.ClusterBlockLevel;
35 | 34 | import org.elasticsearch.common.Nullable;
   | 35 | +import org.elasticsearch.common.Strings;
36 | 36 | import org.elasticsearch.common.UUIDs;
37 | 37 | import org.elasticsearch.common.collect.HppcMaps;
38 | 38 | import org.elasticsearch.common.collect.ImmutableOpenMap;

62 | 62 | import java.util.Comparator;
63 | 63 | import java.util.EnumSet;
64 | 64 | import java.util.HashMap;
   | 65 | +import java.util.HashSet;
65 | 66 | import java.util.Iterator;
66 | 67 | import java.util.List;
67 | 68 | import java.util.Map;
   | 69 | +import java.util.Set;
68 | 70 | import java.util.SortedMap;
69 | 71 | import java.util.TreeMap;
70 | 72 |
@@ -914,55 +916,70 @@ public MetaData build() {
914 | 916 |     // while these datastructures aren't even used.
915 | 917 |     // 2) The aliasAndIndexLookup can be updated instead of rebuilding it all the time.
916 | 918 |
917 |     | -    // build all concrete indices arrays:
918 |     | -    // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
919 |     | -    // When doing an operation across all indices, most of the time is spent on actually going to all shards and
920 |     | -    // do the required operations, the bottleneck isn't resolving expressions into concrete indices.
921 |     | -    List<String> allIndicesLst = new ArrayList<>();
    | 919 | +    final Set<String> allIndices = new HashSet<>(indices.size());
    | 920 | +    final List<String> allOpenIndices = new ArrayList<>();
    | 921 | +    final List<String> allClosedIndices = new ArrayList<>();
    | 922 | +    final Set<String> duplicateAliasesIndices = new HashSet<>();
922 | 923 |     for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
923 |     | -        allIndicesLst.add(cursor.value.getIndex().getName());
924 |     | -    }
925 |     | -    String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]);
926 |     | -
927 |     | -    List<String> allOpenIndicesLst = new ArrayList<>();
928 |     | -    List<String> allClosedIndicesLst = new ArrayList<>();
929 |     | -    for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
930 |     | -        IndexMetaData indexMetaData = cursor.value;
    | 924 | +        final IndexMetaData indexMetaData = cursor.value;
    | 925 | +        final String name = indexMetaData.getIndex().getName();
    | 926 | +        boolean added = allIndices.add(name);
    | 927 | +        assert added : "double index named [" + name + "]";
931 | 928 |         if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
932 |     | -            allOpenIndicesLst.add(indexMetaData.getIndex().getName());
    | 929 | +            allOpenIndices.add(indexMetaData.getIndex().getName());
933 | 930 |         } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
934 |     | -            allClosedIndicesLst.add(indexMetaData.getIndex().getName());
    | 931 | +            allClosedIndices.add(indexMetaData.getIndex().getName());
    | 932 | +        }
    | 933 | +        indexMetaData.getAliases().keysIt().forEachRemaining(duplicateAliasesIndices::add);
    | 934 | +    }
    | 935 | +    duplicateAliasesIndices.retainAll(allIndices);
    | 936 | +    if (duplicateAliasesIndices.isEmpty() == false) {
    | 937 | +        // iterate again and constructs a helpful message
    | 938 | +        ArrayList<String> duplicates = new ArrayList<>();
    | 939 | +        for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
    | 940 | +            for (String alias: duplicateAliasesIndices) {
    | 941 | +                if (cursor.value.getAliases().containsKey(alias)) {
    | 942 | +                    duplicates.add(alias + " (alias of " + cursor.value.getIndex() + ")");
    | 943 | +                }
    | 944 | +            }
935 | 945 |         }
    | 946 | +        assert duplicates.size() > 0;
    | 947 | +        throw new IllegalStateException("index and alias names need to be unique, but the following duplicates were found ["
    | 948 | +            + Strings.collectionToCommaDelimitedString(duplicates)+ "]");
    | 949 | +
936 | 950 |     }
937 |     | -    String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]);
938 |     | -    String[] allClosedIndices = allClosedIndicesLst.toArray(new String[allClosedIndicesLst.size()]);
939 | 951 |
940 | 952 |     // build all indices map
941 | 953 |     SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>();
942 | 954 |     for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
943 | 955 |         IndexMetaData indexMetaData = cursor.value;
944 |     | -        aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
    | 956 | +        AliasOrIndex existing = aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
    | 957 | +        assert existing == null : "duplicate for " + indexMetaData.getIndex();
945 | 958 |
946 | 959 |         for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) {
947 | 960 |             AliasMetaData aliasMetaData = aliasCursor.value;
948 |     | -            AliasOrIndex aliasOrIndex = aliasAndIndexLookup.get(aliasMetaData.getAlias());
949 |     | -            if (aliasOrIndex == null) {
950 |     | -                aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
951 |     | -                aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex);
952 |     | -            } else if (aliasOrIndex instanceof AliasOrIndex.Alias) {
953 |     | -                AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex;
954 |     | -                alias.addIndex(indexMetaData);
955 |     | -            } else if (aliasOrIndex instanceof AliasOrIndex.Index) {
956 |     | -                AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex;
957 |     | -                throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name");
958 |     | -            } else {
959 |     | -                throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]");
960 |     | -            }
    | 961 | +            aliasAndIndexLookup.compute(aliasMetaData.getAlias(), (aliasName, alias) -> {
    | 962 | +                if (alias == null) {
    | 963 | +                    return new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
    | 964 | +                } else {
    | 965 | +                    assert alias instanceof AliasOrIndex.Alias : alias.getClass().getName();
    | 966 | +                    ((AliasOrIndex.Alias) alias).addIndex(indexMetaData);
    | 967 | +                    return alias;
    | 968 | +                }
    | 969 | +            });
961 | 970 |         }
962 | 971 |     }
963 | 972 |     aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup);
    | 973 | +    // build all concrete indices arrays:
    | 974 | +    // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
    | 975 | +    // When doing an operation across all indices, most of the time is spent on actually going to all shards and
    | 976 | +    // do the required operations, the bottleneck isn't resolving expressions into concrete indices.
    | 977 | +    String[] allIndicesArray = allIndices.toArray(new String[allIndices.size()]);
    | 978 | +    String[] allOpenIndicesArray = allOpenIndices.toArray(new String[allOpenIndices.size()]);
    | 979 | +    String[] allClosedIndicesArray = allClosedIndices.toArray(new String[allClosedIndices.size()]);
    | 980 | +
964 | 981 |     return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(),
965 |     | -        customs.build(), allIndices, allOpenIndices, allClosedIndices, aliasAndIndexLookup);
    | 982 | +        customs.build(), allIndicesArray, allOpenIndicesArray, allClosedIndicesArray, aliasAndIndexLookup);
966 | 983 | }
967 | 984 |
968 | 985 | public static String toXContent(MetaData metaData) throws IOException {
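The heart of the build() change is the uniqueness check: every index name goes into a HashSet, every alias name into a second set, and Set.retainAll then leaves exactly the names that are used both as an index and as an alias; only when that set is non-empty does a second pass assemble the detailed error message. A minimal, self-contained sketch of the same technique, using plain JDK collections and hypothetical sample data instead of the Elasticsearch IndexMetaData/AliasMetaData types:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class DuplicateNameCheck {
        public static void main(String[] args) {
            // Hypothetical sample data: index name -> alias names. "logs" is both
            // an index name and an alias of "metrics", so it must be rejected.
            Map<String, List<String>> indexToAliases = Map.of(
                "logs", List.of("search-all"),
                "metrics", List.of("logs", "search-all"));

            Set<String> allIndices = new HashSet<>();
            Set<String> duplicateAliasesIndices = new HashSet<>();
            for (Map.Entry<String, List<String>> entry : indexToAliases.entrySet()) {
                boolean added = allIndices.add(entry.getKey());
                assert added : "double index named [" + entry.getKey() + "]";
                duplicateAliasesIndices.addAll(entry.getValue());
            }

            // Keep only the alias names that collide with an index name.
            duplicateAliasesIndices.retainAll(allIndices);
            if (duplicateAliasesIndices.isEmpty() == false) {
                // Second pass only on failure, to build a descriptive message.
                List<String> duplicates = new ArrayList<>();
                for (Map.Entry<String, List<String>> entry : indexToAliases.entrySet()) {
                    for (String alias : duplicateAliasesIndices) {
                        if (entry.getValue().contains(alias)) {
                            duplicates.add(alias + " (alias of " + entry.getKey() + ")");
                        }
                    }
                }
                throw new IllegalStateException("index and alias names need to be unique, but the following duplicates were found ["
                    + String.join(", ", duplicates) + "]");
            }
        }
    }

As in the diff, the doubled-index assert only fires when assertions are enabled (-ea); the alias/index collision, by contrast, always throws.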
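The alias lookup is also reworked: the old get/instanceof/put chain becomes a single Map.compute call that creates the Alias entry the first time an alias is seen and otherwise folds the current index into the existing entry. The same pattern in isolation, with plain strings standing in for the AliasOrIndex types (all names here are illustrative only):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class ComputeGrouping {
        public static void main(String[] args) {
            // alias -> indices that carry it, built the same way the lookup map
            // folds extra indices into an existing alias entry.
            Map<String, List<String>> aliasToIndices = new TreeMap<>();
            String[][] indexAndAlias = {{"logs-1", "logs"}, {"logs-2", "logs"}, {"metrics-1", "metrics"}};

            for (String[] pair : indexAndAlias) {
                String index = pair[0];
                String alias = pair[1];
                aliasToIndices.compute(alias, (aliasName, indices) -> {
                    if (indices == null) {
                        // first time this alias is seen: create the entry
                        List<String> created = new ArrayList<>();
                        created.add(index);
                        return created;
                    } else {
                        // alias already known: add the index to the existing entry
                        indices.add(index);
                        return indices;
                    }
                });
            }

            System.out.println(aliasToIndices); // {logs=[logs-1, logs-2], metrics=[metrics-1]}
        }
    }

compute keeps the create-or-update decision in one place and touches the map once per alias; and because the earlier uniqueness check already rejects an alias that shares a name with an index, the instanceof branch that used to throw can be reduced to an assert in the diff.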