Skip to content

Commit 014e1b1

Browse files
authored
Improve resiliency to auto-formatting in server (#48940)
Backport of #48450. Make a number of changes so that code in the `server` directory is more resilient to automatic formatting. This covers: * Reformatting multiline JSON to embed whitespace in the strings * Move some comments around so they aren't auto-formatted to a strange place. This also required moving some `&&` and `||` operators from the end-of-line to start-of-line. * Add helper method `reformatJson()`, to strip whitespace from a JSON document using XContent methods. This is sometimes necessary where a test is comparing some machine-generated JSON with an expected value. Also, `HyperLogLogPlusPlus.java` is now excluded from formatting because it contains large data tables that don't reformat well with the current settings, and changing the settings would be worse for the rest of the codebase.
1 parent dd92830 commit 014e1b1

File tree

22 files changed

+148
-90
lines changed

22 files changed

+148
-90
lines changed

build.gradle

-1
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,6 @@ subprojects {
114114

115115
spotless {
116116
java {
117-
118117
removeUnusedImports()
119118
eclipse().configFile rootProject.file('.eclipseformat.xml')
120119
trimTrailingWhitespace()

buildSrc/src/main/resources/checkstyle_suppressions.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
configuration of classes that aren't in packages. -->
2222
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]Dummy.java" checks="PackageDeclaration" />
2323

24-
<!-- Intentionally has long example curl commands to coinncide with sibling Painless tests. -->
24+
<!-- Intentionally has long example curl commands to coincide with sibling Painless tests. -->
2525
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
2626

2727
<!--

server/build.gradle

+16
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,22 @@ dependencies {
140140
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
141141
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
142142

143+
// Until this project is always being formatted with spotless, we need to
144+
// guard against `spotless()` not existing.
145+
try {
146+
spotless {
147+
java {
148+
// Contains large data tables that do not format well.
149+
targetExclude 'src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java'
150+
}
151+
}
152+
}
153+
catch (Exception e) {
154+
if (e.getMessage().contains("Could not find method spotless") == false) {
155+
throw e;
156+
}
157+
}
158+
143159
forbiddenPatterns {
144160
exclude '**/*.json'
145161
exclude '**/*.jmx'

server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java

+15-9
Original file line numberDiff line numberDiff line change
@@ -145,25 +145,31 @@ public enum QueryType {
145145
INTERSECTS {
146146
@Override
147147
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
148-
// part of the other range must touch this range
149-
// this: |---------------|
150-
// other: |------|
148+
/*
149+
* part of the other range must touch this range
150+
* this: |---------------|
151+
* other: |------|
152+
*/
151153
return from.compareTo(otherTo) <= 0 && to.compareTo(otherFrom) >= 0;
152154
}
153155
}, WITHIN {
154156
@Override
155157
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
156-
// other range must entirely lie within this range
157-
// this: |---------------|
158-
// other: |------|
158+
/*
159+
* other range must entirely lie within this range
160+
* this: |---------------|
161+
* other: |------|
162+
*/
159163
return from.compareTo(otherFrom) <= 0 && to.compareTo(otherTo) >= 0;
160164
}
161165
}, CONTAINS {
162166
@Override
163167
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
164-
// this and other range must overlap
165-
// this: |------|
166-
// other: |---------------|
168+
/*
169+
* this and other range must overlap
170+
* this: |------|
171+
* other: |---------------|
172+
*/
167173
return from.compareTo(otherFrom) >= 0 && to.compareTo(otherTo) <= 0;
168174
}
169175
}, CROSSES {

server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java

+4-2
Original file line numberDiff line numberDiff line change
@@ -110,10 +110,12 @@ private void innerRun() throws IOException {
110110
} else {
111111
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
112112
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
113-
if (scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
113+
// no docs to fetch -- sidestep everything and return
114+
if (scoreDocs.length == 0) {
115+
// we have to release contexts here to free up resources
114116
phaseResults.stream()
115117
.map(SearchPhaseResult::queryResult)
116-
.forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources
118+
.forEach(this::releaseIrrelevantSearchContext);
117119
finishPhase.run();
118120
} else {
119121
final ScoreDoc[] lastEmittedDocPerShard = isScrollSearch ?

server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java

+4-3
Original file line numberDiff line numberDiff line change
@@ -117,10 +117,11 @@ public RoutingNodes(ClusterState clusterState, boolean readOnly) {
117117
assignedShardsAdd(shard);
118118
if (shard.relocating()) {
119119
relocatingShards++;
120-
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
121-
k -> new LinkedHashMap<>()); // LinkedHashMap to preserve order
122-
// add the counterpart shard with relocatingNodeId reflecting the source from which
120+
// LinkedHashMap to preserve order.
121+
// Add the counterpart shard with relocatingNodeId reflecting the source from which
123122
// it's relocating from.
123+
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
124+
k -> new LinkedHashMap<>());
124125
ShardRouting targetShardRouting = shard.getTargetRelocatingShard();
125126
addInitialRecovery(targetShardRouting, indexShard.primary);
126127
previousValue = entries.put(targetShardRouting.shardId(), targetShardRouting);

server/src/main/java/org/elasticsearch/common/lucene/Lucene.java

+2-1
Original file line numberDiff line numberDiff line change
@@ -256,7 +256,8 @@ public static void cleanLuceneIndex(Directory directory) throws IOException {
256256
.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
257257
.setMergePolicy(NoMergePolicy.INSTANCE) // no merges
258258
.setCommitOnClose(false) // no commits
259-
.setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
259+
.setOpenMode(IndexWriterConfig.OpenMode.CREATE) // force creation - don't append...
260+
))
260261
{
261262
// do nothing and close this will kick of IndexFileDeleter which will remove all pending files
262263
}

server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java

+26-26
Original file line numberDiff line numberDiff line change
@@ -514,15 +514,15 @@ public boolean equals(Object o) {
514514
if (this == o) return true;
515515
if (!(o instanceof Item)) return false;
516516
Item other = (Item) o;
517-
return Objects.equals(index, other.index) &&
518-
Objects.equals(type, other.type) &&
519-
Objects.equals(id, other.id) &&
520-
Objects.equals(doc, other.doc) &&
521-
Arrays.equals(fields, other.fields) && // otherwise we are comparing pointers
522-
Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer) &&
523-
Objects.equals(routing, other.routing) &&
524-
Objects.equals(version, other.version) &&
525-
Objects.equals(versionType, other.versionType);
517+
return Objects.equals(index, other.index)
518+
&& Objects.equals(type, other.type)
519+
&& Objects.equals(id, other.id)
520+
&& Objects.equals(doc, other.doc)
521+
&& Arrays.equals(fields, other.fields) // otherwise we are comparing pointers
522+
&& Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer)
523+
&& Objects.equals(routing, other.routing)
524+
&& Objects.equals(version, other.version)
525+
&& Objects.equals(versionType, other.versionType);
526526
}
527527
}
528528

@@ -1208,23 +1208,23 @@ protected int doHashCode() {
12081208

12091209
@Override
12101210
protected boolean doEquals(MoreLikeThisQueryBuilder other) {
1211-
return Arrays.equals(fields, other.fields) &&
1212-
Arrays.equals(likeTexts, other.likeTexts) &&
1213-
Arrays.equals(unlikeTexts, other.unlikeTexts) &&
1214-
Arrays.equals(likeItems, other.likeItems) &&
1215-
Arrays.equals(unlikeItems, other.unlikeItems) &&
1216-
Objects.equals(maxQueryTerms, other.maxQueryTerms) &&
1217-
Objects.equals(minTermFreq, other.minTermFreq) &&
1218-
Objects.equals(minDocFreq, other.minDocFreq) &&
1219-
Objects.equals(maxDocFreq, other.maxDocFreq) &&
1220-
Objects.equals(minWordLength, other.minWordLength) &&
1221-
Objects.equals(maxWordLength, other.maxWordLength) &&
1222-
Arrays.equals(stopWords, other.stopWords) && // otherwise we are comparing pointers
1223-
Objects.equals(analyzer, other.analyzer) &&
1224-
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
1225-
Objects.equals(boostTerms, other.boostTerms) &&
1226-
Objects.equals(include, other.include) &&
1227-
Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
1211+
return Arrays.equals(fields, other.fields)
1212+
&& Arrays.equals(likeTexts, other.likeTexts)
1213+
&& Arrays.equals(unlikeTexts, other.unlikeTexts)
1214+
&& Arrays.equals(likeItems, other.likeItems)
1215+
&& Arrays.equals(unlikeItems, other.unlikeItems)
1216+
&& Objects.equals(maxQueryTerms, other.maxQueryTerms)
1217+
&& Objects.equals(minTermFreq, other.minTermFreq)
1218+
&& Objects.equals(minDocFreq, other.minDocFreq)
1219+
&& Objects.equals(maxDocFreq, other.maxDocFreq)
1220+
&& Objects.equals(minWordLength, other.minWordLength)
1221+
&& Objects.equals(maxWordLength, other.maxWordLength)
1222+
&& Arrays.equals(stopWords, other.stopWords) // otherwise we are comparing pointers
1223+
&& Objects.equals(analyzer, other.analyzer)
1224+
&& Objects.equals(minimumShouldMatch, other.minimumShouldMatch)
1225+
&& Objects.equals(boostTerms, other.boostTerms)
1226+
&& Objects.equals(include, other.include)
1227+
&& Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
12281228
}
12291229

12301230
@Override

server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -243,8 +243,8 @@ public synchronized void applyClusterState(final ClusterChangedEvent event) {
243243
// TODO: feels hacky, a block disables state persistence, and then we clean the allocated shards, maybe another flag in blocks?
244244
if (state.blocks().disableStatePersistence()) {
245245
for (AllocatedIndex<? extends Shard> indexService : indicesService) {
246-
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED,
247-
"cleaning index (disabled block persistence)"); // also cleans shards
246+
// also cleans shards
247+
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED, "cleaning index (disabled block persistence)");
248248
}
249249
return;
250250
}

server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java

+27-13
Original file line numberDiff line numberDiff line change
@@ -748,8 +748,22 @@ public static long memoryUsage(int precision) {
748748
-527.016999999993, -664.681000000099, -680.306000000099, -704.050000000047, -850.486000000034, -757.43200000003,
749749
-713.308999999892, } };
750750

751-
private static final long[] THRESHOLDS = new long[] { 10, 20, 40, 80, 220, 400, 900, 1800, 3100, 6500, 11500, 20000, 50000, 120000,
752-
350000 };
751+
private static final long[] THRESHOLDS = new long[] {
752+
10,
753+
20,
754+
40,
755+
80,
756+
220,
757+
400,
758+
900,
759+
1800,
760+
3100,
761+
6500,
762+
11500,
763+
20000,
764+
50000,
765+
120000,
766+
350000 };
753767

754768
private final BigArrays bigArrays;
755769
private final OpenBitSet algorithm;
@@ -773,15 +787,15 @@ public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucke
773787
hashSet = new Hashset(initialBucketCount);
774788
final double alpha;
775789
switch (p) {
776-
case 4:
777-
alpha = 0.673;
778-
break;
779-
case 5:
780-
alpha = 0.697;
781-
break;
782-
default:
783-
alpha = 0.7213 / (1 + 1.079 / m);
784-
break;
790+
case 4:
791+
alpha = 0.673;
792+
break;
793+
case 5:
794+
alpha = 0.697;
795+
break;
796+
default:
797+
alpha = 0.7213 / (1 + 1.079 / m);
798+
break;
785799
}
786800
alphaMM = alpha * m * m;
787801
}
@@ -1050,8 +1064,8 @@ public int hashCode(long bucket) {
10501064

10511065
public boolean equals(long bucket, HyperLogLogPlusPlus other) {
10521066
return Objects.equals(p, other.p)
1053-
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
1054-
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
1067+
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
1068+
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
10551069
}
10561070

10571071
/**

server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -62,8 +62,8 @@ public WordScorer(IndexReader reader, Terms terms, String field, double realWord
6262
// division by zero, by scoreUnigram.
6363
final long nTerms = terms.size();
6464
this.numTerms = nTerms == -1 ? reader.maxDoc() : nTerms;
65-
this.termsEnum = new FreqTermsEnum(reader, field, !useTotalTermFreq, useTotalTermFreq, null,
66-
BigArrays.NON_RECYCLING_INSTANCE); // non recycling for now
65+
// non recycling for now
66+
this.termsEnum = new FreqTermsEnum(reader, field, !useTotalTermFreq, useTotalTermFreq, null, BigArrays.NON_RECYCLING_INSTANCE);
6767
this.reader = reader;
6868
this.realWordLikelihood = realWordLikelihood;
6969
this.separator = separator;

server/src/main/java/org/elasticsearch/transport/TcpTransport.java

+8-8
Original file line numberDiff line numberDiff line change
@@ -782,15 +782,15 @@ private static int readHeaderBuffer(BytesReference headerBuffer) throws IOExcept
782782
}
783783

784784
private static boolean appearsToBeHTTPRequest(BytesReference headerBuffer) {
785-
return bufferStartsWith(headerBuffer, "GET") ||
786-
bufferStartsWith(headerBuffer, "POST") ||
787-
bufferStartsWith(headerBuffer, "PUT") ||
788-
bufferStartsWith(headerBuffer, "HEAD") ||
789-
bufferStartsWith(headerBuffer, "DELETE") ||
785+
return bufferStartsWith(headerBuffer, "GET")
786+
|| bufferStartsWith(headerBuffer, "POST")
787+
|| bufferStartsWith(headerBuffer, "PUT")
788+
|| bufferStartsWith(headerBuffer, "HEAD")
789+
|| bufferStartsWith(headerBuffer, "DELETE")
790790
// Actually 'OPTIONS'. But we are only guaranteed to have read six bytes at this point.
791-
bufferStartsWith(headerBuffer, "OPTION") ||
792-
bufferStartsWith(headerBuffer, "PATCH") ||
793-
bufferStartsWith(headerBuffer, "TRACE");
791+
|| bufferStartsWith(headerBuffer, "OPTION")
792+
|| bufferStartsWith(headerBuffer, "PATCH")
793+
|| bufferStartsWith(headerBuffer, "TRACE");
794794
}
795795

796796
private static boolean appearsToBeHTTPResponse(BytesReference headerBuffer) {

0 commit comments

Comments
 (0)