Skip to content

Improve resiliency to auto-formatting in server #48940

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Nov 11, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,6 @@ subprojects {

spotless {
java {

removeUnusedImports()
eclipse().configFile rootProject.file('.eclipseformat.xml')
trimTrailingWhitespace()
Expand Down
2 changes: 1 addition & 1 deletion buildSrc/src/main/resources/checkstyle_suppressions.xml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
configuration of classes that aren't in packages. -->
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]Dummy.java" checks="PackageDeclaration" />

<!-- Intentionally has long example curl commands to coinncide with sibling Painless tests. -->
<!-- Intentionally has long example curl commands to coincide with sibling Painless tests. -->
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />

<!--
Expand Down
16 changes: 16 additions & 0 deletions server/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,22 @@ dependencies {
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"

// Until this project is always being formatted with spotless, we need to
// guard against `spotless()` not existing.
try {
  spotless {
    java {
      // Contains large data tables that do not format well.
      targetExclude 'src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java'
    }
  }
}
catch (Exception e) {
  // Throwable#getMessage() may legitimately return null; calling contains()
  // on it directly would throw an NPE here and mask the original exception.
  // Only swallow the specific "spotless DSL not available" failure.
  String message = e.getMessage();
  if (message == null || message.contains("Could not find method spotless") == false) {
    throw e;
  }
}

forbiddenPatterns {
exclude '**/*.json'
exclude '**/*.jmx'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,25 +145,31 @@ public enum QueryType {
INTERSECTS {
@Override
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
// part of the other range must touch this range
// this: |---------------|
// other: |------|
/*
* part of the other range must touch this range
* this: |---------------|
* other: |------|
*/
return from.compareTo(otherTo) <= 0 && to.compareTo(otherFrom) >= 0;
}
}, WITHIN {
@Override
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
// other range must entirely lie within this range
// this: |---------------|
// other: |------|
/*
* other range must entirely lie within this range
* this: |---------------|
* other: |------|
*/
return from.compareTo(otherFrom) <= 0 && to.compareTo(otherTo) >= 0;
}
}, CONTAINS {
@Override
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
// this and other range must overlap
// this: |------|
// other: |---------------|
/*
* this and other range must overlap
* this: |------|
* other: |---------------|
*/
return from.compareTo(otherFrom) >= 0 && to.compareTo(otherTo) <= 0;
}
}, CROSSES {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,10 +110,12 @@ private void innerRun() throws IOException {
} else {
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
if (scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
// no docs to fetch -- sidestep everything and return
if (scoreDocs.length == 0) {
// we have to release contexts here to free up resources
phaseResults.stream()
.map(SearchPhaseResult::queryResult)
.forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources
.forEach(this::releaseIrrelevantSearchContext);
finishPhase.run();
} else {
final ScoreDoc[] lastEmittedDocPerShard = isScrollSearch ?
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,10 +117,11 @@ public RoutingNodes(ClusterState clusterState, boolean readOnly) {
assignedShardsAdd(shard);
if (shard.relocating()) {
relocatingShards++;
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
k -> new LinkedHashMap<>()); // LinkedHashMap to preserve order
// add the counterpart shard with relocatingNodeId reflecting the source from which
// LinkedHashMap to preserve order.
// Add the counterpart shard with relocatingNodeId reflecting the source from which
it's relocating.
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
k -> new LinkedHashMap<>());
ShardRouting targetShardRouting = shard.getTargetRelocatingShard();
addInitialRecovery(targetShardRouting, indexShard.primary);
previousValue = entries.put(targetShardRouting.shardId(), targetShardRouting);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,8 @@ public static void cleanLuceneIndex(Directory directory) throws IOException {
.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
.setMergePolicy(NoMergePolicy.INSTANCE) // no merges
.setCommitOnClose(false) // no commits
.setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
.setOpenMode(IndexWriterConfig.OpenMode.CREATE) // force creation - don't append...
))
{
// do nothing; closing this will kick off IndexFileDeleter, which will remove all pending files
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -514,15 +514,15 @@ public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Item)) return false;
Item other = (Item) o;
return Objects.equals(index, other.index) &&
Objects.equals(type, other.type) &&
Objects.equals(id, other.id) &&
Objects.equals(doc, other.doc) &&
Arrays.equals(fields, other.fields) && // otherwise we are comparing pointers
Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer) &&
Objects.equals(routing, other.routing) &&
Objects.equals(version, other.version) &&
Objects.equals(versionType, other.versionType);
return Objects.equals(index, other.index)
&& Objects.equals(type, other.type)
&& Objects.equals(id, other.id)
&& Objects.equals(doc, other.doc)
&& Arrays.equals(fields, other.fields) // otherwise we are comparing pointers
&& Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer)
&& Objects.equals(routing, other.routing)
&& Objects.equals(version, other.version)
&& Objects.equals(versionType, other.versionType);
}
}

Expand Down Expand Up @@ -1208,23 +1208,23 @@ protected int doHashCode() {

@Override
protected boolean doEquals(MoreLikeThisQueryBuilder other) {
return Arrays.equals(fields, other.fields) &&
Arrays.equals(likeTexts, other.likeTexts) &&
Arrays.equals(unlikeTexts, other.unlikeTexts) &&
Arrays.equals(likeItems, other.likeItems) &&
Arrays.equals(unlikeItems, other.unlikeItems) &&
Objects.equals(maxQueryTerms, other.maxQueryTerms) &&
Objects.equals(minTermFreq, other.minTermFreq) &&
Objects.equals(minDocFreq, other.minDocFreq) &&
Objects.equals(maxDocFreq, other.maxDocFreq) &&
Objects.equals(minWordLength, other.minWordLength) &&
Objects.equals(maxWordLength, other.maxWordLength) &&
Arrays.equals(stopWords, other.stopWords) && // otherwise we are comparing pointers
Objects.equals(analyzer, other.analyzer) &&
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
Objects.equals(boostTerms, other.boostTerms) &&
Objects.equals(include, other.include) &&
Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
return Arrays.equals(fields, other.fields)
&& Arrays.equals(likeTexts, other.likeTexts)
&& Arrays.equals(unlikeTexts, other.unlikeTexts)
&& Arrays.equals(likeItems, other.likeItems)
&& Arrays.equals(unlikeItems, other.unlikeItems)
&& Objects.equals(maxQueryTerms, other.maxQueryTerms)
&& Objects.equals(minTermFreq, other.minTermFreq)
&& Objects.equals(minDocFreq, other.minDocFreq)
&& Objects.equals(maxDocFreq, other.maxDocFreq)
&& Objects.equals(minWordLength, other.minWordLength)
&& Objects.equals(maxWordLength, other.maxWordLength)
&& Arrays.equals(stopWords, other.stopWords) // otherwise we are comparing pointers
&& Objects.equals(analyzer, other.analyzer)
&& Objects.equals(minimumShouldMatch, other.minimumShouldMatch)
&& Objects.equals(boostTerms, other.boostTerms)
&& Objects.equals(include, other.include)
&& Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,8 +243,8 @@ public synchronized void applyClusterState(final ClusterChangedEvent event) {
// TODO: feels hacky, a block disables state persistence, and then we clean the allocated shards, maybe another flag in blocks?
if (state.blocks().disableStatePersistence()) {
for (AllocatedIndex<? extends Shard> indexService : indicesService) {
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED,
"cleaning index (disabled block persistence)"); // also cleans shards
// also cleans shards
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED, "cleaning index (disabled block persistence)");
}
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -748,8 +748,22 @@ public static long memoryUsage(int precision) {
-527.016999999993, -664.681000000099, -680.306000000099, -704.050000000047, -850.486000000034, -757.43200000003,
-713.308999999892, } };

private static final long[] THRESHOLDS = new long[] { 10, 20, 40, 80, 220, 400, 900, 1800, 3100, 6500, 11500, 20000, 50000, 120000,
350000 };
private static final long[] THRESHOLDS = new long[] {
10,
20,
40,
80,
220,
400,
900,
1800,
3100,
6500,
11500,
20000,
50000,
120000,
350000 };

private final BigArrays bigArrays;
private final OpenBitSet algorithm;
Expand All @@ -773,15 +787,15 @@ public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucke
hashSet = new Hashset(initialBucketCount);
final double alpha;
switch (p) {
case 4:
alpha = 0.673;
break;
case 5:
alpha = 0.697;
break;
default:
alpha = 0.7213 / (1 + 1.079 / m);
break;
case 4:
alpha = 0.673;
break;
case 5:
alpha = 0.697;
break;
default:
alpha = 0.7213 / (1 + 1.079 / m);
break;
}
alphaMM = alpha * m * m;
}
Expand Down Expand Up @@ -1050,8 +1064,8 @@ public int hashCode(long bucket) {

public boolean equals(long bucket, HyperLogLogPlusPlus other) {
return Objects.equals(p, other.p)
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ public WordScorer(IndexReader reader, Terms terms, String field, double realWord
// division by zero, by scoreUnigram.
final long nTerms = terms.size();
this.numTerms = nTerms == -1 ? reader.maxDoc() : nTerms;
this.termsEnum = new FreqTermsEnum(reader, field, !useTotalTermFreq, useTotalTermFreq, null,
BigArrays.NON_RECYCLING_INSTANCE); // non recycling for now
// non recycling for now
this.termsEnum = new FreqTermsEnum(reader, field, !useTotalTermFreq, useTotalTermFreq, null, BigArrays.NON_RECYCLING_INSTANCE);
this.reader = reader;
this.realWordLikelihood = realWordLikelihood;
this.separator = separator;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -782,15 +782,15 @@ private static int readHeaderBuffer(BytesReference headerBuffer) throws IOExcept
}

private static boolean appearsToBeHTTPRequest(BytesReference headerBuffer) {
return bufferStartsWith(headerBuffer, "GET") ||
bufferStartsWith(headerBuffer, "POST") ||
bufferStartsWith(headerBuffer, "PUT") ||
bufferStartsWith(headerBuffer, "HEAD") ||
bufferStartsWith(headerBuffer, "DELETE") ||
return bufferStartsWith(headerBuffer, "GET")
|| bufferStartsWith(headerBuffer, "POST")
|| bufferStartsWith(headerBuffer, "PUT")
|| bufferStartsWith(headerBuffer, "HEAD")
|| bufferStartsWith(headerBuffer, "DELETE")
// Actually 'OPTIONS'. But we are only guaranteed to have read six bytes at this point.
bufferStartsWith(headerBuffer, "OPTION") ||
bufferStartsWith(headerBuffer, "PATCH") ||
bufferStartsWith(headerBuffer, "TRACE");
|| bufferStartsWith(headerBuffer, "OPTION")
|| bufferStartsWith(headerBuffer, "PATCH")
|| bufferStartsWith(headerBuffer, "TRACE");
}

private static boolean appearsToBeHTTPResponse(BytesReference headerBuffer) {
Expand Down
Loading