Skip to content

Commit 4be00e0

Browse files
committed
Merge branch 'master' into compile-with-jdk-9
* master: test: replaced try-catch statements with expectThrows(...) Add getWarmer and getTranslog method to NodeIndicesStats (#28092) fix doc mistake Added ASN support for Ingest GeoIP plugin. Fix global aggregation that requires breadth first and scores (#27942) Introduce Gradle wrapper Ignore GIT_COMMIT when calculating commit hash Re-enable bwc tests after #27881 was backported
2 parents 4e137e5 + fdb9b50 commit 4be00e0

File tree

19 files changed

+631
-74
lines changed

19 files changed

+631
-74
lines changed

.gitignore

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,11 +20,6 @@ nbactions.xml
2020
.gradle/
2121
build/
2222

23-
# gradle wrapper
24-
/gradle/
25-
gradlew
26-
gradlew.bat
27-
2823
# maven stuff (to be removed when trunk becomes 4.x)
2924
*-execution-hints.log
3025
target/

CONTRIBUTING.md

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -99,9 +99,8 @@ However, since Elasticsearch supports JDK 8, the build supports compiling with
9999
JDK 9 and testing on a JDK 8 runtime; to do this, set `JAVA_8_HOME` pointing to
100100
the Java home of a JDK 8 installation.
101101

102-
Make sure you have [Gradle](http://gradle.org) installed, as
103-
Elasticsearch uses it as its build system. Gradle must be at least
104-
version 4.3 in order to build successfully.
102+
Elasticsearch uses the Gradle wrapper for its build. You can execute Gradle
103+
using the wrapper via the `gradlew` script in the root of the repository.
105104

106105
We support development in the Eclipse and IntelliJ IDEs. For Eclipse, the
107106
minimum version that we support is [Eclipse Oxygen][eclipse] (version 4.7). For

build.gradle

Lines changed: 28 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,13 @@ import org.elasticsearch.gradle.VersionCollection
2525
import org.elasticsearch.gradle.VersionProperties
2626
import org.gradle.plugins.ide.eclipse.model.SourceFolder
2727

28+
import org.gradle.api.tasks.wrapper.Wrapper
29+
import org.gradle.api.tasks.wrapper.Wrapper.DistributionType
30+
import org.gradle.util.GradleVersion
31+
import org.gradle.util.DistributionLocator
32+
2833
import java.nio.file.Path
34+
import java.security.MessageDigest
2935

3036
// common maven publishing configuration
3137
subprojects {
@@ -139,8 +145,7 @@ task verifyVersions {
139145
* after the backport of the backcompat code is complete.
140146
*/
141147
allprojects {
142-
// TODO: re-enable after https://github.com/elastic/elasticsearch/pull/27881 is backported
143-
ext.bwc_tests_enabled = false
148+
ext.bwc_tests_enabled = true
144149
}
145150

146151
task verifyBwcTestsEnabled {
@@ -403,6 +408,27 @@ task run(type: Run) {
403408
impliesSubProjects = true
404409
}
405410

411+
task wrapper(type: Wrapper)
412+
413+
gradle.projectsEvaluated {
414+
415+
allprojects {
416+
tasks.withType(Wrapper) { Wrapper wrapper ->
417+
wrapper.distributionType = DistributionType.ALL
418+
419+
wrapper.doLast {
420+
final DistributionLocator locator = new DistributionLocator()
421+
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
422+
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
423+
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
424+
final String sha256Sum = new String(sha256Uri.toURL().bytes)
425+
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
426+
}
427+
}
428+
}
429+
430+
}
431+
406432
/* Remove assemble on all qa projects because we don't need to publish
407433
* artifacts for them. */
408434
gradle.projectsEvaluated {

buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@ package org.elasticsearch.gradle
2121
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
2222
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
2323
import org.apache.tools.ant.taskdefs.condition.Os
24+
import org.eclipse.jgit.lib.Constants
25+
import org.eclipse.jgit.lib.RepositoryBuilder
2426
import org.elasticsearch.gradle.precommit.PrecommitTasks
2527
import org.gradle.api.GradleException
2628
import org.gradle.api.InvalidUserDataException
@@ -529,6 +531,17 @@ class BuildPlugin implements Plugin<Project> {
529531
if (jarTask.manifest.attributes.containsKey('Change') == false) {
530532
logger.warn('Building without git revision id.')
531533
jarTask.manifest.attributes('Change': 'Unknown')
534+
} else {
535+
/*
536+
* The info-scm plugin assumes that if GIT_COMMIT is set it was set by Jenkins to the commit hash for this build.
537+
* However, that assumption is wrong as this build could be a sub-build of another Jenkins build for which GIT_COMMIT
538+
* is the commit hash for that build. Therefore, if GIT_COMMIT is set we calculate the commit hash ourselves.
539+
*/
540+
if (System.getenv("GIT_COMMIT") != null) {
541+
final String hash = new RepositoryBuilder().findGitDir(project.buildDir).build().resolve(Constants.HEAD).name
542+
final String shortHash = hash?.substring(0, 7)
543+
jarTask.manifest.attributes('Change': shortHash)
544+
}
532545
}
533546
}
534547
// add license/notice files

core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
import org.elasticsearch.common.io.stream.StreamInput;
2727
import org.elasticsearch.common.io.stream.StreamOutput;
2828
import org.elasticsearch.common.io.stream.Streamable;
29-
import org.elasticsearch.common.xcontent.ToXContent.Params;
3029
import org.elasticsearch.common.xcontent.ToXContentFragment;
3130
import org.elasticsearch.common.xcontent.XContentBuilder;
3231
import org.elasticsearch.index.Index;
@@ -43,6 +42,8 @@
4342
import org.elasticsearch.index.shard.DocsStats;
4443
import org.elasticsearch.index.shard.IndexingStats;
4544
import org.elasticsearch.index.store.StoreStats;
45+
import org.elasticsearch.index.translog.TranslogStats;
46+
import org.elasticsearch.index.warmer.WarmerStats;
4647
import org.elasticsearch.search.suggest.completion.CompletionStats;
4748

4849
import java.io.IOException;
@@ -117,6 +118,11 @@ public FlushStats getFlush() {
117118
return stats.getFlush();
118119
}
119120

121+
@Nullable
122+
public WarmerStats getWarmer() {
123+
return stats.getWarmer();
124+
}
125+
120126
@Nullable
121127
public FieldDataStats getFieldData() {
122128
return stats.getFieldData();
@@ -142,6 +148,11 @@ public SegmentsStats getSegments() {
142148
return stats.getSegments();
143149
}
144150

151+
@Nullable
152+
public TranslogStats getTranslog() {
153+
return stats.getTranslog();
154+
}
155+
145156
@Nullable
146157
public RecoveryStats getRecoveryStats() {
147158
return stats.getRecoveryStats();

core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121

2222
import org.apache.lucene.index.LeafReaderContext;
2323
import org.apache.lucene.search.DocIdSetIterator;
24+
import org.apache.lucene.search.MatchAllDocsQuery;
25+
import org.apache.lucene.search.Query;
2426
import org.apache.lucene.search.Scorer;
2527
import org.apache.lucene.search.Weight;
2628
import org.apache.lucene.util.packed.PackedInts;
@@ -59,16 +61,22 @@ private static class Entry {
5961
final List<Entry> entries = new ArrayList<>();
6062
BucketCollector collector;
6163
final SearchContext searchContext;
64+
final boolean isGlobal;
6265
LeafReaderContext context;
6366
PackedLongValues.Builder docDeltas;
6467
PackedLongValues.Builder buckets;
6568
long maxBucket = -1;
6669
boolean finished = false;
6770
LongHash selectedBuckets;
6871

69-
/** Sole constructor. */
70-
public BestBucketsDeferringCollector(SearchContext context) {
72+
/**
73+
* Sole constructor.
74+
* @param context The search context
75+
* @param isGlobal Whether this collector visits all documents (global context)
76+
*/
77+
public BestBucketsDeferringCollector(SearchContext context, boolean isGlobal) {
7178
this.searchContext = context;
79+
this.isGlobal = isGlobal;
7280
}
7381

7482
@Override
@@ -144,11 +152,11 @@ public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
144152
}
145153
this.selectedBuckets = hash;
146154

147-
boolean needsScores = collector.needsScores();
155+
boolean needsScores = needsScores();
148156
Weight weight = null;
149157
if (needsScores) {
150-
weight = searchContext.searcher()
151-
.createNormalizedWeight(searchContext.query(), true);
158+
Query query = isGlobal ? new MatchAllDocsQuery() : searchContext.query();
159+
weight = searchContext.searcher().createNormalizedWeight(query, true);
152160
}
153161
for (Entry entry : entries) {
154162
final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context);

core/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
import org.elasticsearch.search.aggregations.Aggregator;
2323
import org.elasticsearch.search.aggregations.AggregatorFactories;
2424
import org.elasticsearch.search.aggregations.BucketCollector;
25+
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
2526
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
2627
import org.elasticsearch.search.internal.SearchContext;
2728

@@ -61,10 +62,20 @@ protected void doPreCollection() throws IOException {
6162
collectableSubAggregators = BucketCollector.wrap(collectors);
6263
}
6364

65+
public static boolean descendsFromGlobalAggregator(Aggregator parent) {
66+
while (parent != null) {
67+
if (parent.getClass() == GlobalAggregator.class) {
68+
return true;
69+
}
70+
parent = parent.parent();
71+
}
72+
return false;
73+
}
74+
6475
public DeferringBucketCollector getDeferringCollector() {
6576
// Default impl is a collector that selects the best buckets
6677
// but an alternative defer policy may be based on best docs.
67-
return new BestBucketsDeferringCollector(context());
78+
return new BestBucketsDeferringCollector(context(), descendsFromGlobalAggregator(parent()));
6879
}
6980

7081
/**
@@ -74,7 +85,7 @@ public DeferringBucketCollector getDeferringCollector() {
7485
* recording of all doc/bucketIds from the first pass and then the sub class
7586
* should call {@link #runDeferredCollections(long...)} for the selected set
7687
* of buckets that survive the pruning.
77-
*
88+
*
7889
* @param aggregator
7990
* the child aggregator
8091
* @return true if the aggregator should be deferred until a first pass at

core/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@
2727
import org.apache.lucene.index.RandomIndexWriter;
2828
import org.apache.lucene.index.Term;
2929
import org.apache.lucene.search.IndexSearcher;
30+
import org.apache.lucene.search.MatchAllDocsQuery;
31+
import org.apache.lucene.search.Query;
3032
import org.apache.lucene.search.ScoreDoc;
3133
import org.apache.lucene.search.TermQuery;
3234
import org.apache.lucene.search.TopDocs;
@@ -41,6 +43,8 @@
4143
import java.util.HashSet;
4244
import java.util.Set;
4345

46+
import static org.mockito.Mockito.when;
47+
4448
public class BestBucketsDeferringCollectorTests extends AggregatorTestCase {
4549

4650
public void testReplay() throws Exception {
@@ -59,17 +63,38 @@ public void testReplay() throws Exception {
5963
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
6064

6165
TermQuery termQuery = new TermQuery(new Term("field", String.valueOf(randomInt(maxNumValues))));
66+
Query rewrittenQuery = indexSearcher.rewrite(termQuery);
6267
TopDocs topDocs = indexSearcher.search(termQuery, numDocs);
6368

6469
SearchContext searchContext = createSearchContext(indexSearcher, createIndexSettings());
65-
BestBucketsDeferringCollector collector = new BestBucketsDeferringCollector(searchContext);
70+
when(searchContext.query()).thenReturn(rewrittenQuery);
71+
BestBucketsDeferringCollector collector = new BestBucketsDeferringCollector(searchContext, false) {
72+
@Override
73+
public boolean needsScores() {
74+
return true;
75+
}
76+
};
6677
Set<Integer> deferredCollectedDocIds = new HashSet<>();
6778
collector.setDeferredCollector(Collections.singleton(bla(deferredCollectedDocIds)));
6879
collector.preCollection();
6980
indexSearcher.search(termQuery, collector);
7081
collector.postCollection();
7182
collector.replay(0);
7283

84+
assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size());
85+
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
86+
assertTrue("expected docid [" + scoreDoc.doc + "] is missing", deferredCollectedDocIds.contains(scoreDoc.doc));
87+
}
88+
89+
topDocs = indexSearcher.search(new MatchAllDocsQuery(), numDocs);
90+
collector = new BestBucketsDeferringCollector(searchContext, true);
91+
deferredCollectedDocIds = new HashSet<>();
92+
collector.setDeferredCollector(Collections.singleton(bla(deferredCollectedDocIds)));
93+
collector.preCollection();
94+
indexSearcher.search(new MatchAllDocsQuery(), collector);
95+
collector.postCollection();
96+
collector.replay(0);
97+
7398
assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size());
7499
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
75100
assertTrue("expected docid [" + scoreDoc.doc + "] is missing", deferredCollectedDocIds.contains(scoreDoc.doc));

core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,14 +46,21 @@
4646
import org.elasticsearch.index.mapper.NumberFieldMapper;
4747
import org.elasticsearch.index.query.QueryBuilders;
4848
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
49+
import org.elasticsearch.search.SearchHit;
4950
import org.elasticsearch.search.aggregations.AggregationBuilder;
5051
import org.elasticsearch.search.aggregations.AggregationBuilders;
5152
import org.elasticsearch.search.aggregations.Aggregator;
5253
import org.elasticsearch.search.aggregations.AggregatorTestCase;
5354
import org.elasticsearch.search.aggregations.BucketOrder;
5455
import org.elasticsearch.search.aggregations.InternalAggregation;
56+
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
57+
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
5558
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
5659
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
60+
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
61+
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
62+
import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
63+
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
5764
import org.elasticsearch.search.aggregations.support.ValueType;
5865

5966
import java.io.IOException;
@@ -67,6 +74,8 @@
6774
import java.util.function.BiFunction;
6875
import java.util.function.Function;
6976

77+
import static org.hamcrest.Matchers.equalTo;
78+
import static org.hamcrest.Matchers.greaterThan;
7079
import static org.hamcrest.Matchers.instanceOf;
7180

7281
public class TermsAggregatorTests extends AggregatorTestCase {
@@ -933,6 +942,63 @@ public void testMixLongAndDouble() throws Exception {
933942
}
934943
}
935944

945+
public void testGlobalAggregationWithScore() throws IOException {
946+
try (Directory directory = newDirectory()) {
947+
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
948+
Document document = new Document();
949+
document.add(new SortedDocValuesField("keyword", new BytesRef("a")));
950+
indexWriter.addDocument(document);
951+
document = new Document();
952+
document.add(new SortedDocValuesField("keyword", new BytesRef("c")));
953+
indexWriter.addDocument(document);
954+
document = new Document();
955+
document.add(new SortedDocValuesField("keyword", new BytesRef("e")));
956+
indexWriter.addDocument(document);
957+
try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) {
958+
IndexSearcher indexSearcher = newIndexSearcher(indexReader);
959+
String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString();
960+
Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values());
961+
GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global")
962+
.subAggregation(
963+
new TermsAggregationBuilder("terms", ValueType.STRING)
964+
.executionHint(executionHint)
965+
.collectMode(collectionMode)
966+
.field("keyword")
967+
.order(BucketOrder.key(true))
968+
.subAggregation(
969+
new TermsAggregationBuilder("sub_terms", ValueType.STRING)
970+
.executionHint(executionHint)
971+
.collectMode(collectionMode)
972+
.field("keyword").order(BucketOrder.key(true))
973+
.subAggregation(
974+
new TopHitsAggregationBuilder("top_hits")
975+
.storedField("_none_")
976+
)
977+
)
978+
);
979+
980+
MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
981+
fieldType.setName("keyword");
982+
fieldType.setHasDocValues(true);
983+
984+
InternalGlobal result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), globalBuilder, fieldType);
985+
InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
986+
assertThat(terms.getBuckets().size(), equalTo(3));
987+
for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) {
988+
InternalMultiBucketAggregation<?, ?> subTerms = bucket.getAggregations().get("sub_terms");
989+
assertThat(subTerms.getBuckets().size(), equalTo(1));
990+
MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0);
991+
InternalTopHits topHits = subBucket.getAggregations().get("top_hits");
992+
assertThat(topHits.getHits().getHits().length, equalTo(1));
993+
for (SearchHit hit : topHits.getHits()) {
994+
assertThat(hit.getScore(), greaterThan(0f));
995+
}
996+
}
997+
}
998+
}
999+
}
1000+
}
1001+
9361002
private IndexReader createIndexWithLongs() throws IOException {
9371003
Directory directory = newDirectory();
9381004
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

0 commit comments

Comments (0)