Commit 062d1ae

Merge branch 'master' into java-home-sweet-java-home
* master:
  Enable skipping fetching latest for BWC builds (elastic#29497)
  Add remote cluster client (elastic#29495)
  Ensure flush happens on shard idle
  Adds SpanGapQueryBuilder in the query DSL (elastic#28636)
  Control max size and count of warning headers (elastic#28427)
  Make index APIs work without types. (elastic#29479)
  Deprecate filtering on `_type`. (elastic#29468)
  Fix auto-generated ID example format (elastic#29461)
  Fix typo in max number of threads check docs (elastic#29469)
  Add primary term to translog header (elastic#29227)
  Add a helper method to get a random java.util.TimeZone (elastic#29487)
  Move TimeValue into elasticsearch-core project (elastic#29486)
2 parents: d02143a + 03ce3dd, commit 062d1ae

95 files changed: +1682 additions, -664 deletions


TESTING.asciidoc

Lines changed: 7 additions & 0 deletions
@@ -498,6 +498,13 @@ will contain your change.
 . Push both branches to your remote repository.
 . Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.
 
+== Skip fetching latest
+
+For some BWC testing scenarios, you want to use the local clone of the
+repository without fetching latest. For these use cases, you can set the system
+property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip
+fetching the latest from the remote.
+
 == Test coverage analysis
 
 Generating test coverage reports for Elasticsearch is currently not possible through Gradle.

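Taken together with the existing BWC instructions above, a run that reuses the local clone without contacting the remote could look like this (a sketch; the remote and refspec values are the ones already used in the surrounding steps):

    ./gradlew check \
        -Dtests.bwc.remote=${remote} \
        -Dtests.bwc.refspec.5.x=index_req_bwc_5.x \
        -Dtests.bwc.git_fetch_latest=false    # skip `git fetch --all` in the BWC checkout
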
distribution/bwc/build.gradle

Lines changed: 11 additions & 1 deletion
@@ -54,6 +54,16 @@ subprojects {
 
   final String remote = System.getProperty("tests.bwc.remote", "elastic")
 
+  final boolean gitFetchLatest
+  final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
+  if ("true".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = true
+  } else if ("false".equals(gitFetchLatestProperty)) {
+    gitFetchLatest = false
+  } else {
+    throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
+  }
+
   task createClone(type: LoggedExec) {
     onlyIf { checkoutDir.exists() == false }
     commandLine = ['git', 'clone', rootDir, checkoutDir]
@@ -83,7 +93,7 @@ subprojects {
   }
 
   task fetchLatest(type: LoggedExec) {
-    onlyIf { project.gradle.startParameter.isOffline() == false }
+    onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
     dependsOn addRemote
     workingDir = checkoutDir
     commandLine = ['git', 'fetch', '--all']

docs/painless/painless-getting-started.asciidoc

Lines changed: 6 additions & 6 deletions
@@ -239,7 +239,7 @@ their last name:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",
@@ -260,7 +260,7 @@ names start with a consonant and end with a vowel:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",
@@ -281,7 +281,7 @@ remove all of the vowels in all of their last names:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",
@@ -297,7 +297,7 @@ method so it supports `$1` and `\1` for replacements:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",
@@ -319,7 +319,7 @@ This will make all of the vowels in the hockey player's last names upper case:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",
@@ -337,7 +337,7 @@ last names upper case:
 
 [source,js]
 ----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
 {
   "script": {
     "lang": "painless",

docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc

Lines changed: 5 additions & 5 deletions
@@ -38,7 +38,7 @@ Example:
 
 [source,js]
 --------------------------------------------------
-GET news/article/_search
+GET news/_search
 {
     "query" : {
         "match" : {"content" : "Bird flu"}
@@ -153,7 +153,7 @@ We can drill down into examples of these documents to see why pozmantier is conn
 
 [source,js]
 --------------------------------------------------
-GET news/article/_search
+GET news/_search
 {
     "query": {
         "simple_query_string": {
@@ -221,7 +221,7 @@ with the `filter_duplicate_text` setting turned on:
 
 [source,js]
 --------------------------------------------------
-GET news/article/_search
+GET news/_search
 {
     "query": {
         "match": {
@@ -424,7 +424,7 @@ context:
 
 [source,js]
 --------------------------------------------------
-GET news/article/_search
+GET news/_search
 {
     "query" : {
         "match" : {
@@ -463,7 +463,7 @@ will be analyzed using the `source_fields` parameter:
 
 [source,js]
 --------------------------------------------------
-GET news/article/_search
+GET news/_search
 {
     "query" : {
         "match" : {

docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc

Lines changed: 1 addition & 1 deletion
@@ -217,7 +217,7 @@ had a value.
 
 [source,js]
 --------------------------------------------------
-GET latency/data/_search
+GET latency/_search
 {
     "size": 0,
     "aggs" : {

docs/reference/docs/delete-by-query.asciidoc

Lines changed: 4 additions & 4 deletions
@@ -75,7 +75,7 @@ Back to the API format, this will delete tweets from the `twitter` index:
 
 [source,js]
 --------------------------------------------------
-POST twitter/_doc/_delete_by_query?conflicts=proceed
+POST twitter/_delete_by_query?conflicts=proceed
 {
   "query": {
     "match_all": {}
@@ -85,12 +85,12 @@ POST twitter/_doc/_delete_by_query?conflicts=proceed
 // CONSOLE
 // TEST[setup:twitter]
 
-It's also possible to delete documents of multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to delete documents of multiple indexes at once, just like
+the search API:
 
 [source,js]
 --------------------------------------------------
-POST twitter,blog/_docs,post/_delete_by_query
+POST twitter,blog/_delete_by_query
 {
   "query": {
     "match_all": {}

docs/reference/docs/index_.asciidoc

Lines changed: 2 additions & 2 deletions
@@ -229,14 +229,14 @@ The result of the above index operation is:
     },
     "_index" : "twitter",
     "_type" : "_doc",
-    "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
+    "_id" : "W0tpsmIBdwcYyG50zbta",
     "_version" : 1,
     "_seq_no" : 0,
     "_primary_term" : 1,
     "result": "created"
 }
 --------------------------------------------------
-// TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/]
+// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful" : 2/"successful" : 1/]
 
 [float]
 [[index-routing]]

docs/reference/docs/update-by-query.asciidoc

Lines changed: 4 additions & 4 deletions
@@ -67,7 +67,7 @@ Back to the API format, this will update tweets from the `twitter` index:
 
 [source,js]
 --------------------------------------------------
-POST twitter/_doc/_update_by_query?conflicts=proceed
+POST twitter/_update_by_query?conflicts=proceed
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:twitter]
@@ -145,12 +145,12 @@ This API doesn't allow you to move the documents it touches, just modify their
 source. This is intentional! We've made no provisions for removing the document
 from its original location.
 
-It's also possible to do this whole thing on multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to do this whole thing on multiple indexes at once, just
+like the search API:
 
 [source,js]
 --------------------------------------------------
-POST twitter,blog/_doc,post/_update_by_query
+POST twitter,blog/_update_by_query
 --------------------------------------------------
 // CONSOLE
 // TEST[s/^/PUT twitter\nPUT blog\n/]

docs/reference/modules/cluster/misc.asciidoc

Lines changed: 1 addition & 1 deletion
@@ -82,4 +82,4 @@ Enable or disable allocation for persistent tasks:
 This setting does not affect the persistent tasks that are already being executed.
 Only newly created persistent tasks, or tasks that must be reassigned (after a node
 left the cluster, for example), are impacted by this setting.
---
+--

docs/reference/modules/http.asciidoc

Lines changed: 7 additions & 1 deletion
@@ -20,7 +20,7 @@ http://en.wikipedia.org/wiki/Chunked_transfer_encoding[HTTP chunking].
 
 The settings in the table below can be configured for HTTP. Note that none of
 them are dynamically updatable so for them to take effect they should be set in
-`elasticsearch.yml`.
+the Elasticsearch <<settings, configuration file>>.
 
 [cols="<,<",options="header",]
 |=======================================================================
@@ -100,6 +100,12 @@ simple message will be returned. Defaults to `true`
 
 |`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`.
 
+|`http.max_warning_header_count` |The maximum number of warning headers in
+client HTTP responses, defaults to unbounded.
+
+|`http.max_warning_header_size` |The maximum total size of warning headers in
+client HTTP responses, defaults to unbounded.
+
 |=======================================================================
 
 It also uses the common

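Since the two new warning-header limits are not dynamically updatable, they would go in the node configuration file; a minimal sketch, with illustrative values rather than defaults from this commit:

    # elasticsearch.yml -- cap the count and total size of warning headers per response
    http.max_warning_header_count: 64
    http.max_warning_header_size: 8kb
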
docs/reference/search/search.asciidoc

Lines changed: 1 addition & 11 deletions
@@ -12,8 +12,7 @@ that match the query. The query can either be provided using a simple
 All search APIs can be applied across multiple types within an index, and
 across multiple indices with support for the
 <<multi-index,multi index syntax>>. For
-example, we can search on all documents across all types within the
-twitter index:
+example, we can search on all documents within the twitter index:
 
 [source,js]
 --------------------------------------------------
@@ -22,15 +21,6 @@ GET /twitter/_search?q=user:kimchy
 // CONSOLE
 // TEST[setup:twitter]
 
-We can also search within specific types:
-
-[source,js]
---------------------------------------------------
-GET /twitter/tweet,user/_search?q=user:kimchy
---------------------------------------------------
-// CONSOLE
-// TEST[setup:twitter]
-
 We can also search all tweets with a certain tag across several indices
 (for example, when each user has his own index):

docs/reference/setup/bootstrap-checks.asciidoc

Lines changed: 1 addition & 1 deletion
@@ -114,7 +114,7 @@ that the Elasticsearch process has the rights to create enough threads
 under normal use. This check is enforced only on Linux. If you are on
 Linux, to pass the maximum number of threads check, you must configure
 your system to allow the Elasticsearch process the ability to create at
-least 2048 threads. This can be done via `/etc/security/limits.conf`
+least 4096 threads. This can be done via `/etc/security/limits.conf`
 using the `nproc` setting (note that you might have to increase the
 limits for the `root` user too).

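For illustration, the limit described here is typically raised with an entry like the following in `/etc/security/limits.conf` (the `elasticsearch` user name is an assumption about the account running the process):

    # /etc/security/limits.conf -- allow the Elasticsearch process owner to create at least 4096 threads
    elasticsearch  -  nproc  4096
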
server/src/main/java/org/elasticsearch/common/unit/TimeValue.java renamed to libs/elasticsearch-core/src/main/java/org/elasticsearch/common/unit/TimeValue.java

Lines changed: 1 addition & 9 deletions
@@ -19,15 +19,12 @@
 
 package org.elasticsearch.common.unit;
 
-import org.elasticsearch.common.xcontent.ToXContentFragment;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-
 import java.io.IOException;
 import java.util.Locale;
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 
-public class TimeValue implements Comparable<TimeValue>, ToXContentFragment {
+public class TimeValue implements Comparable<TimeValue> {
 
     /** How many nano-seconds in one milli-second */
     public static final long NSEC_PER_MSEC = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
@@ -352,9 +349,4 @@ public int compareTo(TimeValue timeValue) {
         double otherValue = ((double) timeValue.duration) * timeValue.timeUnit.toNanos(1);
         return Double.compare(thisValue, otherValue);
     }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return builder.value(toString());
-    }
 }

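With `ToXContentFragment` removed from `TimeValue`, call sites that previously relied on `timeValue.toXContent(builder, params)` have to emit the string form themselves. A minimal sketch of such a call site, mirroring the removed implementation (which simply wrote `toString()`); the helper class and the `"timeout"` field name are hypothetical, not part of this commit:

    import java.io.IOException;

    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.common.xcontent.XContentBuilder;

    final class TimeValueXContent {
        // Hypothetical helper: renders a TimeValue the same way the removed
        // toXContent() did, i.e. as its string representation (e.g. "30s").
        static XContentBuilder timeout(XContentBuilder builder, TimeValue value) throws IOException {
            return builder.field("timeout", value.toString()); // illustrative field name
        }
    }
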
server/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java renamed to libs/elasticsearch-core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java

Lines changed: 0 additions & 30 deletions
@@ -19,15 +19,10 @@
 
 package org.elasticsearch.common.unit;
 
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.test.ESTestCase;
 
-import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 
-import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
-import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.Matchers.containsString;
@@ -154,31 +149,6 @@ private String randomTimeUnit() {
         return randomFrom("nanos", "micros", "ms", "s", "m", "h", "d");
     }
 
-    private void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
-        out.writeTimeValue(value);
-        assertEquals(expectedSize, out.size());
-
-        StreamInput in = out.bytes().streamInput();
-        TimeValue inValue = in.readTimeValue();
-
-        assertThat(inValue, equalTo(value));
-        assertThat(inValue.duration(), equalTo(value.duration()));
-        assertThat(inValue.timeUnit(), equalTo(value.timeUnit()));
-    }
-
-    public void testSerialize() throws Exception {
-        assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 3);
-        assertEqualityAfterSerialize(timeValueNanos(-1), 2);
-        assertEqualityAfterSerialize(timeValueNanos(1), 2);
-        assertEqualityAfterSerialize(timeValueSeconds(30), 2);
-
-        final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values()));
-        BytesStreamOutput out = new BytesStreamOutput();
-        out.writeZLong(timeValue.duration());
-        assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length());
-    }
-
     public void testFailOnUnknownUnits() {
         try {
             TimeValue.parseTimeValue("23tw", null, "test");

modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/20_empty_bucket.yml

Lines changed: 0 additions & 2 deletions
@@ -35,14 +35,12 @@
   - do:
       search:
         index: empty_bucket_idx
-        type: test
 
   - match: {hits.total: 2}
 
   - do:
       search:
         index: empty_bucket_idx
-        type: test
         body: {"aggs": {"histo": {"histogram": {"field": "val1", "interval": 1, "min_doc_count": 0}, "aggs": { "mfs" : { "matrix_stats": {"fields": ["value", "val1"]} } } } } }
 
   - match: {hits.total: 2}
