
Commit 1b2d52b

Merge branch 'main' into fixed-parse-tsid
* main: (176 commits)
  Fix RandomSamplerAggregatorTests testAggregationSamplingNestedAggsScaled test failure (elastic#89958)
  [Downsampling] Replace document map with SMILE encoded doc (elastic#89495)
  Remove full cluster state from error logging in MasterService (elastic#89960)
  [ML] Truncate categorization fields (elastic#89827)
  [TSDB] Removed `summary` and `histogram` metric types (elastic#89937)
  Update testNodeSelectorRouting so that it does not depend on iteration order (elastic#89879)
  Make sure listener is resolved when file queue is cleared (elastic#89929)
  [Stable plugin api] Extensible annotation (elastic#89903)
  Fix double sending of response in TransportOpenIdConnectPrepareAuthenticationAction (elastic#89930)
  Make sure ivy repo directory exists before downloading artifacts
  Use 'file://' scheme for local repository URL
  Use DRA artifacts for release build CI jobs
  Log unsuccessful attempts to get credentials from web identity tokens (elastic#88241)
  Script: Write Field API path manipulation (elastic#89889)
  Fetch health info action (elastic#89820)
  Fix memory leak in TransportDeleteExpiredDataAction (elastic#89935)
  [ML] Performance improvements for categorization jobs (elastic#89824)
  [DOCS] Revert changes for ES_JAVA_OPTS (elastic#89931)
  Fix deadlock bug exposed by a test (elastic#89934)
  [Downsampling] Remove `FieldValueFetcher` validator (elastic#89497)
  ...
2 parents eac40ec + 258833f commit 1b2d52b


801 files changed: +25252 −7614 lines changed

.ci/bwcVersions

Lines changed: 1 addition & 0 deletions
@@ -72,4 +72,5 @@ BWC_VERSION:
   - "8.3.3"
   - "8.4.0"
   - "8.4.1"
+  - "8.4.2"
   - "8.5.0"

.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml

Lines changed: 9 additions & 3 deletions
@@ -22,8 +22,14 @@
    export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats

    mkdir -p ${BEATS_DIR}
-   curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
-   curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+   curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+   curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz

-   $WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false \
+   # Fetch ML artifacts
+   export ML_IVY_REPO=$(mktemp -d)
+   mkdir -p ${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}
+   curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-deps.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-deps.zip
+   curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-nodeps.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-nodeps.zip
+
+   $WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO} \
      -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build
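For context, the added step stages the ml-cpp zips in a Maven-style directory layout inside a temporary directory and hands that directory to Gradle as a local `file://` repository. A minimal shell sketch of reproducing the same layout by hand, outside CI; the explicit version string is an illustrative assumption, not taken from the job definition:

----
# Example only: pick the Elasticsearch version you are building (the CI job derives ES_VERSION itself).
export ES_VERSION=8.5.0
export ML_IVY_REPO=$(mktemp -d)

# Lay the zips out exactly where the build expects them under the local repo root.
mkdir -p "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}"
curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-deps.zip" \
  "https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-deps.zip"
curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-nodeps.zip" \
  "https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-nodeps.zip"

# The release build then consumes the staged artifacts via the flag added in this change:
#   ./gradlew ... -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO}
----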

.ci/jobs.t/elastic+elasticsearch+pull-request+release-tests.yml

Lines changed: 9 additions & 3 deletions
@@ -44,8 +44,14 @@
    export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats

    mkdir -p ${BEATS_DIR}
-   curl -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
-   curl -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+   curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+   curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://artifacts-snapshot.elastic.co/beats/${ES_VERSION}-SNAPSHOT/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz

-   $WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false \
+   # Fetch ML artifacts
+   export ML_IVY_REPO=$(mktemp -d)
+   mkdir -p ${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}
+   curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-deps.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-deps.zip
+   curl --fail -o "${ML_IVY_REPO}/maven/org/elasticsearch/ml/ml-cpp/${ES_VERSION}/ml-cpp-${ES_VERSION}-nodeps.zip" https://artifacts-snapshot.elastic.co/ml-cpp/${ES_VERSION}-SNAPSHOT/downloads/ml-cpp/ml-cpp-${ES_VERSION}-SNAPSHOT-nodeps.zip
+
+   $WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false -Dbuild.ml_cpp.repo=file://${ML_IVY_REPO} \
      -Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build

.ci/snapshotBwcVersions

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 BWC_VERSION:
   - "7.17.7"
-  - "8.4.1"
+  - "8.4.2"
   - "8.5.0"
Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+name: Docs Preview Links
+
+on:
+  pull_request_target:
+    types: [opened]
+    paths:
+      - '**.asciidoc'
+
+jobs:
+  doc-preview:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/github-script@v6
+        name: Add doc preview links
+        with:
+          script: |
+            const pr = context.payload.pull_request;
+            const comment = `Documentation preview:
+            - ✨ [Changed pages](https://${context.repo.repo}_${pr.number}.docs-preview.app.elstc.co/diff)`;
+
+            github.rest.issues.createComment({
+              issue_number: context.issue.number,
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              body: comment,
+            });

.github/workflows/gradle-wrapper-validation.yml

Lines changed: 3 additions & 0 deletions
@@ -1,6 +1,9 @@
 name: "Validate Gradle Wrapper"
 on: [push]

+permissions:
+  contents: read
+
 jobs:
   validation:
     name: "Validation"

README.asciidoc

Lines changed: 150 additions & 3 deletions
@@ -26,9 +26,156 @@ If you prefer to install and manage Elasticsearch yourself, you can download
 the latest version from
 https://www.elastic.co/downloads/elasticsearch[elastic.co/downloads/elasticsearch].

-For more installation options, see the
-https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Elasticsearch installation
-documentation].
+=== Run Elasticsearch locally
+
+////
+IMPORTANT: This content is replicated in the Elasticsearch guide.
+If you make changes, you must also update setup/set-up-local-dev-deployment.asciidoc.
+////
+
+To try out Elasticsearch on your own machine, we recommend using Docker
+and running both Elasticsearch and Kibana.
+Docker images are available from the https://www.docker.elastic.co[Elastic Docker registry].
+
+NOTE: Starting in Elasticsearch 8.0, security is enabled by default.
+The first time you start Elasticsearch, TLS encryption is configured automatically,
+a password is generated for the `elastic` user,
+and a Kibana enrollment token is created so you can connect Kibana to your secured cluster.
+
+For other installation options, see the
+https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Elasticsearch installation documentation].
+
+**Start Elasticsearch**
+
+. Install and start https://www.docker.com/products/docker-desktop[Docker
+Desktop]. Go to **Preferences > Resources > Advanced** and set Memory to at least 4GB.
+
+. Start an Elasticsearch container:
++
+----
+docker network create elastic
+docker pull docker.elastic.co/elasticsearch/elasticsearch:{version} <1>
+docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:{version}
+----
+<1> Replace {version} with the version of Elasticsearch you want to run.
++
+When you start Elasticsearch for the first time, the generated `elastic` user password and
+Kibana enrollment token are output to the terminal.
++
+NOTE: You might need to scroll back a bit in the terminal to view the password
+and enrollment token.
+
+. Copy the generated password and enrollment token and save them in a secure
+location. These values are shown only when you start Elasticsearch for the first time.
+You'll use these to enroll Kibana with your Elasticsearch cluster and log in.
+
+**Start Kibana**
+
+Kibana enables you to easily send requests to Elasticsearch and analyze, visualize, and manage data interactively.
+
+. In a new terminal session, start Kibana and connect it to your Elasticsearch container:
++
+----
+docker pull docker.elastic.co/kibana/kibana:{version} <1>
+docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:{version}
+----
+<1> Replace {version} with the version of Kibana you want to run.
++
+When you start Kibana, a unique URL is output to your terminal.
+
+. To access Kibana, open the generated URL in your browser.
+
+.. Paste the enrollment token that you copied when starting
+Elasticsearch and click the button to connect your Kibana instance with Elasticsearch.
+
+.. Log in to Kibana as the `elastic` user with the password that was generated
+when you started Elasticsearch.
+
+**Send requests to Elasticsearch**
+
+You send data and other requests to Elasticsearch through REST APIs.
+You can interact with Elasticsearch using any client that sends HTTP requests,
+such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch
+language clients] and https://curl.se[curl].
+Kibana's developer console provides an easy way to experiment and test requests.
+To access the console, go to **Management > Dev Tools**.
+
+**Add data**
+
+You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs.
+Whether you have structured or unstructured text, numerical data, or geospatial data,
+Elasticsearch efficiently stores and indexes it in a way that supports fast searches.
+
+For timestamped data such as logs and metrics, you typically add documents to a
+data stream made up of multiple auto-generated backing indices.
+
+To add a single document to an index, submit an HTTP post request that targets the index.
+
+----
+POST /customer/_doc/1
+{
+  "firstname": "Jennifer",
+  "lastname": "Walters"
+}
+----
+
+This request automatically creates the `customer` index if it doesn't exist,
+adds a new document that has an ID of 1, and
+stores and indexes the `firstname` and `lastname` fields.
+
+The new document is available immediately from any node in the cluster.
+You can retrieve it with a GET request that specifies its document ID:
+
+----
+GET /customer/_doc/1
+----
+
+To add multiple documents in one request, use the `_bulk` API.
+Bulk data must be newline-delimited JSON (NDJSON).
+Each line must end in a newline character (`\n`), including the last line.
+
+----
+PUT customer/_bulk
+{ "create": { } }
+{ "firstname": "Monica","lastname":"Rambeau"}
+{ "create": { } }
+{ "firstname": "Carol","lastname":"Danvers"}
+{ "create": { } }
+{ "firstname": "Wanda","lastname":"Maximoff"}
+{ "create": { } }
+{ "firstname": "Jennifer","lastname":"Takeda"}
+----
+
+**Search**
+
+Indexed documents are available for search in near real-time.
+The following search matches all customers with a first name of _Jennifer_
+in the `customer` index.
+
+----
+GET customer/_search
+{
+  "query" : {
+    "match" : { "firstname": "Jennifer" }
+  }
+}
+----
+
+**Explore**
+
+You can use Discover in Kibana to interactively search and filter your data.
+From there, you can start creating visualizations and building and sharing dashboards.
+
+To get started, create a _data view_ that connects to one or more Elasticsearch indices,
+data streams, or index aliases.
+
+. Go to **Management > Stack Management > Kibana > Data Views**.
+. Select **Create data view**.
+. Enter a name for the data view and a pattern that matches one or more indices,
+such as _customer_.
+. Select **Save data view to Kibana**.
+
+To start exploring, go to **Analytics > Discover**.

 [[upgrade]]
 == Upgrade
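A side note on the console snippets in the new README section: requests such as `POST /customer/_doc/1` are written in Kibana Dev Tools syntax. As a rough sketch of the same call made with curl against the Docker setup described above (the `-k` flag and the `ELASTIC_PASSWORD` variable are assumptions for quick local testing, not part of the README; in practice you would verify TLS with the cluster's CA certificate instead of skipping it):

----
# Dev Tools form:   POST /customer/_doc/1   { "firstname": "Jennifer", "lastname": "Walters" }
# Equivalent curl call; ELASTIC_PASSWORD holds the generated password printed at first start.
curl -k -u "elastic:${ELASTIC_PASSWORD}" \
  -X POST "https://localhost:9200/customer/_doc/1" \
  -H 'Content-Type: application/json' \
  -d '{ "firstname": "Jennifer", "lastname": "Walters" }'
----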

TESTING.asciidoc

Lines changed: 35 additions & 1 deletion
@@ -113,7 +113,7 @@ password: `elastic-password`.
 You may need to customize the cluster configuration for the ./gradlew run task.
 You can simply find the task in the source code and configure it there.
 (The task is currently defined in build-tools-internal/src/main/groovy/elasticsearch.run.gradle)
-However, this requires modifying a source controlled file and subject to accidental commits.
+However, this requires modifying a source controlled file and is subject to accidental commits.
 Alternatively, you can use a Gradle init script to inject custom build logic with the -I flag to configure this task locally.

 For example:
@@ -142,6 +142,40 @@ Now the http.p12 file will be placed in the config directory of the running clus
 Assuming you have the http.ssl.keystore setup correctly, you can now use HTTPS with ./gradlew run without the risk
 of accidentally committing your local configurations.

+==== Multiple nodes in the test cluster for ./gradlew run
+
+Another desired customization for ./gradlew run might be to run multiple
+nodes with different settings for each node. For example, you may want to debug a coordinating only node that fans out
+to one or more data nodes. To do this, increase the numberOfNodes and add specific configuration for each
+of the nodes. For example, the following will instruct the first node (:9200) to be a coordinating only node,
+and all other nodes to be master, data_hot, data_content nodes.
+-------------------------------------
+testClusters.register("runTask") {
+  ...
+  numberOfNodes = 2
+  def cluster = testClusters.named("runTask").get()
+  cluster.getNodes().each { node ->
+    node.setting('cluster.initial_master_nodes', cluster.getLastNode().getName())
+    node.setting('node.roles', '[master,data_hot,data_content]')
+  }
+  cluster.getFirstNode().setting('node.roles', '[]')
+  ...
+}
+-------------------------------------
+
+You can also place this config in a custom init script (see above) to avoid accidental commits.
+If you are passing in the --debug-jvm flag with multiple nodes, you will need multiple remote debuggers running, one
+for each node, listening at ports 5007, 5008, 5009, and so on. Ensure that each remote debugger has auto restart enabled.
+
+==== Manually testing cross cluster search
+
+Use ./gradlew run-ccs to launch 2 clusters wired together for the purposes of cross cluster search.
+For example, send a search request "my_remote_cluster:*/_search" to the querying cluster (:9200) to query data
+in the fulfilling cluster.
+
+If you are passing in the --debug-jvm flag, you will need two remote debuggers running: one at port 5007 and another
+at port 5008. Ensure that each remote debugger has auto restart enabled.
+
 === Test case filtering.

 You can run a single test, provided that you specify the Gradle project. See the documentation on
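To make the cross-cluster search instructions in the TESTING.asciidoc change above concrete, a request like the following hits the querying cluster and fans out to the fulfilling cluster via the `my_remote_cluster:` index prefix. The credentials and plain-http endpoint are assumptions carried over from the `./gradlew run` defaults mentioned earlier in that file and may not apply to run-ccs as-is:

----
# Query the querying cluster on :9200; the my_remote_cluster prefix targets the fulfilling cluster.
curl -u elastic:elastic-password \
  "http://localhost:9200/my_remote_cluster:*/_search?pretty" \
  -H 'Content-Type: application/json' \
  -d '{ "query": { "match_all": {} } }'
----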

benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java

Lines changed: 3 additions & 1 deletion
@@ -40,6 +40,7 @@
 import org.elasticsearch.script.ScoreScript;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.search.lookup.SourceLookup;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -88,7 +89,8 @@ public class ScriptScoreBenchmark {
     private final CircuitBreakerService breakerService = new NoneCircuitBreakerService();
     private final SearchLookup lookup = new SearchLookup(
         fieldTypes::get,
-        (mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService)
+        (mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService),
+        new SourceLookup.ReaderSourceProvider()
     );

     @Param({ "expression", "metal", "painless_cast", "painless_def" })

benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java

Lines changed: 1 addition & 2 deletions
@@ -79,8 +79,7 @@ private BytesReference buildBigExample(String extraText) throws IOException {

     @Benchmark
     public BytesReference filterObjects() throws IOException {
-        SourceLookup lookup = new SourceLookup();
-        lookup.setSource(sourceBytes);
+        SourceLookup lookup = new SourceLookup(new SourceLookup.BytesSourceProvider(sourceBytes));
         Object value = lookup.filter(fetchContext);
         return FetchSourcePhase.objectToBytes(value, XContentType.JSON, Math.min(1024, lookup.internalSourceRef().length()));
     }

build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfo.java

Lines changed: 18 additions & 11 deletions
@@ -75,18 +75,25 @@ public static GitInfo gitInfo(File rootDir) {
             head = dotGit.resolve("HEAD");
             gitDir = dotGit;
         } else {
-            // this is a git worktree, follow the pointer to the repository
-            final Path workTree = Paths.get(readFirstLine(dotGit).substring("gitdir:".length()).trim());
-            if (Files.exists(workTree) == false) {
-                return new GitInfo("unknown", "unknown");
-            }
-            head = workTree.resolve("HEAD");
-            final Path commonDir = Paths.get(readFirstLine(workTree.resolve("commondir")));
-            if (commonDir.isAbsolute()) {
-                gitDir = commonDir;
+            // this is a git worktree or submodule, follow the pointer to the repository
+            final Path reference = Paths.get(readFirstLine(dotGit).substring("gitdir:".length()).trim());
+            if (reference.getParent().endsWith("modules")) {
+                // this is a git submodule so follow the reference to the git repo
+                gitDir = rootDir.toPath().resolve(reference);
+                head = gitDir.resolve("HEAD");
             } else {
-                // this is the common case
-                gitDir = workTree.resolve(commonDir);
+                // this is a worktree so resolve the root repo directory
+                if (Files.exists(reference) == false) {
+                    return new GitInfo("unknown", "unknown");
+                }
+                head = reference.resolve("HEAD");
+                final Path commonDir = Paths.get(readFirstLine(reference.resolve("commondir")));
+                if (commonDir.isAbsolute()) {
+                    gitDir = commonDir;
+                } else {
+                    // this is the common case
+                    gitDir = reference.resolve(commonDir);
+                }
             }
         }
         final String ref = readFirstLine(head);
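For readers unfamiliar with the `.git` pointer files this code parses, a rough illustration follows; the directory and repository names are examples, not taken from the change. In a submodule checkout, `.git` is a file whose `gitdir:` line points (relatively) into the parent repository's `.git/modules/...` directory, which is what the new `endsWith("modules")` branch detects. In a linked worktree, it points at a `.git/worktrees/<name>` directory containing a `commondir` file, which the existing branch resolves.

----
# Submodule working directory: .git is a file with a relative pointer into the superproject.
$ cat my-submodule/.git
gitdir: ../.git/modules/my-submodule

# Linked worktree: .git points at the worktree metadata directory, which holds `commondir`.
$ cat ../my-worktree/.git
gitdir: /path/to/repo/.git/worktrees/my-worktree
$ cat /path/to/repo/.git/worktrees/my-worktree/commondir
../..
----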
