Skip to content

Commit 63acdb6

Browse files
committed
Merge branch 'upstream/main' into number-support-single-type
* upstream/main: (265 commits) Disable openid connect tests due to missing fixture (elastic#89478) Add periodic job for single processor node testing Updates to changelog processing after docs redesign (elastic#89463) Better support for multi cluster for run task (elastic#89442) Mute failing tests (elastic#89465) [ML] Performance improvements related to ECS Grok pattern usage (elastic#89424) Add source fallback support for date and date_nanos mapped types (elastic#89440) Reuse Info in lifecycle step (elastic#89419) feature: support metrics for multi value fields (elastic#88818) Upgrade OpenTelemetry API and remove workaround (elastic#89438) Remove LegacyClusterTaskResultActionListener (elastic#89459) Add YAML spec docs about matching errors (elastic#89370) Remove redundant cluster upgrade tests for auth tokens (elastic#89417) Return 400 error for GetUserPrivileges call with API keys (elastic#89333) User Profile - Detailed errors in hasPrivileges response (elastic#89224) Rollover min_* conditions docs and highlight (elastic#89434) REST tests for percentiles_bucket agg (elastic#88029) REST tests for cumulative pipeline aggs (elastic#88966) Clean-up file watcher keys. (elastic#89429) fix a typo in Security.java (elastic#89248) ... # Conflicts: # server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
2 parents dddd3e1 + e949dff commit 63acdb6

File tree

1,354 files changed

+43280
-11289
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

1,354 files changed

+43280
-11289
lines changed

.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-unix.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@
1414
name: os
1515
values:
1616
- "centos-7&&immutable"
17-
- "amazon&&immutable"
17+
- "amazon-2&&immutable"
18+
- "amazon-2022&&immutable"
1819
- "debian-10&&immutable"
1920
- "debian-11&&immutable"
2021
- "opensuse-15-1&&immutable"

.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@
2222
export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats
2323
2424
mkdir -p ${BEATS_DIR}
25-
curl -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
26-
curl -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
25+
curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
26+
curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
2727
2828
$WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false \
2929
-Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
---
2+
- job:
3+
name: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests
4+
display-name: "elastic / elasticsearch # %BRANCH% - single processor node tests"
5+
description: "Testing with node.processors set to '1' for the Elasticsearch %BRANCH% branch.\n"
6+
node: "general-purpose && docker"
7+
builders:
8+
- inject:
9+
properties-file: '.ci/java-versions.properties'
10+
properties-content: |
11+
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
12+
RUNTIME_JAVA_HOME=$HOME/.java/$ES_RUNTIME_JAVA
13+
JAVA11_HOME=$HOME/.java/java11
14+
- shell: |
15+
#!/usr/local/bin/runbld --redirect-stderr
16+
$WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
jjbb-template: periodic-trigger-lgc.yml
3+
vars:
4+
- periodic-job: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests
5+
- lgc-job: elastic+elasticsearch+%BRANCH%+intake
6+
- cron: "H H/12 * * *"

.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
type: label-expression
3232
name: os
3333
values:
34-
- rocky-linux-8-packaging
34+
- rhel-8-packaging
3535
- ubuntu-20.04-packaging
3636
- axis:
3737
type: user-defined

.ci/scripts/packaging-test.sh

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,13 @@ if [ -f "/etc/os-release" ] ; then
4343
sudo apt-get install -y --allow-downgrades lintian=2.15.0
4444
fi
4545
fi
46+
if [[ "$ID" == "rhel" ]] ; then
47+
# Downgrade containerd if necessary to work around runc bug
48+
# See: https://github.com/opencontainers/runc/issues/3551
49+
if containerd -version | grep -sF 1.6.7; then
50+
sudo yum downgrade -y containerd.io
51+
fi
52+
fi
4653
else
4754
cat /etc/issue || true
4855
fi

.github/CODEOWNERS

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,3 +15,7 @@ x-pack/plugin/core/src/main/resources/monitoring-logstash-mb.json @elastic/infra
1515
x-pack/plugin/core/src/main/resources/monitoring-logstash.json @elastic/infra-monitoring-ui
1616
x-pack/plugin/core/src/main/resources/monitoring-mb-ilm-policy.json @elastic/infra-monitoring-ui
1717
x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @elastic/infra-monitoring-ui
18+
19+
# Elastic Agent
20+
x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet @elastic/elastic-agent-control-plane
21+
x-pack/plugin/core/src/main/resources/fleet-* @elastic/elastic-agent-control-plane

.github/PULL_REQUEST_TEMPLATE.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ attention.
77
-->
88

99
- Have you signed the [contributor license agreement](https://www.elastic.co/contributor-agreement)?
10-
- Have you followed the [contributor guidelines](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md)?
10+
- Have you followed the [contributor guidelines](https://github.com/elastic/elasticsearch/blob/main/CONTRIBUTING.md)?
1111
- If submitting code, have you built your formula locally prior to submission with `gradle check`?
12-
- If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
12+
- If submitting code, is your pull request against main? Unless there is a good reason otherwise, we prefer pull requests against main and will backport as needed.
1313
- If submitting code, have you checked that your submission is for an [OS and architecture that we support](https://www.elastic.co/support/matrix#show_os)?
14-
- If you are submitting this code for a class then read our [policy](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md#contributing-as-part-of-a-class) for that.
14+
- If you are submitting this code for a class then read our [policy](https://github.com/elastic/elasticsearch/blob/main/CONTRIBUTING.md#contributing-as-part-of-a-class) for that.

BUILDING.md

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,38 @@ E.g. [configuration-cache support](https://github.com/elastic/elasticsearch/issu
6363
There are a few guidelines to follow that should make your life easier to make changes to the elasticsearch build.
6464
Please add a member of the `es-delivery` team as a reviewer if you're making non-trivial changes to the build.
6565

66+
#### Adding or updating a dependency
67+
68+
We rely on [Gradle dependency verification](https://docs.gradle.org/current/userguide/dependency_verification.html) to mitigate the security risks and avoid integrating compromised dependencies.
69+
70+
This requires third-party dependencies and their checksums to be listed in `gradle/verification-metadata.xml`.
71+
72+
For updated or newly added dependencies you need to add an entry to this verification file or update the existing one:
73+
```
74+
<component group="asm" name="asm" version="3.1">
75+
<artifact name="asm-3.1.jar">
76+
<sha256 value="333ff5369043975b7e031b8b27206937441854738e038c1f47f98d072a20437a" origin="official site"/>
77+
</artifact>
78+
</component>
79+
```
80+
81+
You can also automate the generation of this entry by running your build using the `--write-verification-metadata` commandline option:
82+
```
83+
>./gradlew --write-verification-metadata sha256 precommit
84+
```
85+
86+
The `--write-verification-metadata` Gradle option is generally able to resolve reachable configurations,
87+
but we use detached configurations for a certain set of plugins and tasks. Therefore, please ensure you run this option with a task that
88+
uses the changed dependencies. In most cases, `precommit` or `check` are good candidates.
89+
90+
We prefer sha256 checksums as md5 and sha1 are not considered safe anymore these days. The generated entry
91+
will have the `origin` attribute set to `Generated by Gradle`.
92+
93+
>A manual confirmation of the Gradle generated checksums is currently not mandatory.
94+
>If you want to add a level of verification you can manually confirm the checksum (e.g by looking it up on the website of the library)
95+
>Please replace the content of the `origin` attribute with `official site` in that case.
96+
>
97+
6698
#### Custom Plugin and Task implementations
6799

68100
Build logic that is used across multiple subprojects should be considered for moving into a Gradle plugin with a corresponding Gradle task implementation.

CHANGELOG.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
# Elasticsearch Changlog
1+
# Elasticsearch Changelog
22

33
Please see the [release notes](https://www.elastic.co/guide/en/elasticsearch/reference/current/es-release-notes.html) in the reference manual.

TRACING.md

Lines changed: 156 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,156 @@
1+
# Tracing in Elasticsearch
2+
3+
Elasticsearch is instrumented using the [OpenTelemetry][otel] API, which allows
4+
us to gather traces and analyze what Elasticsearch is doing.
5+
6+
7+
## How is tracing implemented?
8+
9+
The Elasticsearch server code contains a [`tracing`][tracing] package, which is
10+
an abstraction over the OpenTelemetry API. All locations in the code that
11+
perform instrumentation and tracing must use these abstractions.
12+
13+
Separately, there is the [`apm`](./modules/apm/) module, which works with the
14+
OpenTelemetry API directly to record trace data. Underneath the OTel API, we
15+
use Elastic's [APM agent for Java][agent], which attaches at runtime to the
16+
Elasticsearch JVM and removes the need for Elasticsearch to hard-code the use of
17+
an OTel implementation. Note that while it is possible to programmatically start
18+
the APM agent, the Security Manager permissions required make this essentially
19+
impossible.
20+
21+
22+
## How is tracing configured?
23+
24+
You must supply configuration and credentials for the APM server (see below).
25+
You must also set `tracing.apm.enabled` to `true`, but this can be toggled at
26+
runtime.
27+
28+
All APM settings live under `tracing.apm`. All settings related to the Java agent
29+
go under `tracing.apm.agent`. Anything you set under there will be propagated to
30+
the agent.
31+
32+
For agent settings that can be changed dynamically, you can use the cluster
33+
settings REST API. For example, to change the sampling rate:
34+
35+
curl -XPUT \
36+
-H "Content-type: application/json" \
37+
-u "$USERNAME:$PASSWORD" \
38+
-d '{ "persistent": { "tracing.apm.agent.transaction_sample_rate": "0.75" } }' \
39+
https://localhost:9200/_cluster/settings
40+
41+
42+
### More details about configuration
43+
44+
For context, the APM agent pulls configuration from [multiple
45+
sources][agent-config], with a hierarchy that means, for example, that options
46+
set in the config file cannot be overridden via system properties.
47+
48+
Now, in order to send tracing data to the APM server, ES needs to be configured with
49+
either a `secret_key` or an `api_key`. We could configure these in the agent via
50+
system properties, but then their values would be available to any Java code in
51+
Elasticsearch that can read system properties.
52+
53+
Instead, when Elasticsearch bootstraps itself, it compiles all APM settings
54+
together, including any `secret_key` or `api_key` values from the ES keystore,
55+
and writes out a temporary APM config file containing all static configuration
56+
(i.e. values that cannot change after the agent starts). This file is deleted
57+
as soon as possible after ES starts up. Settings that are not sensitive and can
58+
be changed dynamically are configured via system properties. Calls to the ES
59+
settings REST API are translated into system property writes, which the agent
60+
later picks up and applies.
61+
62+
## Where is tracing data sent?
63+
64+
You need to have an APM server running somewhere. For example, you can create a
65+
deployment in [Elastic Cloud](https://www.elastic.co/cloud/) with Elastic's APM
66+
integration.
67+
68+
## What do we trace?
69+
70+
We primarily trace "tasks". The tasks framework in Elasticsearch allows work to
71+
be scheduled for execution, cancelled, executed in a different thread pool, and
72+
so on. Tracing a task results in a "span", which represents the execution of the
73+
task in the tracing system. We also instrument REST requests, which are not (at
74+
present) modelled by tasks.
75+
76+
A span can be associated with a parent span, which allows all spans in, for
77+
example, a REST request to be grouped together. Spans can track work across
78+
different Elasticsearch nodes.
79+
80+
Elasticsearch also supports distributed tracing via [W3c Trace Context][w3c]
81+
headers. If clients of Elasticsearch send these headers with their requests,
82+
then that data will be forwarded to the APM server in order to yield a trace
83+
across systems.
84+
85+
In rare circumstances, it is possible to avoid tracing a task using
86+
`TaskManager#register(String,String,TaskAwareRequest,boolean)`. For example,
87+
Machine Learning uses tasks to record which models are loaded on each node. Such
88+
tasks are long-lived and are not suitable candidates for APM tracing.
89+
90+
## Thread contexts and nested spans
91+
92+
When a span is started, Elasticsearch tracks information about that span in the
93+
current [thread context][thread-context]. If a new thread context is created,
94+
then the current span information must not be propagated but instead renamed, so
95+
that (1) it doesn't interfere when new trace information is set in the context,
96+
and (2) the previous trace information is available to establish a parent /
97+
child span relationship. This is done with `ThreadContext#newTraceContext()`.
98+
99+
Sometimes we need to detach new spans from their parent. For example, creating
100+
an index starts some related background tasks, but these shouldn't be associated
101+
with the REST request, otherwise all the background task spans will be
102+
associated with the REST request for as long as Elasticsearch is running.
103+
`ThreadContext` provides the `clearTraceContext()` method for this purpose.
104+
105+
## How do I trace something that isn't a task?
106+
107+
First work out if you can turn it into a task. No, really.
108+
109+
If you can't do that, you'll need to ensure that your class can get access to a
110+
`Tracer` instance (this is available to inject, or you'll need to pass it when
111+
your class is created). Then you need to call the appropriate methods on the
112+
tracer when a span should start and end. You'll also need to manage the creation
113+
of new trace contexts when child spans need to be created.
114+
115+
## What additional attributes should I set?
116+
117+
That's up to you. Be careful not to capture anything that could leak sensitive
118+
or personal information.
119+
120+
## What is "scope" and when should I used it?
121+
122+
Usually you won't need to.
123+
124+
That said, sometimes you may want more details to be captured about a particular
125+
section of code. You can think of "scope" as representing the currently active
126+
tracing context. Using scope allows the APM agent to do the following:
127+
128+
* Enables automatic correlation between the "active span" and logging, where
129+
logs have also been captured.
130+
* Enables capturing any exceptions thrown when the span is active, and linking
131+
those exceptions to the span
132+
* Allows the sampling profiler to be used as it allows samples to be linked to
133+
the active span (if any), so the agent can automatically get extra spans
134+
without manual instrumentation.
135+
136+
However, a scope must be closed in the same thread in which it was opened, which
137+
cannot be guaranteed when using tasks, making scope largely useless to
138+
Elasticsearch.
139+
140+
In the OpenTelemetry documentation, spans, scope and context are fairly
141+
straightforward to use, since `Scope` is an `AutoCloseable` and so can be
142+
easily created and cleaned up using try-with-resources blocks. Unfortunately,
143+
Elasticsearch is a complex piece of software, and also extremely asynchronous,
144+
so the typical OpenTelemetry examples do not work.
145+
146+
Nonetheless, it is possible to manually use scope where we need more detail by
147+
explicitly opening a scope via the `Tracer`.
148+
149+
150+
[otel]: https://opentelemetry.io/
151+
[thread-context]: ./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
152+
[w3c]: https://www.w3.org/TR/trace-context/
153+
[tracing]: ./server/src/main/java/org/elasticsearch/tracing/
154+
[config]: ./modules/apm/src/main/config/elasticapm.properties
155+
[agent-config]: https://www.elastic.co/guide/en/apm/agent/java/master/configuration.html
156+
[agent]: https://www.elastic.co/guide/en/apm/agent/java/current/index.html

benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,6 @@
88
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
99
import org.elasticsearch.search.fetch.subphase.FetchSourcePhase;
1010
import org.elasticsearch.search.lookup.SourceLookup;
11-
import org.elasticsearch.xcontent.DeprecationHandler;
12-
import org.elasticsearch.xcontent.NamedXContentRegistry;
1311
import org.elasticsearch.xcontent.XContentBuilder;
1412
import org.elasticsearch.xcontent.XContentParser;
1513
import org.elasticsearch.xcontent.XContentParserConfiguration;
@@ -108,8 +106,7 @@ public BytesReference filterXContentOnBuilder() throws IOException {
108106
XContentType.JSON.toParsedMediaType()
109107
);
110108
try (
111-
XContentParser parser = XContentType.JSON.xContent()
112-
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, sourceBytes.streamInput())
109+
XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, sourceBytes.streamInput())
113110
) {
114111
builder.copyCurrentStructure(parser);
115112
return BytesReference.bytes(builder);
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
distributionBase=GRADLE_USER_HOME
22
distributionPath=wrapper/dists
3-
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip
3+
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
44
zipStoreBase=GRADLE_USER_HOME
55
zipStorePath=wrapper/dists
6-
distributionSha256Sum=97a52d145762adc241bad7fd18289bf7f6801e08ece6badf80402fe2b9f250b1
6+
distributionSha256Sum=db9c8211ed63f61f60292c69e80d89196f9eb36665e369e7f00ac4cc841c2219

build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest
3939
"""
4040

4141
when:
42-
def result = gradleRunner("setupDistro", '-g', testProjectDir.newFolder('GUH').path).build()
42+
def result = gradleRunner("setupDistro", '-g', gradleUserHome).build()
4343

4444
then:
4545
result.task(":distribution:archives:${testArchiveProjectName}:buildExpanded").outcome == TaskOutcome.SUCCESS

build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
package org.elasticsearch.gradle.internal
1010

11+
import spock.lang.TempDir
1112
import spock.lang.Unroll
1213
import com.github.tomakehurst.wiremock.WireMockServer
1314

@@ -126,7 +127,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
126127
when:
127128
def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server ->
128129
buildFile << repositoryMockSetup(server, jdkVendor, jdkVersion)
129-
gradleRunner('getJdk', '-i', '-g', testProjectDir.newFolder().toString()).build()
130+
gradleRunner('getJdk', '-i', '-g', gradleUserHome).build()
130131
}
131132

132133
then:
@@ -179,13 +180,12 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
179180
def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server ->
180181
buildFile << repositoryMockSetup(server, VENDOR_ADOPTIUM, ADOPT_JDK_VERSION)
181182

182-
def commonGradleUserHome = testProjectDir.newFolder().toString()
183183
// initial run
184-
def firstResult = gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', commonGradleUserHome).build()
184+
def firstResult = gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', gradleUserHome).build()
185185
// assert the output of an executed transform is shown
186186
assertOutputContains(firstResult.output, "Unpacking $expectedArchiveName using $transformType")
187187
// run against up-to-date transformations
188-
gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', commonGradleUserHome).build()
188+
gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', gradleUserHome).build()
189189
}
190190

191191
then:

build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ if (buildNumber && performanceTest == null) {
2323
fileset(dir: projectDir) {
2424
Set<File> fileSet = fileTree(projectDir) {
2525
include("**/*.hprof")
26-
include("**/reaper.log")
2726
include("**/build/test-results/**/*.xml")
2827
include("**/build/testclusters/**")
2928
exclude("**/build/testclusters/**/data/**")
@@ -49,6 +48,8 @@ if (buildNumber && performanceTest == null) {
4948
}
5049

5150
fileset(dir: "${gradle.gradleUserHomeDir}/workers", followsymlinks: false)
51+
52+
fileset(dir: "${project.projectDir}/.gradle/reaper", followsymlinks: false)
5253
}
5354
} catch (Exception e) {
5455
logger.lifecycle("Failed to archive additional logs", e)

0 commit comments

Comments
 (0)