diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-unix.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-unix.yml
index 62dda744b4c9d..6aec8b04a2f30 100644
--- a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-unix.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-unix.yml
@@ -14,7 +14,8 @@
name: os
values:
- "centos-7&&immutable"
- - "amazon&&immutable"
+ - "amazon-2&&immutable"
+ - "amazon-2022&&immutable"
- "debian-10&&immutable"
- "debian-11&&immutable"
- "opensuse-15-1&&immutable"
diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml
index c4050517d3918..5ffcfc25cde2f 100644
--- a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml
@@ -22,8 +22,8 @@
export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats
mkdir -p ${BEATS_DIR}
- curl -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
- curl -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+ curl --fail -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
+ curl --fail -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
$WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false \
-Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build
diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml
new file mode 100644
index 0000000000000..66b12f380c701
--- /dev/null
+++ b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml
@@ -0,0 +1,16 @@
+---
+- job:
+ name: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests
+ display-name: "elastic / elasticsearch # %BRANCH% - single processor node tests"
+ description: "Testing with node.processors set to '1' for the Elasticsearch %BRANCH% branch.\n"
+ node: "general-purpose && docker"
+ builders:
+ - inject:
+ properties-file: '.ci/java-versions.properties'
+ properties-content: |
+ JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
+ RUNTIME_JAVA_HOME=$HOME/.java/$ES_RUNTIME_JAVA
+ JAVA11_HOME=$HOME/.java/java11
+ - shell: |
+ #!/usr/local/bin/runbld --redirect-stderr
+ $WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.configure_test_clusters_with_one_processor=true check
diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml
new file mode 100644
index 0000000000000..40ad9e9dd5446
--- /dev/null
+++ b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml
@@ -0,0 +1,6 @@
+---
+jjbb-template: periodic-trigger-lgc.yml
+vars:
+ - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests
+ - lgc-job: elastic+elasticsearch+%BRANCH%+intake
+ - cron: "H H/12 * * *"
diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
index 1942bc53ded11..7d52ec346b2ed 100644
--- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
@@ -31,7 +31,7 @@
type: label-expression
name: os
values:
- - rocky-linux-8-packaging
+ - rhel-8-packaging
- ubuntu-20.04-packaging
- axis:
type: user-defined
diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh
index 7ef82371f6ad7..7b0e8f3320bed 100755
--- a/.ci/scripts/packaging-test.sh
+++ b/.ci/scripts/packaging-test.sh
@@ -43,6 +43,13 @@ if [ -f "/etc/os-release" ] ; then
sudo apt-get install -y --allow-downgrades lintian=2.15.0
fi
fi
+ if [[ "$ID" == "rhel" ]] ; then
+ # Downgrade containerd if necessary to work around runc bug
+ # See: https://github.com/opencontainers/runc/issues/3551
+ if containerd -version | grep -sF 1.6.7; then
+ sudo yum downgrade -y containerd.io
+ fi
+ fi
else
cat /etc/issue || true
fi
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 1f9ace96bd3ee..7a95e4eaef1b1 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -15,3 +15,7 @@ x-pack/plugin/core/src/main/resources/monitoring-logstash-mb.json @elastic/infra
x-pack/plugin/core/src/main/resources/monitoring-logstash.json @elastic/infra-monitoring-ui
x-pack/plugin/core/src/main/resources/monitoring-mb-ilm-policy.json @elastic/infra-monitoring-ui
x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @elastic/infra-monitoring-ui
+
+# Elastic Agent
+x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet @elastic/elastic-agent-control-plane
+x-pack/plugin/core/src/main/resources/fleet-* @elastic/elastic-agent-control-plane
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 57f0992b9172d..ae934e1be5886 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -7,8 +7,8 @@ attention.
-->
- Have you signed the [contributor license agreement](https://www.elastic.co/contributor-agreement)?
-- Have you followed the [contributor guidelines](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md)?
+- Have you followed the [contributor guidelines](https://github.com/elastic/elasticsearch/blob/main/CONTRIBUTING.md)?
- If submitting code, have you built your formula locally prior to submission with `gradle check`?
-- If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
+- If submitting code, is your pull request against main? Unless there is a good reason otherwise, we prefer pull requests against main and will backport as needed.
- If submitting code, have you checked that your submission is for an [OS and architecture that we support](https://www.elastic.co/support/matrix#show_os)?
-- If you are submitting this code for a class then read our [policy](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md#contributing-as-part-of-a-class) for that.
+- If you are submitting this code for a class then read our [policy](https://github.com/elastic/elasticsearch/blob/main/CONTRIBUTING.md#contributing-as-part-of-a-class) for that.
diff --git a/BUILDING.md b/BUILDING.md
index 4d82791ce9413..7d3261c0327d1 100644
--- a/BUILDING.md
+++ b/BUILDING.md
@@ -63,6 +63,38 @@ E.g. [configuration-cache support](https://github.com/elastic/elasticsearch/issu
There are a few guidelines to follow that should make your life easier to make changes to the elasticsearch build.
Please add a member of the `es-delivery` team as a reviewer if you're making non-trivial changes to the build.
+#### Adding or updating a dependency
+
+We rely on [Gradle dependency verification](https://docs.gradle.org/current/userguide/dependency_verification.html) to mitigate the security risks and avoid integrating compromised dependencies.
+
+This requires third party dependencies and their checksums to be listed in `gradle/verification-metadata.xml`.
+
+For updated or newly added dependencies you need to add an entry to this verification file or update the existing one:
+```
+
+
+
+
+
+```
+
+You can also automate the generation of this entry by running your build using the `--write-verification-metadata` commandline option:
+```
+>./gradlew --write-verification-metadata sha256 precommit
+```
+
+The `--write-verification-metadata` Gradle option is generally able to resolve reachable configurations,
+but we use detached configurations for a certain set of plugins and tasks. Therefore, please ensure you run this option with a task that
+uses the changed dependencies. In most cases, `precommit` or `check` are good candidates.
+
+We prefer sha256 checksums, as md5 and sha1 are no longer considered secure. The generated entry
+will have the `origin` attribute set to `Generated by Gradle`.
+
+>A manual confirmation of the Gradle generated checksums is currently not mandatory.
+>If you want to add a level of verification you can manually confirm the checksum (e.g by looking it up on the website of the library)
+>Please replace the content of the `origin` attribute by `official site` in that case.
+>
+
#### Custom Plugin and Task implementations
Build logic that is used across multiple subprojects should considered to be moved into a Gradle plugin with according Gradle task implmentation.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1a965ee4b6eb0..769855531e35c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,3 @@
-# Elasticsearch Changlog
+# Elasticsearch Changelog
Please see the [release notes](https://www.elastic.co/guide/en/elasticsearch/reference/current/es-release-notes.html) in the reference manual.
diff --git a/TRACING.md b/TRACING.md
new file mode 100644
index 0000000000000..181d18e7ff760
--- /dev/null
+++ b/TRACING.md
@@ -0,0 +1,156 @@
+# Tracing in Elasticsearch
+
+Elasticsearch is instrumented using the [OpenTelemetry][otel] API, which allows
+us to gather traces and analyze what Elasticsearch is doing.
+
+
+## How is tracing implemented?
+
+The Elasticsearch server code contains a [`tracing`][tracing] package, which is
+an abstraction over the OpenTelemetry API. All locations in the code that
+perform instrumentation and tracing must use these abstractions.
+
+Separately, there is the [`apm`](./modules/apm/) module, which works with the
+OpenTelemetry API directly to record trace data. Underneath the OTel API, we
+use Elastic's [APM agent for Java][agent], which attaches at runtime to the
+Elasticsearch JVM and removes the need for Elasticsearch to hard-code the use of
+an OTel implementation. Note that while it is possible to programmatically start
+the APM agent, the Security Manager permissions required make this essentially
+impossible.
+
+
+## How is tracing configured?
+
+You must supply configuration and credentials for the APM server (see below).
+You must also set `tracing.apm.enabled` to `true`, but this can be toggled at
+runtime.
+
+All APM settings live under `tracing.apm`. All settings related to the Java agent
+go under `tracing.apm.agent`. Anything you set under there will be propagated to
+the agent.
+
+For agent settings that can be changed dynamically, you can use the cluster
+settings REST API. For example, to change the sampling rate:
+
+ curl -XPUT \
+ -H "Content-type: application/json" \
+ -u "$USERNAME:$PASSWORD" \
+ -d '{ "persistent": { "tracing.apm.agent.transaction_sample_rate": "0.75" } }' \
+ https://localhost:9200/_cluster/settings
+
+
+### More details about configuration
+
+For context, the APM agent pulls configuration from [multiple
+sources][agent-config], with a hierarchy that means, for example, that options
+set in the config file cannot be overridden via system properties.
+
+Now, in order to send tracing data to the APM server, ES needs to be configured with
+either a `secret_key` or an `api_key`. We could configure these in the agent via
+system properties, but then their values would be available to any Java code in
+Elasticsearch that can read system properties.
+
+Instead, when Elasticsearch bootstraps itself, it compiles all APM settings
+together, including any `secret_key` or `api_key` values from the ES keystore,
+and writes out a temporary APM config file containing all static configuration
+(i.e. values that cannot change after the agent starts). This file is deleted
+as soon as possible after ES starts up. Settings that are not sensitive and can
+be changed dynamically are configured via system properties. Calls to the ES
+settings REST API are translated into system property writes, which the agent
+later picks up and applies.
+
+## Where is tracing data sent?
+
+You need to have an APM server running somewhere. For example, you can create a
+deployment in [Elastic Cloud](https://www.elastic.co/cloud/) with Elastic's APM
+integration.
+
+## What do we trace?
+
+We primarily trace "tasks". The tasks framework in Elasticsearch allows work to
+be scheduled for execution, cancelled, executed in a different thread pool, and
+so on. Tracing a task results in a "span", which represents the execution of the
+task in the tracing system. We also instrument REST requests, which are not (at
+present) modelled by tasks.
+
+A span can be associated with a parent span, which allows all spans in, for
+example, a REST request to be grouped together. Spans can track work across
+different Elasticsearch nodes.
+
+Elasticsearch also supports distributed tracing via [W3c Trace Context][w3c]
+headers. If clients of Elasticsearch send these headers with their requests,
+then that data will be forwarded to the APM server in order to yield a trace
+across systems.
+
+In rare circumstances, it is possible to avoid tracing a task using
+`TaskManager#register(String,String,TaskAwareRequest,boolean)`. For example,
+Machine Learning uses tasks to record which models are loaded on each node. Such
+tasks are long-lived and are not suitable candidates for APM tracing.
+
+## Thread contexts and nested spans
+
+When a span is started, Elasticsearch tracks information about that span in the
+current [thread context][thread-context]. If a new thread context is created,
+then the current span information must not be propagated but instead renamed, so
+that (1) it doesn't interfere when new trace information is set in the context,
+and (2) the previous trace information is available to establish a parent /
+child span relationship. This is done with `ThreadContext#newTraceContext()`.
+
+Sometimes we need to detach new spans from their parent. For example, creating
+an index starts some related background tasks, but these shouldn't be associated
+with the REST request, otherwise all the background task spans will be
+associated with the REST request for as long as Elasticsearch is running.
+`ThreadContext` provides the `clearTraceContext`() method for this purpose.
+
+## How do I trace something that isn't a task?
+
+First work out if you can turn it into a task. No, really.
+
+If you can't do that, you'll need to ensure that your class can get access to a
+`Tracer` instance (this is available to inject, or you'll need to pass it when
+your class is created). Then you need to call the appropriate methods on the
+tracer when a span should start and end. You'll also need to manage the creation
+of new trace contexts when child spans need to be created.
+
+## What additional attributes should I set?
+
+That's up to you. Be careful not to capture anything that could leak sensitive
+or personal information.
+
+## What is "scope" and when should I use it?
+
+Usually you won't need to.
+
+That said, sometimes you may want more details to be captured about a particular
+section of code. You can think of "scope" as representing the currently active
+tracing context. Using scope allows the APM agent to do the following:
+
+* Enables automatic correlation between the "active span" and logging, where
+ logs have also been captured.
+* Enables capturing any exceptions thrown when the span is active, and linking
+ those exceptions to the span
+* Allows the sampling profiler to be used as it allows samples to be linked to
+ the active span (if any), so the agent can automatically get extra spans
+ without manual instrumentation.
+
+However, a scope must be closed in the same thread in which it was opened, which
+cannot be guaranteed when using tasks, making scope largely useless to
+Elasticsearch.
+
+In the OpenTelemetry documentation, spans, scope and context are fairly
+straightforward to use, since `Scope` is an `AutoCloseable` and so can be
+easily created and cleaned up using try-with-resources blocks. Unfortunately,
+Elasticsearch is a complex piece of software, and also extremely asynchronous,
+so the typical OpenTelemetry examples do not work.
+
+Nonetheless, it is possible to manually use scope where we need more detail by
+explicitly opening a scope via the `Tracer`.
+
+
+[otel]: https://opentelemetry.io/
+[thread-context]: ./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
+[w3c]: https://www.w3.org/TR/trace-context/
+[tracing]: ./server/src/main/java/org/elasticsearch/tracing/
+[config]: ./modules/apm/src/main/config/elasticapm.properties
+[agent-config]: https://www.elastic.co/guide/en/apm/agent/java/master/configuration.html
+[agent]: https://www.elastic.co/guide/en/apm/agent/java/current/index.html
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java
index ff93e41e9915d..a2eba3ac68332 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/fetch/subphase/FetchSourcePhaseBenchmark.java
@@ -8,8 +8,6 @@
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.FetchSourcePhase;
import org.elasticsearch.search.lookup.SourceLookup;
-import org.elasticsearch.xcontent.DeprecationHandler;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
@@ -108,8 +106,7 @@ public BytesReference filterXContentOnBuilder() throws IOException {
XContentType.JSON.toParsedMediaType()
);
try (
- XContentParser parser = XContentType.JSON.xContent()
- .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, sourceBytes.streamInput())
+ XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, sourceBytes.streamInput())
) {
builder.copyCurrentStructure(parser);
return BytesReference.bytes(builder);
diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
index b871071c412e2..e939ec976751d 100644
--- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
+++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=97a52d145762adc241bad7fd18289bf7f6801e08ece6badf80402fe2b9f250b1
+distributionSha256Sum=db9c8211ed63f61f60292c69e80d89196f9eb36665e369e7f00ac4cc841c2219
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy
index 8d1a038331dca..d43dbec5ef6b6 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy
@@ -39,7 +39,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest
"""
when:
- def result = gradleRunner("setupDistro", '-g', testProjectDir.newFolder('GUH').path).build()
+ def result = gradleRunner("setupDistro", '-g', gradleUserHome).build()
then:
result.task(":distribution:archives:${testArchiveProjectName}:buildExpanded").outcome == TaskOutcome.SUCCESS
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
index 8300318fbdc16..ec546508c677d 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy
@@ -8,6 +8,7 @@
package org.elasticsearch.gradle.internal
+import spock.lang.TempDir
import spock.lang.Unroll
import com.github.tomakehurst.wiremock.WireMockServer
@@ -126,7 +127,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
when:
def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server ->
buildFile << repositoryMockSetup(server, jdkVendor, jdkVersion)
- gradleRunner('getJdk', '-i', '-g', testProjectDir.newFolder().toString()).build()
+ gradleRunner('getJdk', '-i', '-g', gradleUserHome).build()
}
then:
@@ -179,13 +180,12 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest {
def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server ->
buildFile << repositoryMockSetup(server, VENDOR_ADOPTIUM, ADOPT_JDK_VERSION)
- def commonGradleUserHome = testProjectDir.newFolder().toString()
// initial run
- def firstResult = gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', commonGradleUserHome).build()
+ def firstResult = gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', gradleUserHome).build()
// assert the output of an executed transform is shown
assertOutputContains(firstResult.output, "Unpacking $expectedArchiveName using $transformType")
// run against up-to-date transformations
- gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', commonGradleUserHome).build()
+ gradleRunner('clean', 'getJdk', '-i', '--warning-mode', 'all', '-g', gradleUserHome).build()
}
then:
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle
index 50db02d9e21a1..f8024f65cf04f 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.build-complete.gradle
@@ -23,7 +23,6 @@ if (buildNumber && performanceTest == null) {
fileset(dir: projectDir) {
Set fileSet = fileTree(projectDir) {
include("**/*.hprof")
- include("**/reaper.log")
include("**/build/test-results/**/*.xml")
include("**/build/testclusters/**")
exclude("**/build/testclusters/**/data/**")
@@ -49,6 +48,8 @@ if (buildNumber && performanceTest == null) {
}
fileset(dir: "${gradle.gradleUserHomeDir}/workers", followsymlinks: false)
+
+ fileset(dir: "${project.projectDir}/.gradle/reaper", followsymlinks: false)
}
} catch (Exception e) {
logger.lifecycle("Failed to archive additional logs", e)
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
index 706334479ec28..a020262b5b852 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
@@ -6,10 +6,11 @@
* Side Public License, v 1.
*/
+
import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask
import org.elasticsearch.gradle.internal.info.BuildParams
-import org.elasticsearch.gradle.testclusters.TestDistribution
import org.elasticsearch.gradle.testclusters.TestClustersAware
+import org.elasticsearch.gradle.testclusters.TestDistribution
// Common config when running with a FIPS-140 runtime JVM
if (BuildParams.inFipsJvm) {
@@ -31,7 +32,13 @@ if (BuildParams.inFipsJvm) {
copy 'fips_java.policy'
copy 'cacerts.bcfks'
}
- def extraFipsJarsConfiguration = configurations.detachedConfiguration(bcFips, bcTlsFips)
+
+ def extraFipsJarsConfiguration = configurations.create("fipsImplementation") {
+ withDependencies {
+ add(bcFips)
+ add(bcTlsFips)
+ }
+ }
project.afterEvaluate {
// ensure that bouncycastle is on classpath for the all of test types, must happen in evaluateAfter since the rest tests explicitly
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.run-ccs.gradle b/build-tools-internal/src/main/groovy/elasticsearch.run-ccs.gradle
new file mode 100644
index 0000000000000..a137758e17f7b
--- /dev/null
+++ b/build-tools-internal/src/main/groovy/elasticsearch.run-ccs.gradle
@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask
+import org.elasticsearch.gradle.testclusters.RunTask
+
+boolean proxyMode = true;
+
+def fulfillingCluster = testClusters.register('fulfilling-cluster') {
+ setting 'xpack.watcher.enabled', 'false'
+ setting 'xpack.ml.enabled', 'false'
+ setting 'xpack.license.self_generated.type', 'trial'
+
+ user username: 'elastic-admin', password: 'elastic-password', role: '_es_test_root'
+}
+
+def queryingCluster = testClusters.register('querying-cluster') {
+ setting 'xpack.watcher.enabled', 'false'
+ setting 'xpack.ml.enabled', 'false'
+ setting 'xpack.license.self_generated.type', 'trial'
+ if (proxyMode) {
+ setting 'cluster.remote.my_remote_cluster.mode', 'proxy'
+ setting 'cluster.remote.my_remote_cluster.proxy_address', {
+ "\"${fulfillingCluster.get().getAllTransportPortURI().get(0)}\""
+ }
+ } else {
+ setting 'cluster.remote.my_remote_cluster.seeds', {
+ fulfillingCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString()
+ }
+ }
+ setting 'cluster.remote.connections_per_cluster', "1"
+
+ user username: 'elastic-admin', password: 'elastic-password', role: '_es_test_root'
+}
+
+// the following task is needed to make sure the fulfilling cluster is fully configured before starting both clusters
+// this allows the querying cluster to use configuration from the fulfilling cluster while honoring the RunTask configuration (such as using port 9200)
+tasks.register('initfulfillingCluster', RunTask) {
+ useCluster testClusters.named("fulfilling-cluster")
+ initOnly = true //only initialize the testCluster, don't start it
+ portOffset = 1 //when only initializing, instruct to use one above the normal ports to avoid collisions when other cluster also initializes
+ //debug = true //this task doesn't honor the command line options for run-ccs, so need to statically configure debug
+}
+
+tasks.register("run-ccs", RunTask) {
+ dependsOn initfulfillingCluster
+ useCluster testClusters.named("fulfilling-cluster")
+ useCluster testClusters.named("querying-cluster")
+ doFirst {
+ println "** Querying cluster HTTP endpoints are: ${-> queryingCluster.get().allHttpSocketURI.join(",")}"
+ println "** Querying cluster transport endpoints are: ${-> queryingCluster.get().getAllTransportPortURI().join(",")}"
+ println "** Fulfilling cluster HTTP endpoints are: ${-> fulfillingCluster.get().allHttpSocketURI.join(",")}"
+ println "** Fulfilling cluster transport endpoints are: ${-> fulfillingCluster.get().getAllTransportPortURI().join(",")}"
+ }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
index cc8348d424e58..196835ccdd06d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java
@@ -10,7 +10,9 @@
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.internal.info.BuildParams;
+import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
+import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.provider.ProviderFactory;
@@ -36,6 +38,16 @@ public void apply(Project project) {
version -> (version.equals(VersionProperties.getElasticsearchVersion()) && BuildParams.isSnapshotBuild() == false)
|| BuildParams.getBwcVersions().unreleasedInfo(version) == null
);
+
+ if (shouldConfigureTestClustersWithOneProcessor()) {
+ NamedDomainObjectContainer testClusters = (NamedDomainObjectContainer) project
+ .getExtensions()
+ .getByName(TestClustersPlugin.EXTENSION_NAME);
+ testClusters.configureEach(elasticsearchCluster -> elasticsearchCluster.setting("node.processors", "1"));
+ }
}
+ private boolean shouldConfigureTestClustersWithOneProcessor() {
+ return Boolean.parseBoolean(System.getProperty("tests.configure_test_clusters_with_one_processor", "false"));
+ }
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java
index 1fbefef45c8e6..3bc6697930198 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesPrecommitPlugin.java
@@ -34,9 +34,6 @@ public TaskProvider extends Task> createTask(Project project) {
runtimeClasspath.fileCollection(dependency -> dependency instanceof ProjectDependency == false).minus(compileOnly)
);
});
-
- // we also create the updateShas helper task that is associated with dependencyLicenses
- project.getTasks().register("updateShas", UpdateShasTask.class, t -> t.setParentTask(dependencyLicenses));
return dependencyLicenses;
}
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java
index 59f8c2da0d718..71de2626d5fca 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java
@@ -7,7 +7,6 @@
*/
package org.elasticsearch.gradle.internal.precommit;
-import org.apache.commons.codec.binary.Hex;
import org.elasticsearch.gradle.internal.precommit.LicenseAnalyzer.LicenseInfo;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
@@ -23,30 +22,21 @@
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.InputFiles;
-import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskAction;
import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
-import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import java.util.stream.Collectors;
import javax.inject.Inject;
@@ -193,7 +183,7 @@ public void ignoreFile(String file) {
}
@TaskAction
- public void checkDependencies() throws IOException, NoSuchAlgorithmException {
+ public void checkDependencies() {
if (dependencies == null) {
throw new GradleException("No dependencies variable defined.");
}
@@ -214,12 +204,9 @@ public void checkDependencies() throws IOException, NoSuchAlgorithmException {
Map licenses = new HashMap<>();
Map notices = new HashMap<>();
Map sources = new HashMap<>();
- Set shaFiles = new HashSet<>();
for (File file : licensesDirAsFile.listFiles()) {
String name = file.getName();
- if (name.endsWith(SHA_EXTENSION)) {
- shaFiles.add(file);
- } else if (name.endsWith("-LICENSE") || name.endsWith("-LICENSE.txt")) {
+ if (name.endsWith("-LICENSE") || name.endsWith("-LICENSE.txt")) {
// TODO: why do we support suffix of LICENSE *and* LICENSE.txt??
licenses.put(name, false);
} else if (name.contains("-NOTICE") || name.contains("-NOTICE.txt")) {
@@ -233,18 +220,13 @@ public void checkDependencies() throws IOException, NoSuchAlgorithmException {
notices.keySet().removeAll(ignoreFiles);
sources.keySet().removeAll(ignoreFiles);
- checkDependencies(licenses, notices, sources, shaFiles);
+ checkDependencies(licenses, notices, sources);
licenses.forEach((item, exists) -> failIfAnyMissing(item, exists, "license"));
notices.forEach((item, exists) -> failIfAnyMissing(item, exists, "notice"));
sources.forEach((item, exists) -> failIfAnyMissing(item, exists, "sources"));
-
- if (shaFiles.isEmpty() == false) {
- throw new GradleException("Unused sha files found: \n" + joinFilenames(shaFiles));
- }
-
}
// This is just a marker output folder to allow this task being up-to-date.
@@ -261,18 +243,10 @@ private void failIfAnyMissing(String item, Boolean exists, String type) {
}
}
- private void checkDependencies(
- Map licenses,
- Map notices,
- Map sources,
- Set shaFiles
- ) throws NoSuchAlgorithmException, IOException {
+ private void checkDependencies(Map licenses, Map notices, Map sources) {
for (File dependency : dependencies) {
String jarName = dependency.getName();
String depName = regex.matcher(jarName).replaceFirst("");
-
- validateSha(shaFiles, dependency, jarName, depName);
-
String dependencyName = getDependencyName(mappings, depName);
logger.info("mapped dependency name {} to {} for license/notice check", depName, dependencyName);
checkFile(dependencyName, jarName, licenses, "LICENSE");
@@ -286,24 +260,6 @@ private void checkDependencies(
}
}
- private void validateSha(Set shaFiles, File dependency, String jarName, String depName) throws NoSuchAlgorithmException,
- IOException {
- if (ignoreShas.contains(depName)) {
- // local deps should not have sha files!
- if (getShaFile(jarName).exists()) {
- throw new GradleException("SHA file " + getShaFile(jarName) + " exists for ignored dependency " + depName);
- }
- } else {
- logger.info("Checking sha for {}", jarName);
- checkSha(dependency, jarName, shaFiles);
- }
- }
-
- private String joinFilenames(Set shaFiles) {
- List names = shaFiles.stream().map(File::getName).collect(Collectors.toList());
- return String.join("\n", names);
- }
-
public static String getDependencyName(Map mappings, String dependencyName) {
// order is the same for keys and values iteration since we use a linked hashmap
List mapped = new ArrayList<>(mappings.values());
@@ -319,30 +275,6 @@ public static String getDependencyName(Map mappings, String depe
return dependencyName;
}
- private void checkSha(File jar, String jarName, Set shaFiles) throws NoSuchAlgorithmException, IOException {
- File shaFile = getShaFile(jarName);
- if (shaFile.exists() == false) {
- throw new GradleException("Missing SHA for " + jarName + ". Run \"gradle updateSHAs\" to create them");
- }
-
- // TODO: shouldn't have to trim, sha files should not have trailing newline
- byte[] fileBytes = Files.readAllBytes(shaFile.toPath());
- String expectedSha = new String(fileBytes, StandardCharsets.UTF_8).trim();
-
- String sha = getSha1(jar);
-
- if (expectedSha.equals(sha) == false) {
- final String exceptionMessage = String.format(Locale.ROOT, """
- SHA has changed! Expected %s for %s but got %s.
- This usually indicates a corrupt dependency cache or artifacts changed upstream.
- Either wipe your cache, fix the upstream artifact, or delete %s and run updateShas
- """, expectedSha, jarName, sha, shaFile);
-
- throw new GradleException(exceptionMessage);
- }
- shaFiles.remove(shaFile);
- }
-
private void checkFile(String name, String jarName, Map counters, String type) {
String fileName = getFileName(name, counters, type);
@@ -375,27 +307,4 @@ public LinkedHashMap getMappings() {
return new LinkedHashMap<>(mappings);
}
- File getShaFile(String jarName) {
- return new File(licensesDir.get().getAsFile(), jarName + SHA_EXTENSION);
- }
-
- @Internal
- Set getShaFiles() {
- File licenseDirAsFile = licensesDir.get().getAsFile();
- File[] array = licenseDirAsFile.listFiles();
- if (array == null) {
- throw new GradleException("\"" + licenseDirAsFile.getPath() + "\" isn't a valid directory");
- }
-
- return Arrays.stream(array).filter(file -> file.getName().endsWith(SHA_EXTENSION)).collect(Collectors.toSet());
- }
-
- String getSha1(File file) throws IOException, NoSuchAlgorithmException {
- byte[] bytes = Files.readAllBytes(file.toPath());
-
- MessageDigest digest = MessageDigest.getInstance("SHA-1");
- char[] encoded = Hex.encodeHex(digest.digest(bytes));
- return String.copyValueOf(encoded);
- }
-
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTask.java
deleted file mode 100644
index e3140a9d71b6b..0000000000000
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTask.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.gradle.internal.precommit;
-
-import org.gradle.api.DefaultTask;
-import org.gradle.api.logging.Logger;
-import org.gradle.api.logging.Logging;
-import org.gradle.api.tasks.Internal;
-import org.gradle.api.tasks.TaskAction;
-import org.gradle.api.tasks.TaskProvider;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.StandardOpenOption;
-import java.security.NoSuchAlgorithmException;
-import java.util.Set;
-
-/**
- * A task to update shas used by {@code DependencyLicensesCheck}
- */
-public class UpdateShasTask extends DefaultTask {
-
- private final Logger logger = Logging.getLogger(getClass());
-
- /** The parent dependency licenses task to use configuration from */
- private TaskProvider parentTask;
-
- public UpdateShasTask() {
- setDescription("Updates the sha files for the dependencyLicenses check");
- setOnlyIf(element -> parentTask.get().getLicensesDir() != null);
- }
-
- @TaskAction
- public void updateShas() throws NoSuchAlgorithmException, IOException {
- Set shaFiles = parentTask.get().getShaFiles();
-
- for (File dependency : parentTask.get().getDependencies()) {
- String jarName = dependency.getName();
- File shaFile = parentTask.get().getShaFile(jarName);
-
- if (shaFile.exists() == false) {
- createSha(dependency, jarName, shaFile);
- } else {
- shaFiles.remove(shaFile);
- }
- }
-
- for (File shaFile : shaFiles) {
- logger.lifecycle("Removing unused sha " + shaFile.getName());
- shaFile.delete();
- }
- }
-
- private void createSha(File dependency, String jarName, File shaFile) throws IOException, NoSuchAlgorithmException {
- logger.lifecycle("Adding sha for " + jarName);
-
- String sha = parentTask.get().getSha1(dependency);
-
- Files.write(shaFile.toPath(), sha.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
- }
-
- @Internal
- public DependencyLicensesTask getParentTask() {
- return parentTask.get();
- }
-
- public void setParentTask(TaskProvider parentTask) {
- this.parentTask = parentTask;
- }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
index fb6ddc5e1be16..c93320dc2b498 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
@@ -12,6 +12,7 @@
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitTaskPlugin;
import org.elasticsearch.gradle.internal.precommit.ValidateYamlAgainstSchemaTask;
+import org.gradle.api.Action;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.file.Directory;
@@ -22,6 +23,7 @@
import org.gradle.api.tasks.util.PatternSet;
import java.io.File;
+import java.util.function.Function;
import javax.inject.Inject;
@@ -67,10 +69,14 @@ public void apply(Project project) {
task.dependsOn(validateChangelogsAgainstYamlTask);
});
- project.getTasks().register("generateReleaseNotes", GenerateReleaseNotesTask.class).configure(task -> {
+ final Function> configureGenerateTask = shouldConfigureYamlFiles -> task -> {
task.setGroup("Documentation");
- task.setDescription("Generates release notes from changelog files held in this checkout");
- task.setChangelogs(yamlFiles);
+ if (shouldConfigureYamlFiles) {
+ task.setChangelogs(yamlFiles);
+ task.setDescription("Generates release notes from changelog files held in this checkout");
+ } else {
+ task.setDescription("Generates stub release notes e.g. after feature freeze");
+ }
task.setReleaseNotesIndexTemplate(projectDirectory.file(RESOURCES + "templates/release-notes-index.asciidoc"));
task.setReleaseNotesIndexFile(projectDirectory.file("docs/reference/release-notes.asciidoc"));
@@ -100,7 +106,12 @@ public void apply(Project project) {
task.setMigrationIndexFile(projectDirectory.file("docs/reference/migration/index.asciidoc"));
task.dependsOn(validateChangelogsTask);
- });
+ };
+
+ project.getTasks().register("generateReleaseNotes", GenerateReleaseNotesTask.class).configure(configureGenerateTask.apply(true));
+ project.getTasks()
+ .register("generateStubReleaseNotes", GenerateReleaseNotesTask.class)
+ .configure(configureGenerateTask.apply(false));
project.getTasks().register("pruneChangelogs", PruneChangelogsTask.class).configure(task -> {
task.setGroup("Documentation");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
index 14114314ad4de..acbd79fe28194 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
@@ -8,6 +8,8 @@
package org.elasticsearch.gradle.internal.release;
+import com.google.common.annotations.VisibleForTesting;
+
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.file.ConfigurableFileCollection;
@@ -30,6 +32,21 @@ public class ValidateChangelogEntryTask extends DefaultTask {
private final ConfigurableFileCollection changelogs;
private final ProjectLayout projectLayout;
+ public static final String TRIPLE_BACKTICK = "```";
+ private static final String CODE_BLOCK_ERROR = """
+ [%s] uses a triple-backtick in the [%s] section, but it must be
+ formatted as a Asciidoc code block. For example:
+
+ [source,yaml]
+ ----
+ {
+ "metrics.time" : 10,
+ "metrics.time.min" : 1,
+ "metrics.time.max" : 500
+ }
+ ----
+ """;
+
@Inject
public ValidateChangelogEntryTask(ObjectFactory objectFactory, ProjectLayout projectLayout) {
this.changelogs = objectFactory.fileCollection();
@@ -43,37 +60,60 @@ public void executeTask() {
.stream()
.collect(Collectors.toMap(file -> rootDir.relativize(file.toURI()).toString(), ChangelogEntry::parse));
+ changelogs.forEach(ValidateChangelogEntryTask::validate);
+ }
+
+ @VisibleForTesting
+ static void validate(String path, ChangelogEntry entry) {
// We don't try to find all such errors, because we expect them to be rare e.g. only
// when a new file is added.
- changelogs.forEach((path, entry) -> {
- final String type = entry.getType();
-
- if (type.equals("known-issue") == false && type.equals("security") == false) {
- if (entry.getPr() == null) {
- throw new GradleException(
- "[" + path + "] must provide a [pr] number (only 'known-issue' and " + "'security' entries can omit this"
- );
- }
-
- if (entry.getArea() == null) {
- throw new GradleException(
- "[" + path + "] must provide an [area] (only 'known-issue' and " + "'security' entries can omit this"
- );
- }
+ final String type = entry.getType();
+
+ if (type.equals("known-issue") == false && type.equals("security") == false) {
+ if (entry.getPr() == null) {
+ throw new GradleException(
+ "[" + path + "] must provide a [pr] number (only 'known-issue' and 'security' entries can omit this"
+ );
}
- if ((type.equals("breaking") || type.equals("breaking-java")) && entry.getBreaking() == null) {
+ if (entry.getArea() == null) {
+ throw new GradleException("[" + path + "] must provide an [area] (only 'known-issue' and 'security' entries can omit this");
+ }
+ }
+
+ if (type.equals("breaking") || type.equals("breaking-java")) {
+ if (entry.getBreaking() == null) {
throw new GradleException(
"[" + path + "] has type [" + type + "] and must supply a [breaking] section with further information"
);
}
- if (type.equals("deprecation") && entry.getDeprecation() == null) {
+ if (entry.getBreaking().getDetails().contains(TRIPLE_BACKTICK)) {
+ throw new GradleException(CODE_BLOCK_ERROR.formatted(path, "breaking.details"));
+ }
+ if (entry.getBreaking().getImpact().contains(TRIPLE_BACKTICK)) {
+ throw new GradleException(CODE_BLOCK_ERROR.formatted(path, "breaking.impact"));
+ }
+ }
+
+ if (type.equals("deprecation")) {
+ if (entry.getDeprecation() == null) {
throw new GradleException(
"[" + path + "] has type [deprecation] and must supply a [deprecation] section with further information"
);
}
- });
+
+ if (entry.getDeprecation().getDetails().contains(TRIPLE_BACKTICK)) {
+ throw new GradleException(CODE_BLOCK_ERROR.formatted(path, "deprecation.details"));
+ }
+ if (entry.getDeprecation().getImpact().contains(TRIPLE_BACKTICK)) {
+ throw new GradleException(CODE_BLOCK_ERROR.formatted(path, "deprecation.impact"));
+ }
+ }
+
+ if (entry.getHighlight() != null && entry.getHighlight().getBody().contains(TRIPLE_BACKTICK)) {
+ throw new GradleException(CODE_BLOCK_ERROR.formatted(path, "highlight.body"));
+ }
}
@InputFiles
diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion
index 72906051c5c71..7501d508f743f 100644
--- a/build-tools-internal/src/main/resources/minimumGradleVersion
+++ b/build-tools-internal/src/main/resources/minimumGradleVersion
@@ -1 +1 @@
-7.5
\ No newline at end of file
+7.5.1
\ No newline at end of file
diff --git a/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc b/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc
index f07ba9c5d4db3..bd8ef8602530b 100644
--- a/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc
+++ b/build-tools-internal/src/main/resources/templates/release-highlights.asciidoc
@@ -32,14 +32,18 @@ if (notableHighlights.isEmpty()) { %>
<% for (highlight in notableHighlights) { %>
[discrete]
[[${ highlight.anchor }]]
-=== {es-pull}${highlight.pr}[${highlight.title}]
+=== ${highlight.title}
${highlight.body.trim()}
+
+{es-pull}${highlight.pr}[#${highlight.pr}]
<% } %>
// end::notable-highlights[]
<% } %>
<% for (highlight in nonNotableHighlights) { %>
[discrete]
[[${ highlight.anchor }]]
-=== {es-pull}${highlight.pr}[${highlight.title}]
+=== ${highlight.title}
${highlight.body.trim()}
+
+{es-pull}${highlight.pr}[#${highlight.pr}]
<% } %>
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java
index e6b1f5c90b72e..1a9284276043c 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTaskTests.java
@@ -26,8 +26,6 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
@@ -41,8 +39,6 @@ public class DependencyLicensesTaskTests {
@Rule
public ExpectedException expectedException = ExpectedException.none();
- private UpdateShasTask updateShas;
-
private TaskProvider task;
private Project project;
@@ -53,7 +49,6 @@ public class DependencyLicensesTaskTests {
public void prepare() {
project = createProject();
task = createDependencyLicensesTask(project);
- updateShas = createUpdateShasTask(project, task);
dependency = project.getDependencies().localGroovy();
task.configure(new Action() {
@Override
@@ -87,19 +82,6 @@ public void givenProjectWithoutLicensesDirNorDependenciesThenShouldReturnSilentl
task.get().checkDependencies();
}
- @Test
- public void givenProjectWithDependencyButNoShaFileThenShouldReturnException() throws Exception {
- expectedException.expect(GradleException.class);
- expectedException.expectMessage(containsString("Missing SHA for "));
-
- File licensesDir = getLicensesDir(project);
- createFileIn(licensesDir, "groovy-all-LICENSE.txt", PERMISSIVE_LICENSE_TEXT);
- createFileIn(licensesDir, "groovy-all-NOTICE.txt", "");
-
- project.getDependencies().add("implementation", project.getDependencies().localGroovy());
- task.get().checkDependencies();
- }
-
@Test
public void givenProjectWithDependencyButNoLicenseFileThenShouldReturnException() throws Exception {
expectedException.expect(GradleException.class);
@@ -108,7 +90,6 @@ public void givenProjectWithDependencyButNoLicenseFileThenShouldReturnException(
project.getDependencies().add("implementation", project.getDependencies().localGroovy());
getLicensesDir(project).mkdir();
- updateShas.updateShas();
task.get().checkDependencies();
}
@@ -121,7 +102,6 @@ public void givenProjectWithDependencyButNoNoticeFileThenShouldReturnException()
createFileIn(getLicensesDir(project), "groovy-LICENSE.txt", PERMISSIVE_LICENSE_TEXT);
- updateShas.updateShas();
task.get().checkDependencies();
}
@@ -135,7 +115,6 @@ public void givenProjectWithStrictDependencyButNoSourcesFileThenShouldReturnExce
createFileIn(getLicensesDir(project), "groovy-LICENSE.txt", STRICT_LICENSE_TEXT);
createFileIn(getLicensesDir(project), "groovy-NOTICE.txt", "");
- updateShas.updateShas();
task.get().checkDependencies();
}
@@ -147,7 +126,6 @@ public void givenProjectWithStrictDependencyAndEverythingInOrderThenShouldReturn
createFileIn(getLicensesDir(project), "groovy-NOTICE.txt", "");
createFileIn(getLicensesDir(project), "groovy-SOURCES.txt", "");
- updateShas.updateShas();
task.get().checkDependencies();
}
@@ -190,37 +168,6 @@ public void givenProjectWithANoticeButWithoutTheDependencyThenShouldThrowExcepti
task.get().checkDependencies();
}
- @Test
- public void givenProjectWithAShaButWithoutTheDependencyThenShouldThrowException() throws Exception {
- expectedException.expect(GradleException.class);
- expectedException.expectMessage(containsString("Unused sha files found: \n"));
-
- project.getDependencies().add("implementation", dependency);
-
- File licensesDir = getLicensesDir(project);
- createAllDefaultDependencyFiles(licensesDir, "groovy");
- createFileIn(licensesDir, "non-declared.sha1", "");
-
- task.get().checkDependencies();
- }
-
- @Test
- public void givenProjectWithADependencyWithWrongShaThenShouldThrowException() throws Exception {
- expectedException.expect(GradleException.class);
- expectedException.expectMessage(containsString("SHA has changed! Expected "));
-
- project.getDependencies().add("implementation", dependency);
-
- File licensesDir = getLicensesDir(project);
- createAllDefaultDependencyFiles(licensesDir, "groovy");
-
- Path groovySha = Files.list(licensesDir.toPath()).filter(file -> file.toFile().getName().contains("sha")).findFirst().get();
-
- Files.write(groovySha, new byte[] { 1 }, StandardOpenOption.CREATE);
-
- task.get().checkDependencies();
- }
-
@Test
public void givenProjectWithADependencyMappingThenShouldReturnSilently() throws Exception {
project.getDependencies().add("implementation", dependency);
@@ -261,14 +208,6 @@ public void givenProjectWithAIgnoreShaConfigurationAndNoShaFileThenShouldReturnS
task.get().checkDependencies();
}
- @Test
- public void givenProjectWithoutLicensesDirWhenAskingForShaFilesThenShouldThrowException() {
- expectedException.expect(GradleException.class);
- expectedException.expectMessage(containsString("isn't a valid directory"));
-
- task.get().getShaFiles();
- }
-
private Project createProject() {
Project project = ProjectBuilder.builder().build();
project.getPlugins().apply(JavaPlugin.class);
@@ -276,11 +215,9 @@ private Project createProject() {
return project;
}
- private void createAllDefaultDependencyFiles(File licensesDir, String dependencyName) throws IOException, NoSuchAlgorithmException {
+ private void createAllDefaultDependencyFiles(File licensesDir, String dependencyName) throws IOException {
createFileIn(licensesDir, dependencyName + "-LICENSE.txt", PERMISSIVE_LICENSE_TEXT);
createFileIn(licensesDir, dependencyName + "-NOTICE.txt", "");
-
- updateShas.updateShas();
}
private File getLicensesDir(Project project) {
@@ -300,13 +237,6 @@ private void createFileIn(File parent, String name, String content) throws IOExc
Files.write(file, content.getBytes(StandardCharsets.UTF_8));
}
- private UpdateShasTask createUpdateShasTask(Project project, TaskProvider dependencyLicensesTask) {
- UpdateShasTask task = project.getTasks().register("updateShas", UpdateShasTask.class).get();
-
- task.setParentTask(dependencyLicensesTask);
- return task;
- }
-
private TaskProvider createDependencyLicensesTask(Project project) {
TaskProvider task = project.getTasks()
.register("dependencyLicenses", DependencyLicensesTask.class, new Action() {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTaskTests.java
deleted file mode 100644
index 174c5d0312486..0000000000000
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/UpdateShasTaskTests.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.gradle.internal.precommit;
-
-import org.apache.commons.io.FileUtils;
-import org.gradle.api.GradleException;
-import org.gradle.api.Project;
-import org.gradle.api.artifacts.Dependency;
-import org.gradle.api.file.FileCollection;
-import org.gradle.api.plugins.JavaPlugin;
-import org.gradle.api.tasks.TaskProvider;
-import org.gradle.testfixtures.ProjectBuilder;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.security.NoSuchAlgorithmException;
-
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class UpdateShasTaskTests {
-
- public static final String GROOVY_JAR_REGEX = "groovy-\\d\\.\\d+\\.\\d+\\.jar";
- @Rule
- public ExpectedException expectedException = ExpectedException.none();
-
- private UpdateShasTask task;
-
- private Project project;
-
- private Dependency dependency;
-
- @Before
- public void prepare() throws IOException {
- project = createProject();
- task = createUpdateShasTask(project);
- dependency = project.getDependencies().localGroovy();
-
- }
-
- @Test
- public void whenDependencyDoesntExistThenShouldDeleteDependencySha() throws IOException, NoSuchAlgorithmException {
- File unusedSha = createFileIn(getLicensesDir(project), "test.sha1", "");
- task.updateShas();
-
- assertFalse(unusedSha.exists());
- }
-
- @Test
- public void whenDependencyExistsButShaNotThenShouldCreateNewShaFile() throws IOException, NoSuchAlgorithmException {
- project.getDependencies().add("implementation", dependency);
-
- getLicensesDir(project).mkdir();
- task.updateShas();
- Path groovySha = Files.list(getLicensesDir(project).toPath())
- .filter(p -> p.toFile().getName().matches(GROOVY_JAR_REGEX + ".sha1"))
- .findFirst()
- .get();
- assertTrue(groovySha.toFile().getName().startsWith("groovy"));
- }
-
- @Test
- public void whenDependencyAndWrongShaExistsThenShouldNotOverwriteShaFile() throws IOException, NoSuchAlgorithmException {
- project.getDependencies().add("implementation", dependency);
- File groovyJar = task.getParentTask()
- .getDependencies()
- .getFiles()
- .stream()
- .filter(f -> f.getName().matches(GROOVY_JAR_REGEX))
- .findFirst()
- .get();
- String groovyShaName = groovyJar.getName() + ".sha1";
- File groovySha = createFileIn(getLicensesDir(project), groovyShaName, "content");
- task.updateShas();
- assertThat(FileUtils.readFileToString(groovySha), equalTo("content"));
- }
-
- @Test
- public void whenLicensesDirDoesntExistThenShouldThrowException() throws IOException, NoSuchAlgorithmException {
- expectedException.expect(GradleException.class);
- expectedException.expectMessage(containsString("isn't a valid directory"));
-
- task.updateShas();
- }
-
- private Project createProject() {
- Project project = ProjectBuilder.builder().build();
- project.getPlugins().apply(JavaPlugin.class);
-
- return project;
- }
-
- private File getLicensesDir(Project project) {
- return getFile(project, "licenses");
- }
-
- private File getFile(Project project, String fileName) {
- return project.getProjectDir().toPath().resolve(fileName).toFile();
- }
-
- private File createFileIn(File parent, String name, String content) throws IOException {
- parent.mkdir();
-
- Path path = parent.toPath().resolve(name);
- File file = path.toFile();
-
- Files.write(path, content.getBytes(), StandardOpenOption.CREATE);
-
- return file;
- }
-
- private UpdateShasTask createUpdateShasTask(Project project) {
- UpdateShasTask task = project.getTasks().register("updateShas", UpdateShasTask.class).get();
-
- task.setParentTask(createDependencyLicensesTask(project));
- return task;
- }
-
- private TaskProvider createDependencyLicensesTask(Project project) {
- return project.getTasks()
- .register(
- "dependencyLicenses",
- DependencyLicensesTask.class,
- dependencyLicensesTask -> dependencyLicensesTask.setDependencies(getDependencies(project))
- );
- }
-
- private FileCollection getDependencies(Project project) {
- return project.getConfigurations().getByName("compileClasspath");
- }
-}
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java
index 7f510bef22661..db39c6eea7e86 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.java
@@ -60,11 +60,11 @@ public void generateFile_rendersCorrectMarkup() throws Exception {
}
private List getEntries() {
- ChangelogEntry entry1 = makeChangelogEntry(1, true);
- ChangelogEntry entry2 = makeChangelogEntry(2, true);
- ChangelogEntry entry3 = makeChangelogEntry(3, false);
+ ChangelogEntry entry123 = makeChangelogEntry(123, true);
+ ChangelogEntry entry456 = makeChangelogEntry(456, true);
+ ChangelogEntry entry789 = makeChangelogEntry(789, false);
// Return unordered list, to test correct re-ordering
- return List.of(entry2, entry1, entry3);
+ return List.of(entry456, entry123, entry789);
}
private ChangelogEntry makeChangelogEntry(int pr, boolean notable) {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTaskTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTaskTest.java
new file mode 100644
index 0000000000000..ec7b47b057a97
--- /dev/null
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTaskTest.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.release;
+
+import org.gradle.api.GradleException;
+import org.hamcrest.Matchers;
+import org.junit.jupiter.api.Test;
+
+import java.util.stream.Stream;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.endsWith;
+
+class ValidateChangelogEntryTaskTest {
+
+ @Test
+ void test_prNumber_isRequired() {
+ ChangelogEntry changelog = new ChangelogEntry();
+ changelog.setType("enhancement");
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, endsWith("must provide a [pr] number (only 'known-issue' and 'security' entries can omit this"));
+ }
+
+ @Test
+ void test_prNumber_notRequired() {
+ Stream.of("known-issue", "security").forEach(type -> {
+ ChangelogEntry changelog = new ChangelogEntry();
+ changelog.setType(type);
+
+ // Should not throw an exception!
+ ValidateChangelogEntryTask.validate("", changelog);
+ });
+ }
+
+ @Test
+ void test_area_isRequired() {
+ final ChangelogEntry changelog = new ChangelogEntry();
+ changelog.setType("enhancement");
+ changelog.setPr(123);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, endsWith("must provide an [area] (only 'known-issue' and 'security' entries can omit this"));
+ }
+
+ @Test
+ void test_breaking_requiresBreakingSection() {
+ Stream.of("breaking", "breaking-java").forEach(type -> {
+ final ChangelogEntry changelog = buildChangelog(type);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, endsWith("has type [" + type + "] and must supply a [breaking] section with further information"));
+ });
+ }
+
+ @Test
+ void test_breaking_rejectsTripleBackticksInDetails() {
+ Stream.of("breaking", "breaking-java").forEach(type -> {
+ final ChangelogEntry.Breaking breaking = new ChangelogEntry.Breaking();
+ breaking.setDetails("""
+ Some waffle.
+ ```
+ I AM CODE!
+ ```
+ """);
+
+ final ChangelogEntry changelog = buildChangelog(type);
+ changelog.setBreaking(breaking);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, containsString("uses a triple-backtick in the [breaking.details] section"));
+ });
+ }
+
+ @Test
+ void test_breaking_rejectsTripleBackticksInImpact() {
+ Stream.of("breaking", "breaking-java").forEach(type -> {
+ final ChangelogEntry.Breaking breaking = new ChangelogEntry.Breaking();
+ breaking.setDetails("Waffle waffle");
+ breaking.setImpact("""
+ More waffle.
+ ```
+ THERE ARE WEASEL RAKING THROUGH MY GARBAGE!
+ ```
+ """);
+
+ final ChangelogEntry changelog = buildChangelog(type);
+ changelog.setBreaking(breaking);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, containsString("uses a triple-backtick in the [breaking.impact] section"));
+ });
+ }
+
+ @Test
+ void test_deprecation_rejectsTripleBackticksInImpact() {
+ final ChangelogEntry.Deprecation deprecation = new ChangelogEntry.Deprecation();
+ deprecation.setDetails("Waffle waffle");
+ deprecation.setImpact("""
+ More waffle.
+ ```
+ THERE ARE WEASEL RAKING THROUGH MY GARBAGE!
+ ```
+ """);
+
+ final ChangelogEntry changelog = buildChangelog("deprecation");
+ changelog.setDeprecation(deprecation);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, containsString("uses a triple-backtick in the [deprecation.impact] section"));
+ }
+
+ @Test
+ void test_deprecation_rejectsTripleBackticksInDetails() {
+ final ChangelogEntry.Deprecation deprecation = new ChangelogEntry.Deprecation();
+ deprecation.setDetails("""
+ Some waffle.
+ ```
+ I AM CODE!
+ ```
+ """);
+
+ final ChangelogEntry changelog = buildChangelog("deprecation");
+ changelog.setDeprecation(deprecation);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, containsString("uses a triple-backtick in the [deprecation.details] section"));
+ }
+
+ @Test
+ void test_highlight_rejectsTripleBackticksInBody() {
+ final ChangelogEntry.Highlight highlight = new ChangelogEntry.Highlight();
+ highlight.setBody("""
+ Some waffle.
+ ```
+ I AM CODE!
+ ```
+ """);
+
+ final ChangelogEntry changelog = buildChangelog("enhancement");
+ changelog.setHighlight(highlight);
+
+ final String message = doValidate(changelog);
+
+ assertThat(message, containsString("uses a triple-backtick in the [highlight.body] section"));
+ }
+
+ private static ChangelogEntry buildChangelog(String type) {
+ final ChangelogEntry changelog = new ChangelogEntry();
+ changelog.setType(type);
+ changelog.setPr(123);
+ changelog.setArea("Infra/Core");
+ return changelog;
+ }
+
+ private String doValidate(ChangelogEntry entry) {
+ try {
+ ValidateChangelogEntryTask.validate("docs/123.yaml", entry);
+ throw new AssertionError("No exception thrown!");
+ } catch (Exception e) {
+ assertThat(e, Matchers.instanceOf(GradleException.class));
+ return e.getMessage();
+ }
+ }
+}
diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc
index a55a590a8bca5..19c713042a42b 100644
--- a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc
+++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc
@@ -20,20 +20,26 @@ Other versions:
// tag::notable-highlights[]
[discrete]
-[[notable_release_highlight_number_1]]
-=== {es-pull}1[Notable release highlight number 1]
-Notable release body number 1
+[[notable_release_highlight_number_123]]
+=== Notable release highlight number 123
+Notable release body number 123
+
+{es-pull}123[#123]
[discrete]
-[[notable_release_highlight_number_2]]
-=== {es-pull}2[Notable release highlight number 2]
-Notable release body number 2
+[[notable_release_highlight_number_456]]
+=== Notable release highlight number 456
+Notable release body number 456
+
+{es-pull}456[#456]
// end::notable-highlights[]
[discrete]
-[[notable_release_highlight_number_3]]
-=== {es-pull}3[Notable release highlight number 3]
-Notable release body number 3
+[[notable_release_highlight_number_789]]
+=== Notable release highlight number 789
+Notable release body number 789
+
+{es-pull}789[#789]
diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties
index b16a7d4a7667e..5c2a4389e124a 100644
--- a/build-tools-internal/version.properties
+++ b/build-tools-internal/version.properties
@@ -1,5 +1,5 @@
elasticsearch = 8.5.0
-lucene = 9.3.0-snapshot-b8d1fcfd0ec
+lucene = 9.3.0
bundled_jdk_vendor = openjdk
bundled_jdk = 18.0.2+9@f6ad4b4450fd4d298113270ec84f30ee
@@ -30,15 +30,17 @@ bouncycastle=1.64
# used by security and idp (need to be in sync due to cross-dependency in testing)
opensaml = 4.0.1
-# test dependencies
-randomizedrunner = 2.8.0
-junit = 4.12
-junit5 = 5.7.1
+# client dependencies
httpclient = 4.5.13
httpcore = 4.4.13
httpasyncclient = 4.1.5
-commonslogging = 1.1.3
+commonslogging = 1.2
commonscodec = 1.14
+
+# test dependencies
+randomizedrunner = 2.8.0
+junit = 4.12
+junit5 = 5.7.1
hamcrest = 2.1
mocksocket = 1.2
diff --git a/build-tools/reaper/src/main/java/org/elasticsearch/gradle/reaper/Reaper.java b/build-tools/reaper/src/main/java/org/elasticsearch/gradle/reaper/Reaper.java
index f5a24eba36872..e6c5b61e0a76c 100644
--- a/build-tools/reaper/src/main/java/org/elasticsearch/gradle/reaper/Reaper.java
+++ b/build-tools/reaper/src/main/java/org/elasticsearch/gradle/reaper/Reaper.java
@@ -83,17 +83,17 @@ private void reap() {
delete(inputFile);
}
}
- } catch (Exception e) {
+ } catch (Throwable e) {
+ failed = true;
logFailure("Failed to reap inputs", e);
}
}
- private void logFailure(String message, Exception e) {
+ private void logFailure(String message, Throwable e) {
System.err.println(message);
if (e != null) {
e.printStackTrace(System.err);
}
- failed = true;
}
private void delete(Path toDelete) {
diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy
index 1e461665d7139..228223897ede9 100644
--- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy
+++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy
@@ -49,8 +49,7 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
- def guh = new File(testProjectDir.getRoot(), "gradle-user-home").absolutePath;
- def runner = gradleRunner('clean', 'setupDistro', '-i', '-g', guh)
+ def runner = gradleRunner('clean', 'setupDistro', '-i', '-g', gradleUserHome)
def unpackingMessage = "Unpacking elasticsearch-${version}-linux-${Architecture.current().classifier}.tar.gz " +
"using SymbolicLinkPreservingUntarTransform"
def result = withMockedDistributionDownload(version, platform, runner) {
@@ -92,8 +91,7 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
- def customGradleUserHome = testProjectDir.newFolder().absolutePath;
- def runner = gradleRunner('setupDistro', '-i', '-g', customGradleUserHome)
+ def runner = gradleRunner('setupDistro', '-i', '-g', gradleUserHome)
def result = withMockedDistributionDownload(version, platform, runner) {
build()
}
diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
index 5287d4a932587..6b662b8165034 100644
--- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
+++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/TestClustersPluginFuncTest.groovy
@@ -103,7 +103,7 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
- def runner = gradleRunner("myTask", '-i', '-g', 'guh')
+ def runner = gradleRunner("myTask", '-i', '-g', gradleUserHome)
def runningClosure = { GradleRunner r -> r.build() }
withMockedDistributionDownload(runner, runningClosure)
def result = inputProperty == "distributionClasspath" ?
@@ -155,12 +155,12 @@ class TestClustersPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
- withMockedDistributionDownload(gradleRunner("myTask", '-g', 'guh')) {
+ withMockedDistributionDownload(gradleRunner("myTask", '-g', gradleUserHome)) {
build()
}
fileChange.delegate = this
fileChange.call(this)
- def result = withMockedDistributionDownload(gradleRunner("myTask", '-i', '-g', 'guh')) {
+ def result = withMockedDistributionDownload(gradleRunner("myTask", '-i', '-g', gradleUserHome)) {
build()
}
diff --git a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/GradleTestPolicySetupPluginFuncTest.groovy b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/GradleTestPolicySetupPluginFuncTest.groovy
index 6d72dc0a611e5..2353b7e2f7d34 100644
--- a/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/GradleTestPolicySetupPluginFuncTest.groovy
+++ b/build-tools/src/integTest/groovy/org/elasticsearch/gradle/test/GradleTestPolicySetupPluginFuncTest.groovy
@@ -50,13 +50,13 @@ class GradleTestPolicySetupPluginFuncTest extends AbstractGradleFuncTest {
"""
when:
- def result = gradleRunner('test', '-g', "guh1").build()
+ def result = gradleRunner('test', '-g', gradleUserHome).build()
then:
result.task(":test").outcome == TaskOutcome.SUCCESS
when: // changing gradle user home
- result = gradleRunner('test', '-g', "guh2").build()
+ result = gradleRunner('test', '-g', gradleUserHome).build()
then: // still up-to-date
result.task(":test").outcome == TaskOutcome.UP_TO_DATE
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/ReaperService.java b/build-tools/src/main/java/org/elasticsearch/gradle/ReaperService.java
index ece27cef7b66f..d63efbe3e55cb 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/ReaperService.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/ReaperService.java
@@ -78,7 +78,7 @@ void shutdown() {
logger.info("Waiting for reaper to exit normally");
if (reaperProcess.waitFor() != 0) {
Path inputDir = getParameters().getInputDir().get().getAsFile().toPath();
- throw new GradleException("Reaper process failed. Check log at " + inputDir.resolve("error.log") + " for details");
+ throw new GradleException("Reaper process failed. Check log at " + inputDir.resolve("reaper.log") + " for details");
}
} catch (Exception e) {
throw new RuntimeException(e);
@@ -109,7 +109,7 @@ private synchronized void ensureReaperStarted() {
builder.redirectInput(ProcessBuilder.Redirect.PIPE);
File logFile = logFilePath().toFile();
builder.redirectOutput(logFile);
- builder.redirectError(logFile);
+ builder.redirectErrorStream();
reaperProcess = builder.start();
} catch (Exception e) {
throw new RuntimeException(e);
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index fcc4640ae43ca..bca06c302d2a5 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -62,6 +62,8 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.LineNumberReader;
+import java.io.PrintWriter;
+import java.io.StringWriter;
import java.io.UncheckedIOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
@@ -489,6 +491,13 @@ public void freeze() {
configurationFrozen.set(true);
}
+ private static String throwableToString(Throwable t) {
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw);
+ t.printStackTrace(pw);
+ return sw.toString();
+ }
+
@Override
public synchronized void start() {
LOGGER.info("Starting `{}`", this);
@@ -505,11 +514,9 @@ public synchronized void start() {
// make sure we always start fresh
if (Files.exists(workingDir)) {
if (preserveDataDir) {
- Files.list(workingDir)
- .filter(path -> path.equals(confPathData) == false)
- .forEach(path -> fileSystemOperations.delete(d -> d.delete(path)));
+ Files.list(workingDir).filter(path -> path.equals(confPathData) == false).forEach(this::uncheckedDeleteWithRetry);
} else {
- fileSystemOperations.delete(d -> d.delete(workingDir));
+ deleteWithRetry(workingDir);
}
}
isWorkingDirConfigured = true;
@@ -517,7 +524,13 @@ public synchronized void start() {
setupNodeDistribution(getExtractedDistributionDir());
createWorkingDir();
} catch (IOException e) {
- throw new UncheckedIOException("Failed to create working directory for " + this, e);
+ String msg = "Failed to create working directory for " + this + ", with: " + e + throwableToString(e);
+ logToProcessStdout(msg);
+ throw new UncheckedIOException(msg, e);
+ } catch (org.gradle.api.UncheckedIOException e) {
+ String msg = "Failed to create working directory for " + this + ", with: " + e + throwableToString(e);
+ logToProcessStdout(msg);
+ throw e;
}
copyExtraJars();
@@ -1192,9 +1205,75 @@ private void waitForProcessToExit(ProcessHandle processHandle) {
}
}
+ private static final int RETRY_DELETE_MILLIS = OS.current() == OS.WINDOWS ? 500 : 0;
+ private static final int MAX_RETRY_DELETE_TIMES = OS.current() == OS.WINDOWS ? 15 : 0;
+
+ /**
+ * Deletes a path, retrying if necessary.
+ *
+ * @param path the path to delete
+ * @throws IOException
+ * if an I/O error occurs
+ */
+ void deleteWithRetry(Path path) throws IOException {
+ try {
+ deleteWithRetry0(path);
+ } catch (InterruptedException x) {
+ throw new IOException("Interrupted while deleting.", x);
+ }
+ }
+
+ /** Unchecked variant of deleteWithRetry. */
+ void uncheckedDeleteWithRetry(Path path) {
+ try {
+ deleteWithRetry0(path);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ } catch (InterruptedException x) {
+ throw new UncheckedIOException("Interrupted while deleting.", new IOException());
+ }
+ }
+
+ // The exception handling here is loathsome, but necessary!
+ private void deleteWithRetry0(Path path) throws IOException, InterruptedException {
+ int times = 0;
+ IOException ioe = null;
+ while (true) {
+ try {
+ fileSystemOperations.delete(d -> d.delete(path));
+ times++;
+ // Checks for absence of the file. Semantics of Files.exists() is not the same.
+ while (Files.notExists(path) == false) {
+ if (times > MAX_RETRY_DELETE_TIMES) {
+ throw new IOException("File still exists after " + times + " waits.");
+ }
+ Thread.sleep(RETRY_DELETE_MILLIS);
+ // retry
+ fileSystemOperations.delete(d -> d.delete(path));
+ times++;
+ }
+ break;
+ } catch (NoSuchFileException ignore) {
+ // already deleted, ignore
+ break;
+ } catch (org.gradle.api.UncheckedIOException | IOException x) {
+ if (x.getCause() instanceof NoSuchFileException) {
+ // already deleted, ignore
+ break;
+ }
+ // Backoff/retry in case another process is accessing the file
+ times++;
+ if (ioe == null) ioe = new IOException();
+ ioe.addSuppressed(x);
+ if (times > MAX_RETRY_DELETE_TIMES) throw ioe;
+ Thread.sleep(RETRY_DELETE_MILLIS);
+ }
+ }
+ }
+
private void createWorkingDir() throws IOException {
// Start configuration from scratch in case of a restart
- fileSystemOperations.delete(d -> d.delete(configFile.getParent()));
+ deleteWithRetry(configFile.getParent());
Files.createDirectories(configFile.getParent());
Files.createDirectories(confPathRepo);
Files.createDirectories(confPathData);
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
index 37fb9305b1bc6..f77d41f6cfd39 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
@@ -35,12 +35,16 @@ public class RunTask extends DefaultTestClustersTask {
private Boolean debug = false;
+ private Boolean initOnly = false;
+
private Boolean preserveData = false;
private Path dataDir = null;
private String keystorePassword = "";
+ private Integer offset = 0;
+
@Option(option = "debug-jvm", description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch.")
public void setDebug(boolean enabled) {
this.debug = enabled;
@@ -86,10 +90,36 @@ public String getDataDir() {
return dataDir.toString();
}
+ @Input
+ @Optional
+ Boolean getInitOnly() {
+ return initOnly;
+ }
+
+ /**
+ * Only initialize, but don't actually run. This is useful for multi-cluster run tasks.
+ */
+ public void setInitOnly(Boolean initOnly) {
+ this.initOnly = initOnly;
+ }
+
+ @Input
+ @Optional
+ public Integer getPortOffset() {
+ return offset;
+ }
+
+ /**
+ * Manually increase the port offset. This is useful for multi-cluster run tasks.
+ */
+ public void setPortOffset(Integer offset) {
+ this.offset = offset;
+ }
+
@Override
public void beforeStart() {
- int httpPort = 9200;
- int transportPort = 9300;
+ int httpPort = 9200 + offset;
+ int transportPort = 9300 + offset;
Map additionalSettings = System.getProperties()
.entrySet()
.stream()
@@ -126,12 +156,15 @@ public void beforeStart() {
}
if (debug) {
- enableDebug();
+ enableDebug(getPortOffset());
}
}
@TaskAction
public void runAndWait() throws IOException {
+ if (initOnly) {
+ return;
+ }
List toRead = new ArrayList<>();
List aliveChecks = new ArrayList<>();
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
index 11ad0a29f5b8d..c28309a218b08 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
@@ -104,7 +104,7 @@ public WorkResult delete(Object... objects) {
@Override
public void beforeStart() {
if (debugServer) {
- enableDebug();
+ enableDebug(0);
}
}
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
index 18f88b0dc4afc..550dcd6df8802 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
@@ -37,8 +37,8 @@ default void useCluster(Provider cluster) {
default void beforeStart() {}
- default void enableDebug() {
- int debugPort = 5007;
+ default void enableDebug(int portOffset) {
+ int debugPort = 5007 + portOffset;
for (ElasticsearchCluster cluster : getClusters()) {
for (ElasticsearchNode node : cluster.getNodes()) {
getLogger().lifecycle("Running elasticsearch in debug mode, {} expecting running debug server on port {}", node, debugPort);
diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
index 7719c63f37710..1724d8176b563 100644
--- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
+++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
@@ -12,11 +12,13 @@ import org.apache.commons.io.FileUtils
import org.elasticsearch.gradle.internal.test.ConfigurationCacheCompatibleAwareGradleRunner
import org.elasticsearch.gradle.internal.test.InternalAwareGradleRunner
import org.elasticsearch.gradle.internal.test.NormalizeOutputGradleRunner
+import org.elasticsearch.gradle.internal.test.TestResultExtension
import org.gradle.testkit.runner.BuildResult
import org.gradle.testkit.runner.GradleRunner
import org.junit.Rule
import org.junit.rules.TemporaryFolder
import spock.lang.Specification
+import spock.lang.TempDir
import java.lang.management.ManagementFactory
import java.util.jar.JarEntry
@@ -29,6 +31,9 @@ abstract class AbstractGradleFuncTest extends Specification {
@Rule
TemporaryFolder testProjectDir = new TemporaryFolder()
+ @TempDir
+ File gradleUserHome
+
File settingsFile
File buildFile
File propertiesFile
@@ -47,9 +52,9 @@ abstract class AbstractGradleFuncTest extends Specification {
}
def cleanup() {
-// if (Boolean.getBoolean('test.keep.samplebuild')) {
+ if (featureFailed()) {
FileUtils.copyDirectory(testProjectDir.root, new File("build/test-debug/" + testProjectDir.root.name))
-// }
+ }
}
File subProject(String subProjectPath) {
@@ -68,24 +73,23 @@ abstract class AbstractGradleFuncTest extends Specification {
subProjectBuild
}
- GradleRunner gradleRunner(String... arguments) {
+ GradleRunner gradleRunner(Object... arguments) {
return gradleRunner(testProjectDir.root, arguments)
}
- GradleRunner gradleRunner(File projectDir, String... arguments) {
+ GradleRunner gradleRunner(File projectDir, Object... arguments) {
return new NormalizeOutputGradleRunner(
- new ConfigurationCacheCompatibleAwareGradleRunner(
- new InternalAwareGradleRunner(
- GradleRunner.create()
- .withDebug(ManagementFactory.getRuntimeMXBean().getInputArguments()
- .toString().indexOf("-agentlib:jdwp") > 0
- )
- .withProjectDir(projectDir)
- .withPluginClasspath()
- .forwardOutput()
- ), configurationCacheCompatible),
- projectDir
- ).withArguments(arguments)
+ new ConfigurationCacheCompatibleAwareGradleRunner(
+ new InternalAwareGradleRunner(
+ GradleRunner.create()
+ .withDebug(ManagementFactory.getRuntimeMXBean().getInputArguments()
+ .toString().indexOf("-agentlib:jdwp") > 0
+ )
+ .withProjectDir(projectDir)
+ .withPluginClasspath()
+ .forwardOutput()
+ ), configurationCacheCompatible),
+ ).withArguments(arguments.collect { it.toString() })
}
def assertOutputContains(String givenOutput, String expected) {
@@ -205,6 +209,13 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3"
}
+ boolean featureFailed() {
+ specificationContext.currentSpec.listeners
+ .findAll { it instanceof TestResultExtension.ErrorListener }
+ .any {
+ (it as TestResultExtension.ErrorListener).errorInfo != null }
+ }
+
static class ProjectConfigurer {
private File projectDir
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
index 940d8277a5dba..0c535eb7e60fb 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
@@ -27,9 +27,10 @@
public class NormalizeOutputGradleRunner extends GradleRunner {
- public NormalizeOutputGradleRunner(GradleRunner delegate, File projectRootDir) {
+ private GradleRunner delegate;
+
+ public NormalizeOutputGradleRunner(GradleRunner delegate) {
this.delegate = delegate;
- this.projectRootDir = projectRootDir;
}
@Override
@@ -74,7 +75,8 @@ public List getArguments() {
@Override
public GradleRunner withArguments(List arguments) {
- return delegate.withArguments(arguments);
+ delegate.withArguments(arguments);
+ return this;
}
@Override
@@ -150,9 +152,6 @@ public BuildResult buildAndFail() throws InvalidRunnerConfigurationException, Un
return new NormalizedBuildResult(delegate.buildAndFail());
}
- private GradleRunner delegate;
- private File projectRootDir;
-
private class NormalizedBuildResult implements BuildResult {
private BuildResult delegate;
private String normalizedString;
@@ -164,7 +163,7 @@ private class NormalizedBuildResult implements BuildResult {
@Override
public String getOutput() {
if (normalizedString == null) {
- normalizedString = normalizeString(delegate.getOutput(), projectRootDir);
+ normalizedString = normalizeString(delegate.getOutput(), getProjectDir());
}
return normalizedString;
}
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestResultExtension.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestResultExtension.java
new file mode 100644
index 0000000000000..c08f25843c721
--- /dev/null
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestResultExtension.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.test;
+
+import org.spockframework.runtime.AbstractRunListener;
+import org.spockframework.runtime.extension.IGlobalExtension;
+import org.spockframework.runtime.model.ErrorInfo;
+import org.spockframework.runtime.model.IterationInfo;
+import org.spockframework.runtime.model.SpecInfo;
+
+public class TestResultExtension implements IGlobalExtension {
+
+ @Override
+ public void visitSpec(SpecInfo spec) {
+ spec.addListener(new ErrorListener());
+ }
+
+ public static class ErrorListener extends AbstractRunListener {
+ ErrorInfo errorInfo;
+
+ @Override
+ public void beforeIteration(IterationInfo iteration) {
+ errorInfo = null;
+ }
+
+ @Override
+ public void error(ErrorInfo error) {
+ errorInfo = error;
+ }
+ }
+}
diff --git a/build-tools/src/testFixtures/resources/META-INF/services/org.spockframework.runtime.extension.IGlobalExtension b/build-tools/src/testFixtures/resources/META-INF/services/org.spockframework.runtime.extension.IGlobalExtension
new file mode 100644
index 0000000000000..7d1e63d129ca1
--- /dev/null
+++ b/build-tools/src/testFixtures/resources/META-INF/services/org.spockframework.runtime.extension.IGlobalExtension
@@ -0,0 +1,9 @@
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0 and the Server Side Public License, v 1; you may not use this file except
+# in compliance with, at your election, the Elastic License 2.0 or the Server
+# Side Public License, v 1.
+#
+
+org.elasticsearch.gradle.internal.test.TestResultExtension
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 4d84dcbdb7ffd..e1e11e60e110e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -42,6 +42,7 @@ plugins {
id 'elasticsearch.fips'
id 'elasticsearch.internal-testclusters'
id 'elasticsearch.run'
+ id 'elasticsearch.run-ccs'
id 'elasticsearch.release-tools'
id 'elasticsearch.versions'
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index 0f452e5b9ce1c..06881a4c960dc 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -177,6 +177,7 @@
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import java.io.Closeable;
@@ -244,7 +245,7 @@ public class RestHighLevelClient implements Closeable {
// To be called using performClientRequest and performClientRequestAsync to ensure version compatibility check
private final RestClient client;
- private final NamedXContentRegistry registry;
+ private final XContentParserConfiguration parserConfig;
private final CheckedConsumer doClose;
private final boolean useAPICompatibility;
@@ -297,11 +298,19 @@ protected RestHighLevelClient(
) {
this.client = Objects.requireNonNull(restClient, "restClient must not be null");
this.doClose = Objects.requireNonNull(doClose, "doClose consumer must not be null");
- this.registry = new NamedXContentRegistry(
+ NamedXContentRegistry registry = new NamedXContentRegistry(
Stream.of(getDefaultNamedXContents().stream(), getProvidedNamedXContents().stream(), namedXContentEntries.stream())
.flatMap(Function.identity())
.collect(toList())
);
+ /*
+ * Ignores deprecation warnings. This is appropriate because it is only
+ * used to parse responses from Elasticsearch. Any deprecation warnings
+ * emitted there just mean that you are talking to an old version of
+ * Elasticsearch. There isn't anything you can do about the deprecation.
+ */
+ this.parserConfig = XContentParserConfiguration.EMPTY.withRegistry(registry)
+ .withDeprecationHandler(DeprecationHandler.IGNORE_DEPRECATIONS);
if (useAPICompatibility == null && "true".equals(System.getenv(API_VERSIONING_ENV_VARIABLE))) {
this.useAPICompatibility = true;
} else {
@@ -1165,7 +1174,7 @@ protected final Resp parseEntity(final HttpEntity entity, final CheckedFu
if (xContentType == null) {
throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
}
- try (XContentParser parser = xContentType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) {
+ try (XContentParser parser = xContentType.xContent().createParser(parserConfig, entity.getContent())) {
return entityParser.apply(parser);
}
}
@@ -1506,14 +1515,6 @@ private Optional getVersionValidation(Response response) throws IOExcept
return Optional.empty();
}
- /**
- * Ignores deprecation warnings. This is appropriate because it is only
- * used to parse responses from Elasticsearch. Any deprecation warnings
- * emitted there just mean that you are talking to an old version of
- * Elasticsearch. There isn't anything you can do about the deprecation.
- */
- private static final DeprecationHandler DEPRECATION_HANDLER = DeprecationHandler.IGNORE_DEPRECATIONS;
-
static List getDefaultNamedXContents() {
Map> map = new HashMap<>();
map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index e92779683dac8..97cf0f4cd65e4 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -86,7 +86,6 @@ tasks.named("thirdPartyAudit").configure {
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
- 'org.apache.log4j.Category',
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
diff --git a/client/rest/licenses/commons-codec-1.14.jar.sha1 b/client/rest/licenses/commons-codec-1.14.jar.sha1
deleted file mode 100644
index 9fe75b9a90da7..0000000000000
--- a/client/rest/licenses/commons-codec-1.14.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3cb1181b2141a7e752f5bdc998b7ef1849f726cf
\ No newline at end of file
diff --git a/client/rest/licenses/commons-logging-1.1.3.jar.sha1 b/client/rest/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/client/rest/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1 b/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1
deleted file mode 100644
index 366a9e31069a6..0000000000000
--- a/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cd18227f1eb8e9a263286c1d7362ceb24f6f9b32
\ No newline at end of file
diff --git a/client/rest/licenses/httpclient-4.5.13.jar.sha1 b/client/rest/licenses/httpclient-4.5.13.jar.sha1
deleted file mode 100644
index 3281e21595b39..0000000000000
--- a/client/rest/licenses/httpclient-4.5.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e5f6cae5ca7ecaac1ec2827a9e2d65ae2869cada
\ No newline at end of file
diff --git a/client/rest/licenses/httpcore-4.4.13.jar.sha1 b/client/rest/licenses/httpcore-4.4.13.jar.sha1
deleted file mode 100644
index 0cb64863b9760..0000000000000
--- a/client/rest/licenses/httpcore-4.4.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-853b96d3afbb7bf8cc303fe27ee96836a10c1834
\ No newline at end of file
diff --git a/client/rest/licenses/httpcore-nio-4.4.13.jar.sha1 b/client/rest/licenses/httpcore-nio-4.4.13.jar.sha1
deleted file mode 100644
index 7629b7d5584c8..0000000000000
--- a/client/rest/licenses/httpcore-nio-4.4.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3f897ace4d7f10f0ea6a58f524a3b105dd483653
\ No newline at end of file
diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle
index 9162c813c746b..0d4dbad62cbbf 100644
--- a/client/sniffer/build.gradle
+++ b/client/sniffer/build.gradle
@@ -78,7 +78,6 @@ tasks.named("thirdPartyAudit").configure {
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
- 'org.apache.log4j.Category',
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
'org.apache.log4j.Priority',
diff --git a/client/sniffer/licenses/commons-codec-1.14.jar.sha1 b/client/sniffer/licenses/commons-codec-1.14.jar.sha1
deleted file mode 100644
index 9fe75b9a90da7..0000000000000
--- a/client/sniffer/licenses/commons-codec-1.14.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3cb1181b2141a7e752f5bdc998b7ef1849f726cf
\ No newline at end of file
diff --git a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 b/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1
deleted file mode 100644
index 5b8f029e58293..0000000000000
--- a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f
\ No newline at end of file
diff --git a/client/sniffer/licenses/httpclient-4.5.13.jar.sha1 b/client/sniffer/licenses/httpclient-4.5.13.jar.sha1
deleted file mode 100644
index 3281e21595b39..0000000000000
--- a/client/sniffer/licenses/httpclient-4.5.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e5f6cae5ca7ecaac1ec2827a9e2d65ae2869cada
\ No newline at end of file
diff --git a/client/sniffer/licenses/httpcore-4.4.13.jar.sha1 b/client/sniffer/licenses/httpcore-4.4.13.jar.sha1
deleted file mode 100644
index 0cb64863b9760..0000000000000
--- a/client/sniffer/licenses/httpcore-4.4.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-853b96d3afbb7bf8cc303fe27ee96836a10c1834
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.13.2.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.2.jar.sha1
deleted file mode 100644
index eb8a8bc45f041..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.13.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0a6a0e0620d51833feffc67bccb51937b2345763
\ No newline at end of file
diff --git a/distribution/build.gradle b/distribution/build.gradle
index 158e7c70091a7..301ca89438b97 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -177,7 +177,7 @@ project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each {
}
distro.copyModule(processDefaultOutputsTaskProvider, module)
- if (module.name.startsWith('transport-')) {
+ if (module.name.startsWith('transport-') || (BuildParams.snapshotBuild == false && module.name == 'apm')) {
distro.copyModule(processIntegTestOutputsTaskProvider, module)
}
diff --git a/distribution/docker/README.md b/distribution/docker/README.md
new file mode 100644
index 0000000000000..2e22fe099f4f5
--- /dev/null
+++ b/distribution/docker/README.md
@@ -0,0 +1,132 @@
+# Elasticsearch Docker Distribution
+
+The ES build can generate several types of Docker image. These are enumerated in
+the [DockerBase] enum.
+
+ * Default - this is what most people use, and is based on Ubuntu
+ * UBI - the same as the default image, but based upon [RedHat's UBI
+ images][ubi], specifically their minimal flavour.
+ * Iron Bank - this is the US Department of Defence's repository of digitally
+ signed, binary container images including both Free and Open-Source
+ software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is
+ another UBI build, this time on the regular UBI image, with extra
+ hardening. See below for more details.
+ * Cloud - this is mostly the same as the default image, with some notable differences:
+ * `filebeat` and `metricbeat` are included
+ * `wget` is included
+ * The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is
+    `/app/elasticsearch.sh`. In normal use this file would be bind-mounted
+ in, but the image ships a stub version of this file so that the image
+ can still be tested.
+ * Cloud ESS - this directly extends the Cloud image, and adds all ES plugins
+ that the ES build generates in an archive directory. It also sets an
+ environment variable that points at this directory. This allows plugins to
+ be installed from the archive instead of the internet, speeding up
+ deployment times.
+
+The long-term goal is for both Cloud images to be retired in favour of the
+default image.
+
+
+## Build strategy
+
+For all image flavours, the ES build implements a pipeline:
+
+ 1. Construct a Docker build context
+ 2. Transform the build context so that it is possible to build it locally
+ 3. Build the Docker image locally
+
+Some images use (1) as the releasable artifact, some use (3).
+
+**NOTE:** "Pipeline" isn't actually the correct term - in reality, each Gradle
+task depends on the one before it. Notably, it is the transform tasks that
+depend on a locally built `.tar.gz` Elasticsearch archive.
+
+
+## Releasing on Docker Hub
+
+Elasticsearch is an [official image on Docker
+Hub](https://hub.docker.com/_/elasticsearch). On release day, we build the ES
+Docker image and upload it to [Elastic's Docker
+registry](https://www.docker.elastic.co/). Separately, we submit a build context
+to Docker via the [elastic/dockerfiles](https://github.com/elastic/dockerfiles)
+repository. Docker then builds the image, and uploads it to Docker Hub.
+Unfortunately, this is an asynchronous process, and we don't hear back if
+there's a failure, so even when everything works, there's a lag between
+releasing a new version of Elasticsearch, and the image being available on
+Docker Hub.
+
+Being an official image puts additional constraints on how the Elasticsearch
+image is built.
+
+ * It must extend another official image
+ * It must fetch any required artifacts - they cannot be supplied in the build
+ context.
+ * It must be platform-independent i.e. it can build on ARM and x64
+
+The transform step in the [build strategy](#build-strategy) above replaces the
+`curl` command in the `Dockerfile` that fetches an Elasticsearch `.tar.gz`
+distribution with a `COPY` command, so that it is possible to build the ES image
+locally.
+
+## Iron Bank release process
+
+Elastic does not release an Iron Bank image. Rather, for each release we provide
+a Docker build context, and Iron Bank build the image themselves using a custom
+build process.
+
+The ES build still has a task to build an Iron Bank image, in order to test
+something close to what Iron Bank build. The ES build does this by transforming
+the files in the Docker build context slightly, and passing usable values for
+the build variables (we use the regular UBI image instead of the DoD one).
+
+The important takeaway here is that the releasable artifact is the Iron Bank
+build context, not the image.
+
+
+## Multi-architecture images
+
+We publish [multi-architecture images][multi-arch], for use on both
+`x86_64` (Intel) and `aarch64` (ARM). This works by essentially building two
+images, and combining them with a Docker manifest. The Elasticsearch Delivery
+team aren't responsible for this - rather, it happens during our unified release
+process.
+
+
+## Testing
+
+We have a suite of tests in the [qa/os](../../qa/os) subproject. Most of the
+Docker tests are in the [DockerTests] class, but there are tests that use Docker
+in other test classes.
+
+The tests are mostly concerned with ensuring that the image has been built
+correctly e.g. contents and permissions are correct. We also check that the
+custom behaviour in the
+[docker-entrypoint.sh](src/docker/bin/docker-entrypoint.sh) works as intended.
+
+
+## Reliability
+
+We go to some lengths to try and make the Docker build resilient to transient
+network errors. This is why, when browsing the
+[Dockerfile](src/docker/Dockerfile), you'll see many commands wrapped in looping
+logic, so that if e.g. package installation fails, we try again. We also perform
+explicit `docker pull` commands instead of relying on `docker run` to pull an
+image down automatically, so that we can wrap the `pull` part in a retry.
+
+
+## What are the export projects for?
+
+Our integration tests are set up so that the test task depends on the project
+that creates the required artifacts. Note, it doesn't depend on a task, but a
+project! Also, we used to use Vagrant for testing (this has largely since been
+abandoned), which meant we needed to be able to build an image locally, export
+it, and load it again inside a Vagrant VM.
+
+Ideally this import / export stuff should be completely removed.
+
+
+[DockerBase]: ../../build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
+[DockerTests]: ../../qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+[multi-arch]: https://www.docker.com/blog/multi-arch-build-and-images-the-simple-way/
+[ubi]: https://developers.redhat.com/products/rhel/ubi
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index a3be272a09b0c..ccb23b554ea84 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -286,8 +286,8 @@ void addTransformDockerContextTask(Architecture architecture, DockerBase base) {
from(tarTree("${project.buildDir}/distributions/${archiveName}.tar.gz")) {
eachFile { FileCopyDetails details ->
if (details.name.equals("Dockerfile")) {
- filter { String filename ->
- return filename.replaceAll('^RUN curl.*artifacts-no-kpi.*$', "COPY ${distributionName} /tmp/elasticsearch.tar.gz")
+ filter { String contents ->
+ return contents.replaceAll('^RUN curl.*artifacts-no-kpi.*$', "COPY ${distributionName} /tmp/elasticsearch.tar.gz")
}
}
}
@@ -487,3 +487,8 @@ subprojects { Project subProject ->
}
}
}
+
+tasks.named('resolveAllDependencies') {
+ // Don't try and resolve filebeat or metricbeat snapshots as they may not always be available
+ configs = configurations.matching { it.name.endsWith('beat') == false }
+}
diff --git a/distribution/tools/ansi-console/licenses/jansi-2.4.0.jar.sha1 b/distribution/tools/ansi-console/licenses/jansi-2.4.0.jar.sha1
deleted file mode 100644
index 37ca74b255dcf..0000000000000
--- a/distribution/tools/ansi-console/licenses/jansi-2.4.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-321c614f85f1dea6bb08c1817c60d53b7f3552fd
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.jar.sha1
deleted file mode 100644
index 425b11ee6c13f..0000000000000
--- a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4fb5db5f03d00f6a94e43b78d097978190e4abb2
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.4.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.4.jar.sha1
deleted file mode 100644
index 7aec78e9e6f07..0000000000000
--- a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1a838a87959d9c2cee658f4a4e1869e28f6b9976
\ No newline at end of file
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
new file mode 100644
index 0000000000000..d9ad620ffaee5
--- /dev/null
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
@@ -0,0 +1,290 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.server.cli;
+
+import org.elasticsearch.Build;
+import org.elasticsearch.Version;
+import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.UserException;
+import org.elasticsearch.common.settings.KeyStoreWrapper;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Nullable;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+/**
+ * This class is responsible for working out if APM tracing is configured and if so, preparing
+ * a temporary config file for the APM Java agent and CLI options to the JVM to configure APM.
+ * APM doesn't need to be enabled, as that can be toggled at runtime, but some configuration e.g.
+ * server URL and secret key can only be provided when Elasticsearch starts.
+ */
+class APMJvmOptions {
+ /**
+ * Contains agent configuration that must always be applied, and cannot be overridden.
+ */
+ // tag::noformat
+ private static final Map STATIC_CONFIG = Map.of(
+ // Identifies the version of Elasticsearch in the captured trace data.
+ "service_version", Version.CURRENT.toString(),
+
+ // Configures a log file to write to. `_AGENT_HOME_` is a placeholder used
+ // by the agent. Don't disable writing to a log file, as the agent will then
+ // require extra Security Manager permissions when it tries to do something
+ // else, and it's just painful.
+ "log_file", "_AGENT_HOME_/../../logs/apm.log",
+
+ // ES does not use auto-instrumentation.
+ "instrument", "false"
+ );
+
+ /**
+ * Contains default configuration that will be used unless overridden by explicit configuration.
+ */
+ private static final Map CONFIG_DEFAULTS = Map.of(
+ // This is used to keep all the errors and transactions of a service
+ // together and is the primary filter in the Elastic APM user interface.
+ //
+ // You can optionally also set `service_node_name`, which is used to
+ // distinguish between different nodes of a service, therefore it should
+ // be unique for each JVM within a service. If not set, data
+ // aggregations will be done based on a container ID (where valid) or on
+ // the reported hostname (automatically discovered or manually
+ // configured through hostname). However, if this node's `node.name` is
+ // set, then that value is used for the `service_node_name`.
+ "service_name", "elasticsearch",
+
+ // An arbitrary string that identifies this deployment environment. For
+ // example, "dev", "staging" or "prod". Can be anything you like, but must
+ // have the same value across different systems in the same deployment
+ // environment.
+ "environment", "dev",
+
+ // Logging configuration. Unless you need detailed logs about what the APM
+ // is doing, leave this value alone.
+ "log_level", "error",
+ "application_packages", "org.elasticsearch,org.apache.lucene",
+ "metrics_interval", "120s",
+ "breakdown_metrics", "false",
+ "central_config", "false"
+ );
+ // end::noformat
+
+ /**
+ * Lists all APM configuration keys that are not dynamic and must be configured via the config file.
+ */
+ private static final List STATIC_AGENT_KEYS = List.of(
+ "api_key",
+ "aws_lambda_handler",
+ "breakdown_metrics",
+ "classes_excluded_from_instrumentation",
+ "cloud_provider",
+ "data_flush_timeout",
+ "disable_metrics",
+ "disable_send",
+ "enabled",
+ "enable_public_api_annotation_inheritance",
+ "environment",
+ "global_labels",
+ "hostname",
+ "include_process_args",
+ "log_ecs_formatter_allow_list",
+ "log_ecs_reformatting_additional_fields",
+ "log_ecs_reformatting_dir",
+ "log_file",
+ "log_file_size",
+ "log_format_file",
+ "log_format_sout",
+ "max_queue_size",
+ "metrics_interval",
+ "plugins_dir",
+ "profiling_inferred_spans_lib_directory",
+ "secret_token",
+ "service_name",
+ "service_node_name",
+ "service_version",
+ "stress_monitoring_interval",
+ "trace_methods_duration_threshold",
+ "use_jaxrs_path_as_transaction_name",
+ "verify_server_cert"
+ );
+
+ /**
+ * This method works out if APM tracing is enabled, and if so, prepares a temporary config file
+ * for the APM Java agent and CLI options to the JVM to configure APM. The config file is temporary
+ * because it will be deleted once Elasticsearch starts.
+ *
+ * @param settings the Elasticsearch settings to consider
+ * @param keystore a wrapper to access the keystore, or null if there is no keystore
+ * @param tmpdir Elasticsearch's temporary directory, where the config file will be written
+ */
+ static List apmJvmOptions(Settings settings, @Nullable KeyStoreWrapper keystore, Path tmpdir) throws UserException,
+ IOException {
+ final Path agentJar = findAgentJar();
+
+ if (agentJar == null) {
+ return List.of();
+ }
+
+ final Map propertiesMap = extractApmSettings(settings);
+
+ // No point doing anything if we don't have a destination for the trace data, and it can't be configured dynamically
+ if (propertiesMap.containsKey("server_url") == false && propertiesMap.containsKey("server_urls") == false) {
+ return List.of();
+ }
+
+ if (propertiesMap.containsKey("service_node_name") == false) {
+ final String nodeName = settings.get("node.name");
+ if (nodeName != null) {
+ propertiesMap.put("service_node_name", nodeName);
+ }
+ }
+
+ if (keystore != null) {
+ extractSecureSettings(keystore, propertiesMap);
+ }
+ final Map dynamicSettings = extractDynamicSettings(propertiesMap);
+
+ final Path tmpProperties = writeApmProperties(tmpdir, propertiesMap);
+
+ final List options = new ArrayList<>();
+ // Use an agent argument to specify the config file instead of e.g. `-Delastic.apm.config_file=...`
+ // because then the agent won't try to reload the file, and we can remove it after startup.
+ options.add("-javaagent:" + agentJar + "=c=" + tmpProperties);
+
+ dynamicSettings.forEach((key, value) -> options.add("-Delastic.apm." + key + "=" + value));
+
+ return options;
+ }
+
+ private static void extractSecureSettings(KeyStoreWrapper keystore, Map propertiesMap) {
+ final Set settingNames = keystore.getSettingNames();
+ for (String key : List.of("api_key", "secret_token")) {
+ if (settingNames.contains("tracing.apm." + key)) {
+ try (SecureString token = keystore.getString("tracing.apm." + key)) {
+ propertiesMap.put(key, token.toString());
+ }
+ }
+ }
+ }
+
+ /**
+ * Removes settings that can be changed dynamically at runtime from the supplied map, and returns
+ * those settings in a new map.
+ */
+ private static Map extractDynamicSettings(Map propertiesMap) {
+ final Map cliOptionsMap = new HashMap<>();
+
+ final Iterator> propertiesIterator = propertiesMap.entrySet().iterator();
+ while (propertiesIterator.hasNext()) {
+ final Map.Entry entry = propertiesIterator.next();
+ if (STATIC_AGENT_KEYS.contains(entry.getKey()) == false) {
+ propertiesIterator.remove();
+ cliOptionsMap.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ return cliOptionsMap;
+ }
+
+ private static Map extractApmSettings(Settings settings) throws UserException {
+ final Map propertiesMap = new HashMap<>();
+
+ final Settings agentSettings = settings.getByPrefix("tracing.apm.agent.");
+ agentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(agentSettings.get(key))));
+
+ // These settings must not be changed
+ for (String key : STATIC_CONFIG.keySet()) {
+ if (propertiesMap.containsKey(key)) {
+ throw new UserException(
+ ExitCodes.CONFIG,
+ "Do not set a value for [tracing.apm.agent." + key + "], as this is configured automatically by Elasticsearch"
+ );
+ }
+ }
+
+ CONFIG_DEFAULTS.forEach(propertiesMap::putIfAbsent);
+
+ propertiesMap.putAll(STATIC_CONFIG);
+ return propertiesMap;
+ }
+
+ /**
+ * Writes a Java properties file with data from supplied map to a temporary config, and returns
+ * the file that was created.
+ *
+ * @param tmpdir the directory for the file
+ * @param propertiesMap the data to write
+ * @return the file that was created
+ * @throws IOException if writing the file fails
+ */
+ private static Path writeApmProperties(Path tmpdir, Map propertiesMap) throws IOException {
+ final Properties p = new Properties();
+ p.putAll(propertiesMap);
+
+ final Path tmpFile = Files.createTempFile(tmpdir, ".elstcapm.", ".tmp");
+ try (OutputStream os = Files.newOutputStream(tmpFile)) {
+ p.store(os, " Automatically generated by Elasticsearch, do not edit!");
+ }
+ return tmpFile;
+ }
+
+ /**
+ * The JVM argument that configures the APM agent needs to specify the agent jar path, so this method
+ * finds the jar by inspecting the filesystem.
+ * @return the agent jar file
+ * @throws IOException if a problem occurs reading the filesystem
+ */
+ @Nullable
+ private static Path findAgentJar() throws IOException, UserException {
+ final Path apmModule = Path.of(System.getProperty("user.dir")).resolve("modules/apm");
+
+ if (Files.notExists(apmModule)) {
+ if (Build.CURRENT.isProductionRelease()) {
+ throw new UserException(
+ ExitCodes.CODE_ERROR,
+ "Expected to find [apm] module in [" + apmModule + "]! Installation is corrupt"
+ );
+ }
+ return null;
+ }
+
+ try (var apmStream = Files.list(apmModule)) {
+ final List paths = apmStream.filter(
+ path -> path.getFileName().toString().matches("elastic-apm-agent-\\d+\\.\\d+\\.\\d+\\.jar")
+ ).toList();
+
+ if (paths.size() > 1) {
+ throw new UserException(
+ ExitCodes.CODE_ERROR,
+ "Found multiple [elastic-apm-agent] jars under [" + apmModule + "]! Installation is corrupt."
+ );
+ }
+
+ if (paths.isEmpty()) {
+ throw new UserException(
+ ExitCodes.CODE_ERROR,
+ "Found no [elastic-apm-agent] jar under [" + apmModule + "]! Installation is corrupt."
+ );
+ }
+
+ return paths.get(0);
+ }
+ }
+}
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java
index 455e9dc607194..b20aad3a0b845 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java
@@ -8,8 +8,10 @@
package org.elasticsearch.server.cli;
+import org.elasticsearch.bootstrap.ServerArgs;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
+import org.elasticsearch.common.settings.KeyStoreWrapper;
import java.io.BufferedReader;
import java.io.IOException;
@@ -68,16 +70,17 @@ SortedMap invalidLines() {
* files in the {@code jvm.options.d} directory, and the options given by the {@code ES_JAVA_OPTS} environment
* variable.
*
- * @param configDir the ES config dir
- * @param tmpDir the directory that should be passed to {@code -Djava.io.tmpdir}
- * @param envOptions the options passed through the ES_JAVA_OPTS env var
+ * @param keystore the installation's keystore
+ * @param configDir the ES config dir
+ * @param tmpDir the directory that should be passed to {@code -Djava.io.tmpdir}
+ * @param envOptions the options passed through the ES_JAVA_OPTS env var
* @return the list of options to put on the Java command line
* @throws InterruptedException if the java subprocess is interrupted
- * @throws IOException if there is a problem reading any of the files
- * @throws UserException if there is a problem parsing the `jvm.options` file or `jvm.options.d` files
+ * @throws IOException if there is a problem reading any of the files
+ * @throws UserException if there is a problem parsing the `jvm.options` file or `jvm.options.d` files
*/
- static List determineJvmOptions(Path configDir, Path tmpDir, String envOptions) throws InterruptedException, IOException,
- UserException {
+ static List determineJvmOptions(ServerArgs args, KeyStoreWrapper keystore, Path configDir, Path tmpDir, String envOptions)
+ throws InterruptedException, IOException, UserException {
final JvmOptionsParser parser = new JvmOptionsParser();
@@ -86,7 +89,7 @@ static List determineJvmOptions(Path configDir, Path tmpDir, String envO
substitutions.put("ES_PATH_CONF", configDir.toString());
try {
- return parser.jvmOptions(configDir, envOptions, substitutions);
+ return parser.jvmOptions(args, keystore, configDir, tmpDir, envOptions, substitutions);
} catch (final JvmOptionsFileParserException e) {
final String errorMessage = String.format(
Locale.ROOT,
@@ -115,8 +118,14 @@ static List determineJvmOptions(Path configDir, Path tmpDir, String envO
}
}
- private List jvmOptions(final Path config, final String esJavaOpts, final Map substitutions)
- throws InterruptedException, IOException, JvmOptionsFileParserException {
+ private List jvmOptions(
+ ServerArgs args,
+ KeyStoreWrapper keystore,
+ final Path config,
+ Path tmpDir,
+ final String esJavaOpts,
+ final Map substitutions
+ ) throws InterruptedException, IOException, JvmOptionsFileParserException, UserException {
final List jvmOptions = readJvmOptionsFiles(config);
@@ -132,12 +141,15 @@ private List jvmOptions(final Path config, final String esJavaOpts, fina
final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions);
final List systemJvmOptions = SystemJvmOptions.systemJvmOptions();
+ final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), keystore, tmpDir);
+
final List finalJvmOptions = new ArrayList<>(
- systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size()
+ systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size() + apmOptions.size()
);
finalJvmOptions.addAll(systemJvmOptions); // add the system JVM options first so that they can be overridden
finalJvmOptions.addAll(substitutedJvmOptions);
finalJvmOptions.addAll(ergonomicJvmOptions);
+ finalJvmOptions.addAll(apmOptions);
return finalJvmOptions;
}
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java
index c0a259d9f4699..73269b8c719fd 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java
@@ -27,7 +27,6 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.monitor.jvm.JvmInfo;
-import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@@ -76,15 +75,21 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce
validateConfig(options, env);
- // setup security
- final SecureString keystorePassword = getKeystorePassword(env.configFile(), terminal);
- env = autoConfigureSecurity(terminal, options, processInfo, env, keystorePassword);
+ try (KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile())) {
+ // setup security
+ final SecureString keystorePassword = getKeystorePassword(keystore, terminal);
+ env = autoConfigureSecurity(terminal, options, processInfo, env, keystorePassword);
- // install/remove plugins from elasticsearch-plugins.yml
- syncPlugins(terminal, env, processInfo);
+ if (keystore != null) {
+ keystore.decrypt(keystorePassword.getChars());
+ }
+
+ // install/remove plugins from elasticsearch-plugins.yml
+ syncPlugins(terminal, env, processInfo);
- ServerArgs args = createArgs(options, env, keystorePassword, processInfo);
- this.server = startServer(terminal, processInfo, args);
+ ServerArgs args = createArgs(options, env, keystorePassword, processInfo);
+ this.server = startServer(terminal, processInfo, args, keystore);
+ }
if (options.has(daemonizeOption)) {
server.detach();
@@ -122,13 +127,11 @@ private void validateConfig(OptionSet options, Environment env) throws UserExcep
}
}
- private static SecureString getKeystorePassword(Path configDir, Terminal terminal) throws IOException {
- try (KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir)) {
- if (keystore != null && keystore.hasPassword()) {
- return new SecureString(terminal.readSecret(KeyStoreWrapper.PROMPT));
- } else {
- return new SecureString(new char[0]);
- }
+ private static SecureString getKeystorePassword(KeyStoreWrapper keystore, Terminal terminal) {
+ if (keystore != null && keystore.hasPassword()) {
+ return new SecureString(terminal.readSecret(KeyStoreWrapper.PROMPT));
+ } else {
+ return new SecureString(new char[0]);
}
}
@@ -226,7 +229,8 @@ protected Command loadTool(String toolname, String libs) {
}
// protected to allow tests to override
- protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws UserException {
- return ServerProcess.start(terminal, processInfo, args);
+ protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, KeyStoreWrapper keystore)
+ throws UserException {
+ return ServerProcess.start(terminal, processInfo, args, keystore);
}
}
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java
index ecb5bd89a694f..674f9f12c916b 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java
@@ -15,6 +15,7 @@
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
+import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.SuppressForbidden;
@@ -36,7 +37,7 @@
/**
* A helper to control a {@link Process} running the main Elasticsearch server.
*
- *
The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs)}.
+ *
The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs, KeyStoreWrapper)}.
* The process is controlled by internally sending arguments and control signals on stdin,
* and receiving control signals on stderr. The start method does not return until the
* server is ready to process requests and has exited the bootstrap thread.
@@ -66,7 +67,8 @@ public class ServerProcess {
// this allows mocking the process building by tests
interface OptionsBuilder {
- List getJvmOptions(Path configDir, Path tmpDir, String envOptions) throws InterruptedException, IOException, UserException;
+ List getJvmOptions(ServerArgs args, KeyStoreWrapper keyStoreWrapper, Path configDir, Path tmpDir, String envOptions)
+ throws InterruptedException, IOException, UserException;
}
// this allows mocking the process building by tests
@@ -77,14 +79,16 @@ interface ProcessStarter {
/**
* Start a server in a new process.
*
- * @param terminal A terminal to connect the standard inputs and outputs to for the new process.
- * @param processInfo Info about the current process, for passing through to the subprocess.
- * @param args Arguments to the server process.
+ * @param terminal A terminal to connect the standard inputs and outputs to for the new process.
+ * @param processInfo Info about the current process, for passing through to the subprocess.
+ * @param args Arguments to the server process.
+ * @param keystore A keystore for accessing secrets.
* @return A running server process that is ready for requests
* @throws UserException If the process failed during bootstrap
*/
- public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws UserException {
- return start(terminal, processInfo, args, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start);
+ public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, KeyStoreWrapper keystore)
+ throws UserException {
+ return start(terminal, processInfo, args, keystore, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start);
}
// package private so tests can mock options building and process starting
@@ -92,6 +96,7 @@ static ServerProcess start(
Terminal terminal,
ProcessInfo processInfo,
ServerArgs args,
+ KeyStoreWrapper keystore,
OptionsBuilder optionsBuilder,
ProcessStarter processStarter
) throws UserException {
@@ -100,7 +105,7 @@ static ServerProcess start(
boolean success = false;
try {
- jvmProcess = createProcess(processInfo, args.configDir(), optionsBuilder, processStarter);
+ jvmProcess = createProcess(args, keystore, processInfo, args.configDir(), optionsBuilder, processStarter);
errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream());
errorPump.start();
sendArgs(args, jvmProcess.getOutputStream());
@@ -193,6 +198,8 @@ private void sendShutdownMarker() {
}
private static Process createProcess(
+ ServerArgs args,
+ KeyStoreWrapper keystore,
ProcessInfo processInfo,
Path configDir,
OptionsBuilder optionsBuilder,
@@ -204,7 +211,7 @@ private static Process createProcess(
envVars.put("LIBFFI_TMPDIR", tempDir.toString());
}
- List jvmOptions = optionsBuilder.getJvmOptions(configDir, tempDir, envVars.remove("ES_JAVA_OPTS"));
+ List jvmOptions = optionsBuilder.getJvmOptions(args, keystore, configDir, tempDir, envVars.remove("ES_JAVA_OPTS"));
// also pass through distribution type
jvmOptions.add("-Des.distribution.type=" + processInfo.sysprops().get("es.distribution.type"));
@@ -216,6 +223,7 @@ private static Process createProcess(
command.addAll(jvmOptions);
command.add("--module-path");
command.add(esHome.resolve("lib").toString());
+ command.add("--add-modules=jdk.net"); // very special circumstance; explicit modules should typically not be added here
command.add("-m");
command.add("org.elasticsearch.server/org.elasticsearch.bootstrap.Elasticsearch");
diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java
index 2ccdfffe6cb07..7a189563801eb 100644
--- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java
+++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java
@@ -436,7 +436,7 @@ protected Command loadTool(String toolname, String libs) {
}
@Override
- protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) {
+ protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, KeyStoreWrapper keystore) {
if (argsValidator != null) {
argsValidator.accept(args);
}
diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java
index 1834245ca7e16..f0fa37227119d 100644
--- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java
+++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java
@@ -92,7 +92,7 @@ public void resetEnv() {
envVars.clear();
esHomeDir = createTempDir();
nodeSettings = Settings.builder();
- optionsBuilder = (configDir, tmpDir, envOptions) -> new ArrayList<>();
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> new ArrayList<>();
processValidator = null;
mainCallback = null;
}
@@ -201,7 +201,7 @@ ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePass
process = new MockElasticsearchProcess();
return process;
};
- return ServerProcess.start(terminal, pinfo, args, optionsBuilder, starter);
+ return ServerProcess.start(terminal, pinfo, args, null, optionsBuilder, starter);
}
public void testProcessBuilder() throws Exception {
@@ -253,7 +253,9 @@ public void testStartError() throws Exception {
}
public void testOptionsBuildingInterrupted() throws Exception {
- optionsBuilder = (configDir, tmpDir, envOptions) -> { throw new InterruptedException("interrupted while get jvm options"); };
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> {
+ throw new InterruptedException("interrupted while get jvm options");
+ };
var e = expectThrows(RuntimeException.class, () -> runForeground());
assertThat(e.getCause().getMessage(), equalTo("interrupted while get jvm options"));
}
@@ -277,7 +279,7 @@ public void testLibffiEnv() throws Exception {
}
public void testTempDir() throws Exception {
- optionsBuilder = (configDir, tmpDir, envOptions) -> {
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> {
assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true));
assertThat(tmpDir.getFileName().toString(), startsWith("elasticsearch-"));
return new ArrayList<>();
@@ -289,7 +291,7 @@ public void testTempDirWindows() throws Exception {
Path baseTmpDir = createTempDir();
sysprops.put("os.name", "Windows 10");
sysprops.put("java.io.tmpdir", baseTmpDir.toString());
- optionsBuilder = (configDir, tmpDir, envOptions) -> {
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> {
assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true));
assertThat(tmpDir.getFileName().toString(), equalTo("elasticsearch"));
assertThat(tmpDir.getParent().toString(), equalTo(baseTmpDir.toString()));
@@ -301,7 +303,7 @@ public void testTempDirWindows() throws Exception {
public void testTempDirOverride() throws Exception {
Path customTmpDir = createTempDir();
envVars.put("ES_TMPDIR", customTmpDir.toString());
- optionsBuilder = (configDir, tmpDir, envOptions) -> {
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> {
assertThat(tmpDir.toString(), equalTo(customTmpDir.toString()));
return new ArrayList<>();
};
@@ -327,7 +329,7 @@ public void testTempDirOverrideNotADirectory() throws Exception {
public void testCustomJvmOptions() throws Exception {
envVars.put("ES_JAVA_OPTS", "-Dmyoption=foo");
- optionsBuilder = (configDir, tmpDir, envOptions) -> {
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> {
assertThat(envOptions, equalTo("-Dmyoption=foo"));
return new ArrayList<>();
};
@@ -336,7 +338,7 @@ public void testCustomJvmOptions() throws Exception {
}
public void testCommandLineSysprops() throws Exception {
- optionsBuilder = (configDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz");
+ optionsBuilder = (args, keystore, configDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz");
processValidator = pb -> {
assertThat(pb.command(), contains("-Dfoo1=bar"));
assertThat(pb.command(), contains("-Dfoo2=bar"));
diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/ProcrunCommand.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/ProcrunCommand.java
index c10495d3b8af6..b507e5e43a456 100644
--- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/ProcrunCommand.java
+++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/ProcrunCommand.java
@@ -67,7 +67,7 @@ protected void execute(Terminal terminal, OptionSet options, ProcessInfo process
preExecute(terminal, processInfo, serviceId);
List procrunCmd = new ArrayList<>();
- procrunCmd.add(procrun.toString());
+ procrunCmd.add(quote(procrun.toString()));
procrunCmd.add("//%s/%s".formatted(cmd, serviceId));
if (includeLogArgs()) {
procrunCmd.add(getLogArgs(serviceId, processInfo.workingDir(), processInfo.envVars()));
@@ -86,6 +86,11 @@ protected void execute(Terminal terminal, OptionSet options, ProcessInfo process
}
}
+ /** Quotes the given String. */
+ static String quote(String s) {
+ return '"' + s + '"';
+ }
+
/** Determines the service id for the Elasticsearch service that should be used */
private String getServiceId(OptionSet options, Map env) throws UserException {
List> args = options.nonOptionArguments();
diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java
index b6e7596d50c03..637bd09eb2cea 100644
--- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java
+++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java
@@ -35,7 +35,7 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand {
@Override
public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception {
var args = new ServerArgs(false, true, null, new SecureString(""), env.settings(), env.configFile());
- this.server = ServerProcess.start(terminal, processInfo, args);
+ this.server = ServerProcess.start(terminal, processInfo, args, null);
// start does not return until the server is ready, and we do not wait for the process
}
diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceInstallCommand.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceInstallCommand.java
index 4e6e2cddfeb93..0d0bd040db30a 100644
--- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceInstallCommand.java
+++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceInstallCommand.java
@@ -42,7 +42,7 @@ protected String getAdditionalArgs(String serviceId, ProcessInfo pinfo) {
addArg(args, "--Classpath", pinfo.sysprops().get("java.class.path"));
addArg(args, "--JvmMs", "4m");
addArg(args, "--JvmMx", "64m");
- addArg(args, "--JvmOptions", getJvmOptions(pinfo.sysprops()));
+ addQuotedArg(args, "--JvmOptions", getJvmOptions(pinfo.sysprops()));
addArg(args, "--PidFile", "%s.pid".formatted(serviceId));
addArg(
args,
@@ -55,10 +55,10 @@ protected String getAdditionalArgs(String serviceId, ProcessInfo pinfo) {
pinfo.envVars()
.getOrDefault("SERVICE_DESCRIPTION", "Elasticsearch %s Windows Service - https://elastic.co".formatted(Version.CURRENT))
);
- addArg(args, "--Jvm", getJvmDll(getJavaHome(pinfo.sysprops())).toString());
+ addQuotedArg(args, "--Jvm", quote(getJvmDll(getJavaHome(pinfo.sysprops())).toString()));
addArg(args, "--StartMode", "jvm");
addArg(args, "--StopMode", "jvm");
- addArg(args, "--StartPath", pinfo.workingDir().toString());
+ addQuotedArg(args, "--StartPath", quote(pinfo.workingDir().toString()));
addArg(args, "++JvmOptions", "-Dcli.name=windows-service-daemon");
addArg(args, "++JvmOptions", "-Dcli.libs=lib/tools/server-cli,lib/tools/windows-service-cli");
addArg(args, "++Environment", "HOSTNAME=%s".formatted(pinfo.envVars().get("COMPUTERNAME")));
@@ -89,6 +89,13 @@ private static void addArg(List args, String arg, String value) {
args.add(value);
}
+ // Adds an arg with an already appropriately quoted value. Trivial, but explicit implementation.
+ // This method is typically used when adding args whose value contains a file-system path
+ private static void addQuotedArg(List args, String arg, String value) {
+ args.add(arg);
+ args.add(value);
+ }
+
@SuppressForbidden(reason = "get java home path to pass through")
private static Path getJavaHome(Map sysprops) {
return Paths.get(sysprops.get("java.home"));
@@ -107,7 +114,7 @@ private static String getJvmOptions(Map sysprops) {
jvmOptions.add("-XX:+UseSerialGC");
// passthrough these properties
for (var prop : List.of("es.path.home", "es.path.conf", "es.distribution.type")) {
- jvmOptions.add("-D%s=%s".formatted(prop, sysprops.get(prop)));
+ jvmOptions.add("-D%s=%s".formatted(prop, quote(sysprops.get(prop))));
}
return String.join(";", jvmOptions);
}
diff --git a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java
index b683884a37571..e4b651fcb77af 100644
--- a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java
+++ b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java
@@ -25,6 +25,10 @@
public class ProcrunCommandTests extends WindowsServiceCliTestCase {
+ public ProcrunCommandTests(boolean spaceInPath) {
+ super(spaceInPath);
+ }
+
PreExecuteHook preExecuteHook;
boolean includeLogArgs;
String additionalArgs;
diff --git a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/WindowsServiceCliTestCase.java b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/WindowsServiceCliTestCase.java
index b727774ea2d1d..808173005b96f 100644
--- a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/WindowsServiceCliTestCase.java
+++ b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/WindowsServiceCliTestCase.java
@@ -8,6 +8,8 @@
package org.elasticsearch.windows.service;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
import org.elasticsearch.cli.CommandTestCase;
import org.junit.Before;
@@ -47,6 +49,15 @@ public abstract class WindowsServiceCliTestCase extends CommandTestCase {
int mockProcessExit = 0;
ProcessValidator mockProcessValidator = null;
+ @ParametersFactory
+ public static Iterable