From f428aa2f4cc6b285f91af66d73fc1a79bf47464d Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Tue, 31 Oct 2023 14:32:52 -0400 Subject: [PATCH 01/47] [ci] Disable intake jobs in Jenkins (#101641) --- ...icsearch+intake+multijob+bwc-snapshots.yml | 1 - ...ic+elasticsearch+intake+multijob+part1.yml | 1 - ...ic+elasticsearch+intake+multijob+part2.yml | 1 - ...ic+elasticsearch+intake+multijob+part3.yml | 1 - ...sticsearch+intake+multijob+rest-compat.yml | 1 - ...ticsearch+intake+multijob+sanity-check.yml | 1 - ...ntake+multijob+update-last-good-commit.yml | 3 +- .ci/jobs.t/elastic+elasticsearch+intake.yml | 18 ++--------- .../elastic+elasticsearch+periodic+bwc.yml | 2 +- .ci/templates.t/generic-gradle-unix.yml | 5 +-- .../matrix-gradle-unix-disabled.yml | 32 ------------------- .ci/templates.t/matrix-gradle-unix.yml | 5 +-- 12 files changed, 12 insertions(+), 59 deletions(-) delete mode 100644 .ci/templates.t/matrix-gradle-unix-disabled.yml diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml index 70509792a9d8..20e3ef376091 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml @@ -3,7 +3,6 @@ jjbb-template: matrix-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc-snapshots - job-display-name: "elastic / elasticsearch # %BRANCH% - intake bwc" - - job-description: Elasticsearch %BRANCH% branch intake backwards compatibility checks. 
- matrix-yaml-file: ".ci/snapshotBwcVersions" - matrix-variable: BWC_VERSION - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$BWC_VERSION#bwcTest" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part1.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part1.yml index e8a52d795306..999dfaf94b4b 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part1.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part1.yml @@ -3,5 +3,4 @@ jjbb-template: generic-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+part1 - job-display-name: "elastic / elasticsearch # %BRANCH% - intake part 1" - - job-description: Elasticsearch %BRANCH% branch intake check part 1. - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part2.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part2.yml index 15cb91eea572..7cb51800bd78 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part2.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part2.yml @@ -3,5 +3,4 @@ jjbb-template: generic-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+part2 - job-display-name: "elastic / elasticsearch # %BRANCH% - intake part 2" - - job-description: Elasticsearch %BRANCH% branch intake check part 2. 
- gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part3.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part3.yml index 685c957dc28c..0965b566aeeb 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part3.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+part3.yml @@ -3,5 +3,4 @@ jjbb-template: generic-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+part3 - job-display-name: "elastic / elasticsearch # %BRANCH% - intake part 3" - - job-description: Elasticsearch %BRANCH% branch intake check part 3. - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+rest-compat.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+rest-compat.yml index eea1cfecdb12..13feea1bc99d 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+rest-compat.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+rest-compat.yml @@ -3,5 +3,4 @@ jjbb-template: generic-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+rest-compat - job-display-name: "elastic / elasticsearch # %BRANCH% - intake rest compatibility" - - job-description: Elasticsearch %BRANCH% branch intake REST compatibility checks. 
- gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkRestCompat" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+sanity-check.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+sanity-check.yml index 047eca2cf5a4..77720544e45c 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+sanity-check.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+sanity-check.yml @@ -3,5 +3,4 @@ jjbb-template: generic-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+sanity-check - job-display-name: "elastic / elasticsearch # %BRANCH% - intake sanity check" - - job-description: Elasticsearch %BRANCH% branch intake sanity check. - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files precommit" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+update-last-good-commit.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+update-last-good-commit.yml index 2a1d462d9ac8..a224e9b0eee4 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+update-last-good-commit.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+update-last-good-commit.yml @@ -2,7 +2,8 @@ - job: name: elastic+elasticsearch+%BRANCH%+intake+multijob+update-last-good-commit display-name: "elastic / elasticsearch # %BRANCH% - update last good commit" - description: Elasticsearch %BRANCH% branch update last good commit in build-stats. 
+ description: "This job has been migrated to Buildkite.\n" + disabled: true node: light properties: [] builders: diff --git a/.ci/jobs.t/elastic+elasticsearch+intake.yml b/.ci/jobs.t/elastic+elasticsearch+intake.yml index 138318897b52..70af45a7aab2 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake.yml @@ -2,24 +2,12 @@ - job: name: elastic+elasticsearch+%BRANCH%+intake display-name: "elastic / elasticsearch # %BRANCH% - intake" - description: "Testing of the Elasticsearch %BRANCH% branch on every push.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true project-type: multijob node: master vault: [] - triggers: - # We use this trigger instead of the provided "github" webhook trigger because it's more robust. - # Here we only trigger builds for pushes to the corresponding branch, rather than a push to any branch of the - # configured git repository. This avoids duplicate builds being triggered when pushes to multiple branches are - # done in quick succession. 
- - generic-webhook-trigger: - post-content-params: - - type: JSONPath - key: ref - value: '$.ref' - regex-filter-text: '$ref' - regex-filter-expression: "^refs/heads/%BRANCH%$" - cause: Push to GitHub (refs/heads/%BRANCH%) - silent-response: true + triggers: [] scm: - git: wipe-workspace: false diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml index 2eeb08c6cff6..9d64deadeabd 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml @@ -1,5 +1,5 @@ --- -jjbb-template: matrix-gradle-unix-disabled.yml +jjbb-template: matrix-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+periodic+bwc - job-display-name: "elastic / elasticsearch # %BRANCH% - backwards compatibility matrix" diff --git a/.ci/templates.t/generic-gradle-unix.yml b/.ci/templates.t/generic-gradle-unix.yml index 6243155c5388..a5de6178f7df 100644 --- a/.ci/templates.t/generic-gradle-unix.yml +++ b/.ci/templates.t/generic-gradle-unix.yml @@ -2,11 +2,12 @@ - job: name: "{job-name}" display-name: "{job-display-name}" - description: "{job-description}" + description: "This job has been migrated to Buildkite.\n" + disabled: true workspace: /dev/shm/{job-name} builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA JAVA11_HOME=$HOME/.java/java11 diff --git a/.ci/templates.t/matrix-gradle-unix-disabled.yml b/.ci/templates.t/matrix-gradle-unix-disabled.yml deleted file mode 100644 index 1eafe77a5ec7..000000000000 --- a/.ci/templates.t/matrix-gradle-unix-disabled.yml +++ /dev/null @@ -1,32 +0,0 @@ ---- -- job: - name: "{job-name}" - display-name: "{job-display-name}" - description: "This job has been migrated to Buildkite.\n" - disabled: true - project-type: matrix - child-workspace: /dev/shm/{job-name} - node: master - scm: - - git: - wipe-workspace: 
false - axes: - - axis: - type: slave - name: nodes - values: - - "general-purpose" - - axis: - type: yaml - filename: "{matrix-yaml-file}" - name: "{matrix-variable}" - builders: - - inject: - properties-file: ".ci/java-versions.properties" - properties-content: | - JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA - JAVA11_HOME=$HOME/.java/java11 - JAVA16_HOME=$HOME/.java/openjdk16 - - shell: | - #!/usr/local/bin/runbld --redirect-stderr - $WORKSPACE/.ci/scripts/run-gradle.sh {gradle-args} diff --git a/.ci/templates.t/matrix-gradle-unix.yml b/.ci/templates.t/matrix-gradle-unix.yml index c69eb4ce65ce..1eafe77a5ec7 100644 --- a/.ci/templates.t/matrix-gradle-unix.yml +++ b/.ci/templates.t/matrix-gradle-unix.yml @@ -2,7 +2,8 @@ - job: name: "{job-name}" display-name: "{job-display-name}" - description: "{job-description}" + description: "This job has been migrated to Buildkite.\n" + disabled: true project-type: matrix child-workspace: /dev/shm/{job-name} node: master @@ -21,7 +22,7 @@ name: "{matrix-variable}" builders: - inject: - properties-file: '.ci/java-versions.properties' + properties-file: ".ci/java-versions.properties" properties-content: | JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA JAVA11_HOME=$HOME/.java/java11 From 03b5bbab6e173d5224e202e0fc87e3a0b164c873 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 31 Oct 2023 14:49:37 -0600 Subject: [PATCH 02/47] Create file watcher for master nodes (#101625) This commit moves the logic of watchign a file only on the master node into a new shared class, MasterNodeFileWatchingService. 
relates #101572 --- .../service/FileSettingsService.java | 74 +----------- .../MasterNodeFileWatchingService.java | 108 ++++++++++++++++++ 2 files changed, 112 insertions(+), 70 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/reservedstate/service/MasterNodeFileWatchingService.java diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index e1f6b4e80977..2d13af0248a7 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -11,22 +11,17 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.file.AbstractFileWatchingService; import org.elasticsearch.env.Environment; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.BufferedInputStream; import java.io.IOException; import java.nio.file.Files; -import java.nio.file.attribute.FileTime; -import java.time.Instant; import java.util.concurrent.ExecutionException; import static org.elasticsearch.xcontent.XContentType.JSON; @@ -42,16 +37,14 @@ * the service as a listener to cluster state changes, so that we can enable the file watcher thread when this * node becomes a master node. 
*/ -public class FileSettingsService extends AbstractFileWatchingService implements ClusterStateListener { +public class FileSettingsService extends MasterNodeFileWatchingService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(FileSettingsService.class); public static final String SETTINGS_FILE_NAME = "settings.json"; public static final String NAMESPACE = "file_settings"; public static final String OPERATOR_DIRECTORY = "operator"; - private final ClusterService clusterService; private final ReservedClusterStateService stateService; - private volatile boolean active = false; /** * Constructs the {@link FileSettingsService} @@ -61,70 +54,10 @@ public class FileSettingsService extends AbstractFileWatchingService implements * @param environment we need the environment to pull the location of the config and operator directories */ public FileSettingsService(ClusterService clusterService, ReservedClusterStateService stateService, Environment environment) { - super(environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); - this.clusterService = clusterService; + super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; } - @Override - protected void doStart() { - // We start the file watcher when we know we are master from a cluster state change notification. - // We need the additional active flag, since cluster state can change after we've shutdown the service - // causing the watcher to start again. 
- this.active = Files.exists(watchedFileDir().getParent()); - if (active == false) { - // we don't have a config directory, we can't possibly launch the file settings service - return; - } - if (DiscoveryNode.isMasterNode(clusterService.getSettings())) { - clusterService.addListener(this); - } - } - - @Override - protected void doStop() { - this.active = false; - super.doStop(); - } - - @Override - public final void clusterChanged(ClusterChangedEvent event) { - ClusterState clusterState = event.state(); - if (clusterState.nodes().isLocalNodeElectedMaster()) { - synchronized (this) { - if (watching() || active == false) { - refreshExistingFileStateIfNeeded(clusterState); - return; - } - startWatcher(); - } - } else if (event.previousState().nodes().isLocalNodeElectedMaster()) { - stopWatcher(); - } - } - - /** - * 'Touches' the settings file so the file watcher will re-processes it. - *

- * The file processing is asynchronous, the cluster state or the file must be already updated such that - * the version information in the file is newer than what's already saved as processed in the - * cluster state. - * - * For snapshot restores we first must restore the snapshot and then force a refresh, since the cluster state - * metadata version must be reset to 0 and saved in the cluster state. - */ - private void refreshExistingFileStateIfNeeded(ClusterState clusterState) { - if (watching()) { - if (shouldRefreshFileState(clusterState) && Files.exists(watchedFile())) { - try { - Files.setLastModifiedTime(watchedFile(), FileTime.from(Instant.now())); - } catch (IOException e) { - logger.warn("encountered I/O error trying to update file settings timestamp", e); - } - } - } - } - /** * Used by snapshot restore service {@link org.elasticsearch.snapshots.RestoreService} to prepare the reserved * state of the snapshot for the current cluster. @@ -162,7 +95,8 @@ public void handleSnapshotRestore(ClusterState clusterState, Metadata.Builder md * @param clusterState State of the cluster * @return true if file settings metadata version is exactly 0, false otherwise. */ - private boolean shouldRefreshFileState(ClusterState clusterState) { + @Override + protected boolean shouldRefreshFileState(ClusterState clusterState) { // We check if the version was reset to 0, and force an update if a file exists. This can happen in situations // like snapshot restores. 
ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(NAMESPACE); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/MasterNodeFileWatchingService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/MasterNodeFileWatchingService.java new file mode 100644 index 000000000000..444cde45d696 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/MasterNodeFileWatchingService.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.reservedstate.service; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.file.AbstractFileWatchingService; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.FileTime; +import java.time.Instant; + +public abstract class MasterNodeFileWatchingService extends AbstractFileWatchingService implements ClusterStateListener { + + private static final Logger logger = LogManager.getLogger(MasterNodeFileWatchingService.class); + + private final ClusterService clusterService; + private volatile boolean active = false; + + protected MasterNodeFileWatchingService(ClusterService clusterService, Path watchedFile) { + super(watchedFile); + this.clusterService = clusterService; + } + + @Override + 
protected void doStart() { + // We start the file watcher when we know we are master from a cluster state change notification. + // We need the additional active flag, since cluster state can change after we've shutdown the service + // causing the watcher to start again. + this.active = Files.exists(watchedFileDir().getParent()); + if (active == false) { + // we don't have a config directory, we can't possibly launch the file settings service + return; + } + if (DiscoveryNode.isMasterNode(clusterService.getSettings())) { + clusterService.addListener(this); + } + } + + @Override + protected void doStop() { + this.active = false; + super.doStop(); + } + + @Override + public final void clusterChanged(ClusterChangedEvent event) { + ClusterState clusterState = event.state(); + if (clusterState.nodes().isLocalNodeElectedMaster()) { + synchronized (this) { + if (watching() || active == false) { + refreshExistingFileStateIfNeeded(clusterState); + return; + } + startWatcher(); + } + } else if (event.previousState().nodes().isLocalNodeElectedMaster()) { + stopWatcher(); + } + } + + /** + * 'Touches' the settings file so the file watcher will re-processes it. + *

+ * The file processing is asynchronous, the cluster state or the file must be already updated such that + * the version information in the file is newer than what's already saved as processed in the + * cluster state. + * + * For snapshot restores we first must restore the snapshot and then force a refresh, since the cluster state + * metadata version must be reset to 0 and saved in the cluster state. + */ + private void refreshExistingFileStateIfNeeded(ClusterState clusterState) { + if (watching()) { + if (shouldRefreshFileState(clusterState) && Files.exists(watchedFile())) { + try { + Files.setLastModifiedTime(watchedFile(), FileTime.from(Instant.now())); + } catch (IOException e) { + logger.warn("encountered I/O error trying to update file settings timestamp", e); + } + } + } + } + + /** + * There may be an indication in cluster state that the file we are watching + * should be re-processed: for example, after cluster state has been restored + * from a snapshot. By default, we do nothing, but this method should be overridden + * if different behavior is desired. + * @param clusterState State of the cluster + * @return false, by default + */ + protected boolean shouldRefreshFileState(ClusterState clusterState) { + return false; + } +} From d0064250ad9592be77dec14091394c376d0ac605 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 31 Oct 2023 16:18:32 -0600 Subject: [PATCH 03/47] Forbid non-named threads (#101631) This commit forbids the use of Thread constructors that do not take a name. In general nameless threads are more difficult to understand their purpose when debugging. Note that this is only added to production signatures. Tests are not forbidden here so as not to be pedantic (or require a larger change since many tests create anonymous threads). 
relates #101628 --- .../src/main/resources/forbidden/es-server-signatures.txt | 4 ++++ .../main/java/org/elasticsearch/launcher/CliToolLauncher.java | 2 +- server/src/main/java/org/elasticsearch/ExceptionsHelper.java | 2 +- .../main/java/org/elasticsearch/bootstrap/Elasticsearch.java | 4 ++-- .../elasticsearch/qa/die_with_dignity/DieWithDignityIT.java | 2 +- 5 files changed, 9 insertions(+), 5 deletions(-) diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index d19500c3c332..34f39bbc4ca5 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -160,3 +160,7 @@ org.elasticsearch.cluster.ClusterState#compatibilityVersions() org.elasticsearch.cluster.ClusterFeatures#nodeFeatures() @defaultMessage ClusterFeatures#clusterHasFeature is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster. 
org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.features.NodeFeature) + +@defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug +java.lang.Thread#(java.lang.Runnable) +java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) diff --git a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java index 700c5bc2b321..4fd2512f2cbb 100644 --- a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java +++ b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java @@ -94,7 +94,7 @@ static Thread createShutdownHook(Terminal terminal, Closeable closeable) { e.printStackTrace(terminal.getErrorWriter()); } terminal.flush(); // make sure to flush whatever the close or error might have written - }); + }, "elasticsearch-cli-shutdown"); } diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 6c0836c27744..d625da5df9cc 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -257,7 +257,7 @@ public static void maybeDieOnAnotherThread(final Throwable throwable) { final String formatted = ExceptionsHelper.formatStackTrace(Thread.currentThread().getStackTrace()); logger.error("fatal error {}: {}\n{}", error.getClass().getCanonicalName(), error.getMessage(), formatted); } finally { - new Thread(() -> { throw error; }).start(); + new Thread(() -> { throw error; }, "elasticsearch-error-rethrower").start(); } }); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 628d2bce2815..eb2c2b7f6738 100644 --- 
a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -174,7 +174,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { // initialize probes before the security manager is installed initializeProbes(); - Runtime.getRuntime().addShutdownHook(new Thread(Elasticsearch::shutdown)); + Runtime.getRuntime().addShutdownHook(new Thread(Elasticsearch::shutdown, "elasticsearch-shutdown")); // look for jar hell final Logger logger = LogManager.getLogger(JarHell.class); @@ -376,7 +376,7 @@ private static void startCliMonitorThread(InputStream stdin) { Bootstrap.exit(1); } } - }).start(); + }, "elasticsearch-cli-monitor-thread").start(); } /** diff --git a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index a39d8a521474..291e4124e17e 100644 --- a/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/test/external-modules/die-with-dignity/src/javaRestTest/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -68,7 +68,7 @@ public void testDieWithDignity() throws Exception { line, ".*ERROR.*", ".*ElasticsearchUncaughtExceptionHandler.*", - ".*fatal error in thread \\[Thread-\\d+\\], exiting.*", + ".*fatal error in thread \\[elasticsearch-error-rethrower\\], exiting.*", ".*java.lang.OutOfMemoryError: Requested array size exceeds VM limit.*" )) { fatalErrorInThreadExiting = true; From fe0b1748d2e7021d1ca1349d96aaea7232acbfcd Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 31 Oct 2023 20:23:42 -0400 Subject: [PATCH 04/47] Drop very old version checks from searchable snapshots (#101605) --- .../MountSearchableSnapshotRequest.java | 21 +-------- .../SearchableSnapshotShardStats.java | 45 
++++--------------- ...ransportMountSearchableSnapshotAction.java | 23 ++-------- .../action/cache/FrozenCacheInfoAction.java | 21 ++++----- .../cache/blob/BlobStoreCacheService.java | 18 -------- ...archableSnapshotIndexMetadataUpgrader.java | 3 +- .../cache/common/TestUtils.java | 10 ----- 7 files changed, 22 insertions(+), 119 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java index 276eedc6408e..76b0634b55b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.searchablesnapshots; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -81,11 +79,6 @@ public class MountSearchableSnapshotRequest extends MasterNodeRequest listener) { - if (request.discoveryNode.getVersion().onOrAfter(Version.V_7_12_0)) { - transportService.sendChildRequest( - request.discoveryNode, - FrozenCacheInfoNodeAction.NAME, - nodeRequest, - task, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener, FrozenCacheInfoResponse::new, TransportResponseHandler.TRANSPORT_WORKER) - ); - } else { - listener.onResponse(new FrozenCacheInfoResponse(false)); - } + transportService.sendChildRequest( + request.discoveryNode, + FrozenCacheInfoNodeAction.NAME, + nodeRequest, + task, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, FrozenCacheInfoResponse::new, 
TransportResponseHandler.TRANSPORT_WORKER) + ); } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java index ebf1296da9f5..586621082adb 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java @@ -57,11 +57,6 @@ public class BlobStoreCacheService extends AbstractLifecycleComponent { private static final Logger logger = LogManager.getLogger(BlobStoreCacheService.class); - /** - * Before 7.12.0 blobs were cached using a 4KB or 8KB maximum length. - */ - private static final Version OLD_CACHED_BLOB_SIZE_VERSION = Version.V_7_12_0; - public static final int DEFAULT_CACHED_BLOB_SIZE = ByteSizeUnit.KB.toIntBytes(1); private static final Cache LOG_EXCEEDING_FILES_CACHE = CacheBuilder.builder() .setExpireAfterAccess(TimeValue.timeValueMinutes(60L)) @@ -333,14 +328,6 @@ protected static String generatePath(final SnapshotId snapshotId, final IndexId public ByteRange computeBlobCacheByteRange(ShardId shardId, String fileName, long fileLength, ByteSizeValue maxMetadataLength) { final LuceneFilesExtensions fileExtension = LuceneFilesExtensions.fromExtension(IndexFileNames.getExtension(fileName)); - if (useLegacyCachedBlobSizes()) { - if (fileLength <= ByteSizeUnit.KB.toBytes(8L)) { - return ByteRange.of(0L, fileLength); - } else { - return ByteRange.of(0L, ByteSizeUnit.KB.toBytes(4L)); - } - } - if (fileExtension != null && fileExtension.isMetadata()) { final long maxAllowedLengthInBytes = maxMetadataLength.getBytes(); if (fileLength > maxAllowedLengthInBytes) { @@ -351,11 +338,6 @@ public ByteRange computeBlobCacheByteRange(ShardId 
shardId, String fileName, lon return ByteRange.of(0L, Math.min(fileLength, DEFAULT_CACHED_BLOB_SIZE)); } - protected boolean useLegacyCachedBlobSizes() { - final Version minNodeVersion = clusterService.state().nodes().getMinNodeVersion(); - return minNodeVersion.before(OLD_CACHED_BLOB_SIZE_VERSION); - } - private static void logExceedingFile(ShardId shardId, LuceneFilesExtensions extension, long length, ByteSizeValue maxAllowedLength) { if (logger.isInfoEnabled()) { try { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index 8a7bcc565acf..ccdad61adee5 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -51,7 +50,7 @@ private void clusterChanged(ClusterChangedEvent event) { return; } - if (event.localNodeMaster() && event.state().nodes().getMinNodeVersion().onOrAfter(Version.V_7_13_0)) { + if (event.localNodeMaster()) { // only want one doing this at a time, assume it succeeds and reset if not. 
if (upgraded.compareAndSet(false, true)) { final Executor executor = threadPool.generic(); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java index d72a137285c6..d1bcf842b7c8 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java @@ -270,11 +270,6 @@ public NoopBlobStoreCacheService() { super(null, mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX); } - @Override - protected boolean useLegacyCachedBlobSizes() { - return false; - } - @Override protected void innerGet(GetRequest request, ActionListener listener) { listener.onFailure(new IndexNotFoundException(request.index())); @@ -299,11 +294,6 @@ public SimpleBlobStoreCacheService() { super(null, mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX); } - @Override - protected boolean useLegacyCachedBlobSizes() { - return false; - } - @Override protected void innerGet(GetRequest request, ActionListener listener) { final BytesArray bytes = blobs.get(request.id()); From 288fe44cc34ae8020c9f32f244f066135ca19dd2 Mon Sep 17 00:00:00 2001 From: William Brafford Date: Tue, 31 Oct 2023 23:10:29 -0400 Subject: [PATCH 05/47] Don't just obfuscate NodeMetadata version checks (#101644) In #99988, one of the code changes just obfuscated a check against minimumCompatibilityVersion, rather than changing NodeMetadata in such a way that the comparison could work in serverless. This PR switches it back so that we can make a more comprehensive fix in NodeMetadata. 
--- .../org/elasticsearch/env/NodeMetadata.java | 18 +----------------- .../elasticsearch/env/NodeMetadataTests.java | 18 ------------------ 2 files changed, 1 insertion(+), 35 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index a714ee4cf5ec..77415bbaea94 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -114,7 +114,7 @@ public void verifyUpgradeToCurrentVersion() { assert (nodeVersion.equals(Version.V_EMPTY) == false) || (Version.CURRENT.major <= Version.V_7_0_0.major + 1) : "version is required in the node metadata from v9 onwards"; - if (NodeMetadata.isNodeVersionWireCompatible(nodeVersion.toString()) == false) { + if (nodeVersion.before(Version.CURRENT.minimumCompatibilityVersion())) { throw new IllegalStateException( "cannot upgrade a node from version [" + nodeVersion @@ -222,20 +222,4 @@ public NodeMetadata fromXContent(XContentParser parser) throws IOException { public static final MetadataStateFormat FORMAT = new NodeMetadataStateFormat(false); - /** - * Check whether a node version is compatible with the current minimum transport version. 
- * @param version A version identifier as a string - * @throws IllegalArgumentException if version is not a valid transport version identifier - * @return true if the version is compatible, false otherwise - */ - // visible for testing - static boolean isNodeVersionWireCompatible(String version) { - try { - Version esVersion = Version.fromString(version); - return esVersion.onOrAfter(Version.CURRENT.minimumCompatibilityVersion()); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Cannot parse [" + version + "] as a transport version identifier", e); - } - } - } diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index fc89e4d2176c..b7001943073b 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; @@ -160,23 +159,6 @@ public void testUpgradeMarksPreviousVersion() { assertThat(nodeMetadata.previousNodeVersion(), equalTo(version)); } - public void testIsNodeVersionWireCompatible() { - String nodeVersion = VersionUtils.randomCompatibleVersion(random(), Version.CURRENT).toString(); - assertTrue(NodeMetadata.isNodeVersionWireCompatible(nodeVersion)); - nodeVersion = VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()).toString(); - assertFalse(NodeMetadata.isNodeVersionWireCompatible(nodeVersion)); - - String transportVersion = TransportVersionUtils.randomCompatibleVersion(random()).toString(); - IllegalArgumentException e1 = expectThrows( - IllegalArgumentException.class, - () -> 
NodeMetadata.isNodeVersionWireCompatible(transportVersion) - ); - assertThat(e1.getMessage(), equalTo("Cannot parse [" + transportVersion + "] as a transport version identifier")); - - IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> NodeMetadata.isNodeVersionWireCompatible("x.y.z")); - assertThat(e2.getMessage(), equalTo("Cannot parse [x.y.z] as a transport version identifier")); - } - public static Version tooNewVersion() { return Version.fromId(between(Version.CURRENT.id + 1, 99999999)); } From fc5c2d8695032c63aee44c804b241b643bc35a2f Mon Sep 17 00:00:00 2001 From: William Brafford Date: Tue, 31 Oct 2023 23:12:26 -0400 Subject: [PATCH 06/47] Remove ThreadGroup#allowThreadSuspension from forbidden APIs (#101651) java.lang.ThreadGroup#allowThreadSuspension(boolean) was removed in JDK 21. See https://www.oracle.com/java/technologies/javase/21all-relnotes.html#JDK-8297295 --- .../src/main/resources/forbidden/jdk-deprecated.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/build-tools-internal/src/main/resources/forbidden/jdk-deprecated.txt b/build-tools-internal/src/main/resources/forbidden/jdk-deprecated.txt index 7a0c8f968507..d7545a5a5fac 100644 --- a/build-tools-internal/src/main/resources/forbidden/jdk-deprecated.txt +++ b/build-tools-internal/src/main/resources/forbidden/jdk-deprecated.txt @@ -210,7 +210,6 @@ java.lang.Thread#countStackFrames() java.lang.Thread#resume() java.lang.Thread#stop() java.lang.Thread#suspend() -java.lang.ThreadGroup#allowThreadSuspension(boolean) java.lang.ThreadGroup#checkAccess() java.lang.ThreadGroup#destroy() java.lang.ThreadGroup#isDaemon() From 2f0e58520e75772ae49b223dea7e8168828d04ad Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 1 Nov 2023 08:44:38 +0000 Subject: [PATCH 07/47] Document DSL setting to signalling lack of progress for index (#101635) --- .../settings/data-stream-lifecycle-settings.asciidoc | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git 
a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc index 8c3f4c793e5e..023a8fcf860e 100644 --- a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc +++ b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc @@ -51,6 +51,17 @@ segment size is a way to prevent indices from having a long tail of very small s This setting controls what value does <> configures on the target index. It defaults to `100MB`. +[[data-streams-lifecycle-signalling-error-retry-interval]] +`data_streams.lifecycle.signalling.error_retry_interval`:: +(<>, integer) +Represents the number of retries data stream lifecycle has to perform for an index +in an error step in order to signal that the index is not progressing (i.e. it's +stuck in an error step). +The current signalling mechanism is a log statement at the `error` level however, +the signalling mechanism can be extended in the future. +Defaults to 10 retries. + + ==== Index level settings The following index-level settings are typically configured on the backing indices of a data stream. From 04405978f56930ebe750b6d81051cf53058b278d Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Wed, 1 Nov 2023 09:46:51 +0100 Subject: [PATCH 08/47] Fix SnapshotRetentionConfigurationTests (#101603) The test failure happens as we choose same ids with `"snap-" + randomAlphaOfLength(3)`. This change uses UUID for ids to minimize the change of collisions. 
--- .../repositories/RepositoryData.java | 16 ++++++++++++++++ .../java/org/elasticsearch/test/ESTestCase.java | 5 +++++ .../slm/SnapshotRetentionConfigurationTests.java | 2 +- 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 1c5ea5a2b001..17ac4ef38f1b 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -1229,6 +1229,22 @@ public int hashCode() { return Objects.hash(snapshotState, version, startTimeMillis, endTimeMillis, slmPolicy); } + @Override + public String toString() { + return "SnapshotDetails{" + + "snapshotState=" + + snapshotState + + ", version=" + + version + + ", startTimeMillis=" + + startTimeMillis + + ", endTimeMillis=" + + endTimeMillis + + ", slmPolicy='" + + slmPolicy + + "'}"; + } + public static SnapshotDetails fromSnapshotInfo(SnapshotInfo snapshotInfo) { return new SnapshotDetails( snapshotInfo.state(), diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3c458a43f91b..7631c8606480 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -45,6 +45,7 @@ import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; @@ -1003,6 +1004,10 @@ public static String randomIdentifier() { return randomAlphaOfLengthBetween(8, 12).toLowerCase(Locale.ROOT); } + public static String 
randomUUID() { + return UUIDs.randomBase64UUID(random()); + } + public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) { return RandomizedTest.randomUnicodeOfLengthBetween(minCodeUnits, maxCodeUnits); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java index ec3fc2b8a88e..2ac6b633c0f0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfigurationTests.java @@ -313,7 +313,7 @@ private SnapshotInfo makeInfo(long startTime) { meta.put(SnapshotsService.POLICY_ID_METADATA_FIELD, REPO); final int totalShards = between(1, 20); SnapshotInfo snapInfo = new SnapshotInfo( - new Snapshot(REPO, new SnapshotId("snap-" + randomAlphaOfLength(3), "uuid")), + new Snapshot(REPO, new SnapshotId("snap-" + randomUUID(), "uuid")), Collections.singletonList("foo"), Collections.singletonList("bar"), Collections.emptyList(), From 35b57bfe959174dd0ddba4ea2cb0c8467211946b Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 1 Nov 2023 09:30:20 +0000 Subject: [PATCH 09/47] Remove IndexShardSnapshotStatus#isAborted (#101661) We'd like to add more nuance to the aborted status of `IndexShardSnapshotStatus` than can be carried in a plain boolean, so this commit replaces all usages of the `isAborted()` method with calls to `ensureNotAborted()`. 
--- .../snapshots/IndexShardSnapshotStatus.java | 9 ++----- .../repositories/Repository.java | 2 +- .../blobstore/BlobStoreRepository.java | 27 ++++++++++++------- 3 files changed, 21 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index 843f049d30e4..be3f620b3a9d 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -119,9 +119,8 @@ public synchronized Copy moveToStarted( this.totalFileCount = totalFileCount; this.incrementalSize = incrementalSize; this.totalSize = totalSize; - } else if (isAborted()) { - throw new AbortedSnapshotException(); } else { + ensureNotAborted(); assert false : "Should not try to move stage [" + stage.get() + "] to [STARTED]"; throw new IllegalStateException( "Unable to move the shard snapshot status to [STARTED]: " + "expecting [INIT] but got [" + stage.get() + "]" @@ -195,12 +194,8 @@ public ShardSnapshotResult getShardSnapshotResult() { return shardSnapshotResult.get(); } - public boolean isAborted() { - return stage.get() == Stage.ABORTED; - } - public void ensureNotAborted() { - if (isAborted()) { + if (stage.get() == Stage.ABORTED) { throw new AbortedSnapshotException(); } } diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index 1fd01631818b..4e957fdf6927 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -208,7 +208,7 @@ default RepositoryStats stats() { * Creates a snapshot of the shard referenced by the given {@link SnapshotShardContext}. *

* As snapshot process progresses, implementation of this method should update {@link IndexShardSnapshotStatus} object returned by - * {@link SnapshotShardContext#status()} and check its {@link IndexShardSnapshotStatus#isAborted()} to see if the snapshot process + * {@link SnapshotShardContext#status()} and call {@link IndexShardSnapshotStatus#ensureNotAborted()} to see if the snapshot process * should be aborted. * * @param snapshotShardContext snapshot shard context that must be completed via {@link SnapshotShardContext#onResponse} or diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 743e978181f3..b307b251362f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -3040,10 +3040,7 @@ private void doSnapshotShard(SnapshotShardContext context) { } } for (String fileName : fileNames) { - if (snapshotStatus.isAborted()) { - logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName); - throw new AbortedSnapshotException(); - } + ensureNotAborted(shardId, snapshotId, snapshotStatus, fileName); logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName); final StoreFileMetadata md = metadataFromStore.get(fileName); @@ -3245,6 +3242,16 @@ private void doSnapshotShard(SnapshotShardContext context) { } } + private static void ensureNotAborted(ShardId shardId, SnapshotId snapshotId, IndexShardSnapshotStatus snapshotStatus, String fileName) { + try { + snapshotStatus.ensureNotAborted(); + } catch (Exception e) { + logger.debug("[{}] [{}] {} on the file [{}], exiting", shardId, snapshotId, e.getMessage(), fileName); + assert e instanceof AbortedSnapshotException : e; + throw e; + } + } + protected void snapshotFiles( SnapshotShardContext context, 
BlockingQueue filesToSnapshot, @@ -3273,7 +3280,12 @@ private static boolean assertFileContentsMatchHash( store.decRef(); } } else { - assert snapshotStatus.isAborted() : "if the store is already closed we must have been aborted"; + try { + snapshotStatus.ensureNotAborted(); + assert false : "if the store is already closed we must have been aborted"; + } catch (Exception e) { + assert e instanceof AbortedSnapshotException : e; + } } return true; } @@ -3712,10 +3724,7 @@ public int read(byte[] b, int off, int len) throws IOException { } private void checkAborted() { - if (snapshotStatus.isAborted()) { - logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileInfo.physicalName()); - throw new AbortedSnapshotException(); - } + ensureNotAborted(shardId, snapshotId, snapshotStatus, fileInfo.physicalName()); } }; final String partName = fileInfo.partName(i); From 81f933755aa36e402dd42bd9c3425f31eb2b38d4 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 1 Nov 2023 11:24:42 +0000 Subject: [PATCH 10/47] Reduce exposure of mutable `IndexShardSnapshotStatus` (#101665) We expose the mutable `IndexShardSnapshotStatus` in various APIs, but essentially all callers convert it into `IndexShardSnapshotStatus.Copy` before doing anything with it. This commit tightens up the API to expose the immutable `IndexShardSnapshotStatus.Copy` directly so that callers cannot inadvertently access the mutable state of an ongoing shard snapshot. 
--- .../snapshots/AbortedSnapshotIT.java | 2 +- .../snapshots/SnapshotShardsServiceIT.java | 2 +- .../status/TransportNodesSnapshotsStatus.java | 6 +- .../TransportSnapshotsStatusAction.java | 13 ++- .../snapshots/IndexShardSnapshotStatus.java | 8 +- .../repositories/FilterRepository.java | 2 +- .../repositories/InvalidRepository.java | 2 +- .../repositories/Repository.java | 2 +- .../repositories/UnknownTypeRepository.java | 2 +- .../blobstore/BlobStoreRepository.java | 6 +- .../InternalSnapshotsInfoService.java | 2 +- .../snapshots/SnapshotShardsService.java | 91 +++++++++++-------- .../RepositoriesServiceTests.java | 2 +- .../InternalSnapshotsInfoServiceTests.java | 8 +- .../index/shard/RestoreOnlyRepository.java | 2 +- .../xpack/ccr/CcrRepositoryIT.java | 2 +- .../xpack/ccr/repository/CcrRepository.java | 2 +- 17 files changed, 84 insertions(+), 70 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java index e3bd85440c53..bd14f913b10e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/AbortedSnapshotIT.java @@ -78,7 +78,7 @@ public void run() { final var shardStatuses = snapshotShardsService.currentSnapshotShards(snapshot); assertEquals(1, shardStatuses.size()); - final var shardStatus = shardStatuses.get(new ShardId(index, 0)).asCopy(); + final var shardStatus = shardStatuses.get(new ShardId(index, 0)); logger.info("--> {}", shardStatus); if (i == 0) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java index ee955da01f4a..b2494c5bd2b9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShardsServiceIT.java @@ -71,7 +71,7 @@ public void testRetryPostingSnapshotStatusMessages() throws Exception { List stages = snapshotShardsService.currentSnapshotShards(snapshot) .values() .stream() - .map(status -> status.asCopy().getStage()) + .map(IndexShardSnapshotStatus.Copy::getStage) .toList(); assertThat(stages, hasSize(shards)); assertThat(stages, everyItem(equalTo(IndexShardSnapshotStatus.Stage.DONE))); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index a5c8b13f8dc7..c7a29e61da28 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -95,15 +95,15 @@ protected NodeSnapshotStatus nodeOperation(NodeRequest request, Task task) { try { final String nodeId = clusterService.localNode().getId(); for (Snapshot snapshot : request.snapshots) { - Map shardsStatus = snapshotShardsService.currentSnapshotShards(snapshot); + final var shardsStatus = snapshotShardsService.currentSnapshotShards(snapshot); if (shardsStatus == null) { continue; } Map shardMapBuilder = new HashMap<>(); - for (Map.Entry shardEntry : shardsStatus.entrySet()) { + for (final var shardEntry : shardsStatus.entrySet()) { final ShardId shardId = shardEntry.getKey(); - final IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardEntry.getValue().asCopy(); + final IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardEntry.getValue(); final IndexShardSnapshotStatus.Stage stage = lastSnapshotStatus.getStage(); String shardNodeId = null; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 2a6f0325be1d..f8b9a9571ddd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -243,7 +243,6 @@ private void buildResponse( entry.indices().get(shardId.getIndexName()), shardId ) - .asCopy() ); } else { shardStatus = new SnapshotIndexShardStatus(entry.shardId(shardEntry.getKey()), stage); @@ -322,7 +321,7 @@ private void loadRepositoryData( repositoriesService.repository(repositoryName) .getSnapshotInfo(new GetSnapshotInfoContext(snapshotIdsToLoad, true, task::isCancelled, (context, snapshotInfo) -> { List shardStatusBuilder = new ArrayList<>(); - final Map shardStatuses; + final Map shardStatuses; try { shardStatuses = snapshotShards(repositoryName, repositoryData, task, snapshotInfo); } catch (Exception e) { @@ -330,8 +329,8 @@ private void loadRepositoryData( context.onFailure(e); return; } - for (Map.Entry shardStatus : shardStatuses.entrySet()) { - IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue().asCopy(); + for (final var shardStatus : shardStatuses.entrySet()) { + IndexShardSnapshotStatus.Copy lastSnapshotStatus = shardStatus.getValue(); shardStatusBuilder.add(new SnapshotIndexShardStatus(shardStatus.getKey(), lastSnapshotStatus)); } final SnapshotsInProgress.State state = switch (snapshotInfo.state()) { @@ -374,14 +373,14 @@ private void loadRepositoryData( * @param snapshotInfo snapshot info * @return map of shard id to snapshot status */ - private Map snapshotShards( + private Map snapshotShards( final String repositoryName, final RepositoryData repositoryData, final CancellableTask task, final SnapshotInfo 
snapshotInfo ) throws IOException { final Repository repository = repositoriesService.repository(repositoryName); - final Map shardStatus = new HashMap<>(); + final Map shardStatus = new HashMap<>(); for (String index : snapshotInfo.indices()) { IndexId indexId = repositoryData.resolveIndexId(index); task.ensureNotCancelled(); @@ -394,7 +393,7 @@ private Map snapshotShards( if (shardFailure != null) { shardStatus.put(shardId, IndexShardSnapshotStatus.newFailed(shardFailure.reason())); } else { - final IndexShardSnapshotStatus shardSnapshotStatus; + final IndexShardSnapshotStatus.Copy shardSnapshotStatus; if (snapshotInfo.state() == SnapshotState.FAILED) { // If the snapshot failed, but the shard's snapshot does // not have an exception, it means that partial snapshots diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index be3f620b3a9d..140c4684d1a7 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -238,15 +238,15 @@ public static IndexShardSnapshotStatus newInitializing(ShardGeneration generatio return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, null, generation); } - public static IndexShardSnapshotStatus newFailed(final String failure) { + public static IndexShardSnapshotStatus.Copy newFailed(final String failure) { assert failure != null : "expecting non null failure for a failed IndexShardSnapshotStatus"; if (failure == null) { throw new IllegalArgumentException("A failure description is required for a failed IndexShardSnapshotStatus"); } - return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null); + return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null).asCopy(); } - public static 
IndexShardSnapshotStatus newDone( + public static IndexShardSnapshotStatus.Copy newDone( final long startTime, final long totalTime, final int incrementalFileCount, @@ -268,7 +268,7 @@ public static IndexShardSnapshotStatus newDone( incrementalSize, null, generation - ); + ).asCopy(); } /** diff --git a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java index b4dd0a2f37b3..c88bbcfa91b9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java @@ -129,7 +129,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return in.getShardSnapshotStatus(snapshotId, indexId, shardId); } diff --git a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java index ad0f956a1664..6bd967d84c89 100644 --- a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java @@ -137,7 +137,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { throw createCreationException(); } diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index 4e957fdf6927..5782dedf3cfb 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ 
b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -244,7 +244,7 @@ void restoreShard( * @param shardId shard id * @return snapshot status */ - IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId); + IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId); /** * Check if this instances {@link Settings} can be changed to the provided updated settings without recreating the repository. diff --git a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java index b9da0b1663c5..30f167d8c5cf 100644 --- a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java @@ -135,7 +135,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { throw createUnknownTypeException(); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index b307b251362f..4167717e0900 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -3509,7 +3509,7 @@ public InputStream maybeRateLimitSnapshots(InputStream stream, RateLimitingInput } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { 
BlobStoreIndexShardSnapshot snapshot = loadShardSnapshot(shardContainer(indexId, shardId), snapshotId); return IndexShardSnapshotStatus.newDone( snapshot.startTime(), @@ -3518,8 +3518,8 @@ public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, In snapshot.totalFileCount(), snapshot.incrementalSize(), snapshot.totalSize(), - null - ); // Not adding a real generation here as it doesn't matter to callers + null // Not adding a real generation here as it doesn't matter to callers + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java index 8451396e0e59..29b69b545e5d 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java @@ -220,7 +220,7 @@ protected void doRun() throws Exception { snapshotShard.snapshot().getSnapshotId(), snapshotShard.index(), snapshotShard.shardId() - ).asCopy().getTotalSize(); + ).getTotalSize(); logger.debug("snapshot shard size for {}: {} bytes", snapshotShard, snapshotShardSize); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 5765b0fc4b99..d842fc21f7fe 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; @@ -183,10 +184,18 @@ public void 
beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh * @param snapshot snapshot * @return map of shard id to snapshot status */ - public Map currentSnapshotShards(Snapshot snapshot) { + public Map currentSnapshotShards(Snapshot snapshot) { synchronized (shardSnapshots) { - final Map current = shardSnapshots.get(snapshot); - return current == null ? null : new HashMap<>(current); + final var current = shardSnapshots.get(snapshot); + if (current == null) { + return null; + } + + final Map result = Maps.newMapWithExpectedSize(current.size()); + for (final var entry : current.entrySet()) { + result.put(entry.getKey(), entry.getValue().asCopy()); + } + return result; } } @@ -483,44 +492,50 @@ public static String getShardStateId(IndexShard indexShard, IndexCommit snapshot private void syncShardStatsOnNewMaster(List entries) { for (SnapshotsInProgress.Entry snapshot : entries) { if (snapshot.state() == State.STARTED || snapshot.state() == State.ABORTED) { - Map localShards = currentSnapshotShards(snapshot.snapshot()); - if (localShards != null) { - Map masterShards = snapshot.shards(); - for (Map.Entry localShard : localShards.entrySet()) { - ShardId shardId = localShard.getKey(); - ShardSnapshotStatus masterShard = masterShards.get(shardId); - if (masterShard != null && masterShard.state().completed() == false) { - final IndexShardSnapshotStatus.Copy indexShardSnapshotStatus = localShard.getValue().asCopy(); - final Stage stage = indexShardSnapshotStatus.getStage(); - // Master knows about the shard and thinks it has not completed - if (stage == Stage.DONE) { - // but we think the shard is done - we need to make new master know that the shard is done - logger.debug( - "[{}] new master thinks the shard [{}] is not completed but the shard is done locally, " - + "updating status on the master", - snapshot.snapshot(), - shardId - ); - notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId, localShard.getValue().getShardSnapshotResult()); - - } else if 
(stage == Stage.FAILURE) { - // but we think the shard failed - we need to make new master know that the shard failed - logger.debug( - "[{}] new master thinks the shard [{}] is not completed but the shard failed locally, " - + "updating status on master", - snapshot.snapshot(), - shardId - ); - notifyFailedSnapshotShard( - snapshot.snapshot(), - shardId, - indexShardSnapshotStatus.getFailure(), - localShard.getValue().generation() - ); - } + final Map localShards; + synchronized (shardSnapshots) { + final var currentLocalShards = shardSnapshots.get(snapshot.snapshot()); + if (currentLocalShards == null) { + return; + } + localShards = Map.copyOf(currentLocalShards); + } + Map masterShards = snapshot.shards(); + for (Map.Entry localShard : localShards.entrySet()) { + ShardId shardId = localShard.getKey(); + ShardSnapshotStatus masterShard = masterShards.get(shardId); + if (masterShard != null && masterShard.state().completed() == false) { + final IndexShardSnapshotStatus.Copy indexShardSnapshotStatus = localShard.getValue().asCopy(); + final Stage stage = indexShardSnapshotStatus.getStage(); + // Master knows about the shard and thinks it has not completed + if (stage == Stage.DONE) { + // but we think the shard is done - we need to make new master know that the shard is done + logger.debug( + "[{}] new master thinks the shard [{}] is not completed but the shard is done locally, " + + "updating status on the master", + snapshot.snapshot(), + shardId + ); + notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId, localShard.getValue().getShardSnapshotResult()); + + } else if (stage == Stage.FAILURE) { + // but we think the shard failed - we need to make new master know that the shard failed + logger.debug( + "[{}] new master thinks the shard [{}] is not completed but the shard failed locally, " + + "updating status on master", + snapshot.snapshot(), + shardId + ); + notifyFailedSnapshotShard( + snapshot.snapshot(), + shardId, + 
indexShardSnapshotStatus.getFailure(), + localShard.getValue().generation() + ); } } } + } } } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index d5bc5ad236b5..e30a67c166b5 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -415,7 +415,7 @@ public void restoreShard( } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return null; } diff --git a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java index 3ee2b56f5d69..a64283c8554b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/InternalSnapshotsInfoServiceTests.java @@ -128,7 +128,7 @@ public void testSnapshotShardSizes() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { assertThat(indexId.getName(), equalTo(indexName)); assertThat(shardId.id(), allOf(greaterThanOrEqualTo(0), lessThan(numberOfShards))); safeAwait(latch); @@ -192,7 +192,7 @@ public void testErroneousSnapshotShardSizes() throws Exception { final Map results = new ConcurrentHashMap<>(); final Repository mockRepository = new 
FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { final InternalSnapshotsInfoService.SnapshotShard snapshotShard = new InternalSnapshotsInfoService.SnapshotShard( new Snapshot("_repo", snapshotId), indexId, @@ -280,7 +280,7 @@ public void testNoLongerMaster() throws Exception { final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return IndexShardSnapshotStatus.newDone(0L, 0L, 0, 0, 0L, randomNonNegativeLong(), null); } }; @@ -316,7 +316,7 @@ public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, In public void testCleanUpSnapshotShardSizes() throws Exception { final Repository mockRepository = new FilterRepository(mock(Repository.class)) { @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { if (randomBoolean()) { throw new SnapshotException(new Snapshot("_repo", snapshotId), "simulated"); } else { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java index 12f5989d560a..181b6c82379e 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java @@ -134,7 +134,7 @@ public boolean 
isReadOnly() { public void snapshotShard(SnapshotShardContext context) {} @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { + public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotId, IndexId indexId, ShardId shardId) { return null; } diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index b68a115850fe..8b8af3dae2fe 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -531,7 +531,7 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() new SnapshotId(CcrRepository.LATEST, CcrRepository.LATEST), new IndexId(indexStats.getIndex(), indexStats.getUuid()), new ShardId(new Index(indexStats.getIndex(), indexStats.getUuid()), shardId) - ).asCopy(); + ); assertThat(indexShardSnapshotStatus, notNullValue()); assertThat(indexShardSnapshotStatus.getStage(), is(IndexShardSnapshotStatus.Stage.DONE)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 35b02b26eca9..5868bba00229 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -504,7 +504,7 @@ void acquireRetentionLeaseOnLeader( private static final ShardGeneration DUMMY_GENERATION = new ShardGeneration(""); @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, IndexId index, ShardId shardId) { + public IndexShardSnapshotStatus.Copy 
getShardSnapshotStatus(SnapshotId snapshotId, IndexId index, ShardId shardId) { assert SNAPSHOT_ID.equals(snapshotId) : "RemoteClusterRepository only supports " + SNAPSHOT_ID + " as the SnapshotId"; final String leaderIndex = index.getName(); final IndicesStatsResponse response = getRemoteClusterClient().admin() From 71a47c8cac2c67951048079ec020f7df6cb18fc3 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 1 Nov 2023 13:00:49 +0000 Subject: [PATCH 11/47] Actually call onPublishSuccess during cluster state tests (#101633) --- ...rdFailedClusterStateTaskExecutorTests.java | 3 +- ...dStartedClusterStateTaskExecutorTests.java | 3 +- .../coordination/NodeJoinExecutorTests.java | 10 +++--- .../allocation/InSyncAllocationIdTests.java | 3 +- .../indices/cluster/ClusterStateChanges.java | 13 +++---- .../ingest/IngestServiceTests.java | 5 +-- .../snapshots/SnapshotsServiceTests.java | 6 ++-- .../ClusterStateTaskExecutorUtils.java | 36 ++++++++++++------- 8 files changed, 47 insertions(+), 32 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 588260d79d40..bf27d7fe73c4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -354,6 +355,6 @@ private static List toTasks( } private static ActionListener createTestListener() { - return ActionListener.running(() -> { throw new 
AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java index 8aa10227dd66..ea2bc79542e4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStartedClusterStateTaskExecutorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry; @@ -408,6 +409,6 @@ private ClusterState executeTasks(final ClusterState state, final List ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java index 559c0a362805..46f03aef76b9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterName; import 
org.elasticsearch.cluster.ClusterState; @@ -71,9 +72,7 @@ public class NodeJoinExecutorTests extends ESTestCase { - private static final ActionListener NOT_COMPLETED_LISTENER = ActionListener.running(() -> { - throw new AssertionError("should not complete publication"); - }); + private static final ActionListener NOT_COMPLETED_LISTENER = ActionTestUtils.assertNoFailureListener(t -> {}); public void testPreventJoinClusterWithNewerIndices() { Settings.builder().build(); @@ -481,7 +480,10 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, - NOT_COMPLETED_LISTENER, + ActionListener.wrap( + r -> fail("Task should have failed"), + e -> assertThat(e.getMessage(), containsString("found existing node")) + ), executorTerm ) ) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java index d3c49d23ce20..d2390dfda788 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/InSyncAllocationIdTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -419,6 +420,6 @@ private ClusterState createOnePrimaryOneReplicaClusterState(AllocationService al } private static ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git 
a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index c81ea1579828..6f57707cd9e7 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.PlainActionFuture; @@ -413,9 +414,7 @@ public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNo new CompatibilityVersions(transportVersion, Map.of()), Set.of(), DUMMY_REASON, - ActionListener.running(() -> { - throw new AssertionError("should not complete publication"); - }), + createTestListener(), clusterState.term() ) ) @@ -435,9 +434,7 @@ public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List { - throw new AssertionError("should not complete publication"); - }) + createTestListener() ) ), clusterState.term() + between(1, 10) @@ -552,7 +549,7 @@ private , Response extends ActionResp } } - private ActionListener createTestListener() { - return ActionListener.running(() -> { throw new AssertionError("task should not complete"); }); + private static ActionListener createTestListener() { + return ActionTestUtils.assertNoFailureListener(t -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 3b114cf0a618..624313114149 100644 --- 
a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.Client; @@ -2664,7 +2665,7 @@ private IngestStats.Stats getPipelineStats(List pipeli } private static List oneTask(DeletePipelineRequest request) { - return List.of(new IngestService.DeletePipelineClusterStateUpdateTask(ActionListener.running(() -> fail("not called")), request)); + return List.of(new IngestService.DeletePipelineClusterStateUpdateTask(ActionTestUtils.assertNoFailureListener(t -> {}), request)); } private static ClusterState executeDelete(DeletePipelineRequest request, ClusterState clusterState) { @@ -2680,7 +2681,7 @@ private static void executeFailingDelete(DeletePipelineRequest request, ClusterS } private static List oneTask(PutPipelineRequest request) { - return List.of(new IngestService.PutPipelineClusterStateUpdateTask(ActionListener.running(() -> fail("not called")), request)); + return List.of(new IngestService.PutPipelineClusterStateUpdateTask(ActionTestUtils.assertNoFailureListener(t -> {}), request)); } private static ClusterState executePut(PutPipelineRequest request, ClusterState clusterState) { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java index 1b8f775bfb23..eb95e8212057 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java @@ -8,7 +8,7 @@ package 
org.elasticsearch.snapshots; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -433,7 +433,7 @@ private static SnapshotsService.ShardSnapshotUpdate successUpdate(Snapshot snaps shardId, null, successfulShardStatus(nodeId), - ActionListener.running(() -> fail("should not complete publication")) + ActionTestUtils.assertNoFailureListener(t -> {}) ); } @@ -443,7 +443,7 @@ private static SnapshotsService.ShardSnapshotUpdate successUpdate(Snapshot snaps null, shardId, successfulShardStatus(nodeId), - ActionListener.running(() -> fail("should not complete publication")) + ActionTestUtils.assertNoFailureListener(t -> {}) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java index 784b82c713d3..b9528e47745f 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java @@ -16,8 +16,8 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasable; +import java.util.Collection; import java.util.function.Consumer; -import java.util.stream.StreamSupport; import static org.elasticsearch.test.ESTestCase.fail; import static org.junit.Assert.assertFalse; @@ -35,7 +35,7 @@ private ClusterStateTaskExecutorUtils() { public static ClusterState executeAndAssertSuccessful( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> fail(e)); } @@ -43,7 +43,7 @@ public static ClusterState 
executeAndAssert public static ClusterState executeAndThrowFirstFailure( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> { throw e; }); } @@ -51,7 +51,7 @@ public static ClusterState executeAndThrowF public static ClusterState executeIgnoringFailures( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks + Collection tasks ) throws Exception { return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> {}); } @@ -59,26 +59,30 @@ public static ClusterState executeIgnoringF public static ClusterState executeHandlingResults( ClusterState originalState, ClusterStateTaskExecutor executor, - Iterable tasks, + Collection tasks, CheckedConsumer onTaskSuccess, CheckedBiConsumer onTaskFailure ) throws Exception { - final var taskContexts = StreamSupport.stream(tasks.spliterator(), false).>map( - TestTaskContext::new - ).toList(); - final var resultingState = executor.execute( + final var taskContexts = tasks.stream().map(TestTaskContext::new).toList(); + ClusterState resultingState = executor.execute( new ClusterStateTaskExecutor.BatchExecutionContext<>(originalState, taskContexts, () -> null) ); assertNotNull(resultingState); - for (final var taskContext : taskContexts) { - final var testTaskContext = (TestTaskContext) taskContext; - assertFalse(taskContext + " should have completed", testTaskContext.incomplete()); + boolean allSuccess = true; + for (final var testTaskContext : taskContexts) { + assertFalse(testTaskContext + " should have completed", testTaskContext.incomplete()); if (testTaskContext.succeeded()) { onTaskSuccess.accept(testTaskContext.getTask()); } else { onTaskFailure.accept(testTaskContext.getTask(), testTaskContext.getFailure()); + allSuccess = false; } } + + if (allSuccess) { + taskContexts.forEach(TestTaskContext::onPublishSuccess); + } + 
return resultingState; } @@ -86,6 +90,7 @@ private static class TestTaskContext impleme private final T task; private Exception failure; private boolean succeeded; + private Runnable onPublishSuccess; TestTaskContext(T task) { this.task = task; @@ -109,6 +114,11 @@ Exception getFailure() { return failure; } + void onPublishSuccess() { + assert onPublishSuccess != null; + onPublishSuccess.run(); + } + @Override public void onFailure(Exception failure) { assert incomplete(); @@ -123,6 +133,7 @@ public void success(Runnable onPublishSuccess, ClusterStateAckListener clusterSt assert clusterStateAckListener != null; assert task == clusterStateAckListener || (task instanceof ClusterStateAckListener == false); this.succeeded = true; + this.onPublishSuccess = onPublishSuccess; } @Override @@ -131,6 +142,7 @@ public void success(Runnable onPublishSuccess) { assert onPublishSuccess != null; assert task instanceof ClusterStateAckListener == false; this.succeeded = true; + this.onPublishSuccess = onPublishSuccess; } @Override From 0ddc3baa0771c6b6b03d52a78c0970fcebb292bb Mon Sep 17 00:00:00 2001 From: William Brafford Date: Wed, 1 Nov 2023 10:06:53 -0400 Subject: [PATCH 12/47] Get rid of "this-escape" warning in NodeShutdownTasksIT (#101657) * Make test class final instead of suppressing If this class is final, there can't be any subclasses, and therefore no subclass initialization bugs and no JDK warning. 
--- .../org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java index 15e16d2a8691..d6bef01672fa 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java @@ -160,7 +160,7 @@ public List getNamedXContent() { } } - public static class TaskExecutor extends PersistentTasksExecutor implements ClusterStateListener { + public static final class TaskExecutor extends PersistentTasksExecutor implements ClusterStateListener { private final PersistentTasksService persistentTasksService; From e9179ae71ee261471e9257c04898b6bcb3b7ad83 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 1 Nov 2023 10:10:44 -0400 Subject: [PATCH 13/47] Miscellaneous tidying towards removing the HLRC (#101604) --- .../client/RestHighLevelClient.java | 59 ++++++------------- 1 file changed, 17 insertions(+), 42 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index bfc1b1e6be96..85803ec68de1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -232,15 +232,7 @@ public class RestHighLevelClient implements Closeable { * {@link RestClient} to be used to perform requests. 
*/ public RestHighLevelClient(RestClientBuilder restClientBuilder) { - this(restClientBuilder, Collections.emptyList()); - } - - /** - * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the - * {@link RestClient} to be used to perform requests and parsers for custom response sections added to Elasticsearch through plugins. - */ - protected RestHighLevelClient(RestClientBuilder restClientBuilder, List namedXContentEntries) { - this(restClientBuilder.build(), RestClient::close, namedXContentEntries); + this(restClientBuilder.build(), RestClient::close, Collections.emptyList()); } /** @@ -265,7 +257,7 @@ protected RestHighLevelClient( * The consumer argument allows to control what needs to be done when the {@link #close()} method is called. * Also subclasses can provide parsers for custom response sections added to Elasticsearch through plugins. */ - protected RestHighLevelClient( + private RestHighLevelClient( RestClient restClient, CheckedConsumer doClose, List namedXContentEntries, @@ -410,7 +402,7 @@ public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, Requ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. */ @Deprecated - protected final Resp performRequestAndParseEntity( + private Resp performRequestAndParseEntity( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -425,7 +417,7 @@ protected final Resp performRequestAndParseEnt * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. 
*/ @Deprecated - protected final Resp performRequest( + private Resp performRequest( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -439,23 +431,6 @@ protected final Resp performRequest( return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); } - /** - * Defines a helper method for performing a request. - */ - protected final Resp performRequest( - Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - Set ignores - ) throws IOException { - Optional validationException = request.validate(); - if (validationException != null && validationException.isPresent()) { - throw validationException.get(); - } - return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); - } - /** * Provides common functionality for performing a request. */ @@ -499,7 +474,7 @@ private Resp internalPerformRequest( * @return Cancellable instance that may be used to cancel the request */ @Deprecated - protected final Cancellable performRequestAsyncAndParseEntity( + private Cancellable performRequestAsyncAndParseEntity( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -523,7 +498,7 @@ protected final Cancellable performRequestAsyn * @return Cancellable instance that may be used to cancel the request */ @Deprecated - protected final Cancellable performRequestAsync( + private Cancellable performRequestAsync( Req request, CheckedFunction requestConverter, RequestOptions options, @@ -564,7 +539,7 @@ private Cancellable internalPerformRequestAsync( return performClientRequestAsync(req, responseListener); } - final ResponseListener wrapResponseListener( + private ResponseListener wrapResponseListener( CheckedFunction responseConverter, ActionListener actionListener, Set ignores @@ -611,7 +586,7 @@ public void onFailure(Exception exception) { * that wraps the original {@link ResponseException}. 
The potential exception obtained while parsing is added to the returned * exception as a suppressed exception. This method is guaranteed to not throw any exception eventually thrown while parsing. */ - protected final ElasticsearchStatusException parseResponseException(ResponseException responseException) { + private ElasticsearchStatusException parseResponseException(ResponseException responseException) { Response response = responseException.getResponse(); HttpEntity entity = response.getEntity(); ElasticsearchStatusException elasticsearchException; @@ -631,7 +606,7 @@ protected final ElasticsearchStatusException parseResponseException(ResponseExce return elasticsearchException; } - protected final Resp parseEntity(final HttpEntity entity, final CheckedFunction entityParser) + private Resp parseEntity(final HttpEntity entity, final CheckedFunction entityParser) throws IOException { if (entity == null) { throw new IllegalStateException("Response body expected but not returned"); @@ -735,7 +710,7 @@ private Cancellable performClientRequestAsync(Request request, ResponseListener ListenableFuture> versionCheck = getVersionValidationFuture(); // Create a future that tracks cancellation of this method's result and forwards cancellation to the actual LLRC request. - CompletableFuture cancellationForwarder = new CompletableFuture(); + CompletableFuture cancellationForwarder = new CompletableFuture<>(); Cancellable result = new Cancellable() { @Override public void cancel() { @@ -754,7 +729,7 @@ void runIfNotCancelled(Runnable runnable) { // Send the request after we have done the version compatibility check. Note that if it has already happened, the listener will // be called immediately on the same thread with no asynchronous scheduling overhead. 
- versionCheck.addListener(new ActionListener>() { + versionCheck.addListener(new ActionListener<>() { @Override public void onResponse(Optional validation) { if (validation.isPresent() == false) { @@ -779,13 +754,13 @@ public void onFailure(Exception e) { }); return result; - }; + } /** * Go through all the request's existing headers, looking for {@code headerName} headers and if they exist, * changing them to use version compatibility. If no request headers are changed, modify the entity type header if appropriate */ - boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHeader, String headerName) { + private boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHeader, String headerName) { // Modify any existing "Content-Type" headers on the request to use the version compatibility, if available boolean contentTypeModified = false; for (Header header : new ArrayList<>(newOptions.getHeaders())) { @@ -807,7 +782,7 @@ boolean addCompatibilityFor(RequestOptions.Builder newOptions, Header entityHead * Modify the given header to be version compatible, if necessary. * Returns true if a modification was made, false otherwise. */ - boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String headerName) { + private boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String headerName) { for (EntityType type : EntityType.values()) { final String headerValue = header.getValue(); if (headerValue.startsWith(type.header())) { @@ -825,7 +800,7 @@ boolean modifyHeader(RequestOptions.Builder newOptions, Header header, String he * modifying the "Content-Type" and "Accept" headers if present, or modifying the header based * on the request's entity type. */ - void modifyRequestForCompatibility(Request request) { + private void modifyRequestForCompatibility(Request request) { final Header entityHeader = request.getEntity() == null ? 
null : request.getEntity().getContentType(); final RequestOptions.Builder newOptions = request.getOptions().toBuilder(); @@ -982,7 +957,7 @@ private Optional getVersionValidation(Response response) throws IOExcept return Optional.empty(); } - static List getDefaultNamedXContents() { + private static List getDefaultNamedXContents() { Map> map = new HashMap<>(); map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c)); @@ -1068,7 +1043,7 @@ static List getDefaultNamedXContents() { /** * Loads and returns the {@link NamedXContentRegistry.Entry} parsers provided by plugins. */ - static List getProvidedNamedXContents() { + private static List getProvidedNamedXContents() { List entries = new ArrayList<>(); for (NamedXContentProvider service : ServiceLoader.load(NamedXContentProvider.class)) { entries.addAll(service.getNamedXContentParsers()); From b22eae5fa70b05bc5c40807cbbab02047357d2ac Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Wed, 1 Nov 2023 07:34:32 -0700 Subject: [PATCH 14/47] Fix race condition in SnapshotsService (#101652) * Fix race condition in SnapshotsService * Update docs/changelog/101652.yaml --- docs/changelog/101652.yaml | 5 +++++ .../org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java | 1 - .../java/org/elasticsearch/snapshots/SnapshotsService.java | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/101652.yaml diff --git a/docs/changelog/101652.yaml b/docs/changelog/101652.yaml new file mode 100644 index 000000000000..79e3167696ae --- /dev/null +++ b/docs/changelog/101652.yaml @@ -0,0 +1,5 @@ +pr: 101652 +summary: Fix race condition in `SnapshotsService` +area: Snapshot/Restore +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index d68301a31072..ca522064e3d0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -1066,7 +1066,6 @@ public void testEquivalentDeletesAreDeduplicated() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99355") public void testMasterFailoverOnFinalizationLoop() throws Exception { internalCluster().startMasterOnlyNodes(3); final String dataNode = internalCluster().startDataOnlyNode(); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 3317efd7675b..8a15572c3e7f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -164,7 +164,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus /** * Listeners for snapshot deletion keyed by delete uuid as returned from {@link SnapshotDeletionsInProgress.Entry#uuid()} */ - private final Map>> snapshotDeletionListeners = new HashMap<>(); + private final Map>> snapshotDeletionListeners = new ConcurrentHashMap<>(); // Set of repositories currently running either a snapshot finalization or a snapshot delete. 
private final Set currentlyFinalizing = Collections.synchronizedSet(new HashSet<>()); From 4fa18ff42b0b1ae77e42891617aa07b659de9142 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 1 Nov 2023 16:24:28 +0100 Subject: [PATCH 15/47] Make forbidden apis check cacheable and cc compatible (#101217) * Make forbidden apis check cacheable and cc compatible * Port CheckForbiddenApiTask to use worker api * Simplify runtime classpath for CheckForbiddenApisTask --- .../precommit/CheckForbiddenApisTask.java | 565 +++++++++++++++++- .../precommit/DependencyLicensesTask.java | 2 - .../ForbiddenApisPrecommitPlugin.java | 67 +-- .../internal/precommit/LoggerUsageTask.java | 2 +- client/rest/build.gradle | 11 +- client/sniffer/build.gradle | 8 +- client/test/build.gradle | 7 +- distribution/tools/server-cli/build.gradle | 4 +- x-pack/plugin/security/build.gradle | 7 +- x-pack/plugin/security/cli/build.gradle | 9 +- x-pack/plugin/sql/sql-client/build.gradle | 7 +- 11 files changed, 606 insertions(+), 83 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java index e158dd7c755c..194d0361980e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java @@ -8,25 +8,568 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; +import de.thetaphi.forbiddenapis.Checker; +import de.thetaphi.forbiddenapis.Constants; +import de.thetaphi.forbiddenapis.Logger; +import de.thetaphi.forbiddenapis.ParseException; +import groovy.lang.Closure; +import org.gradle.api.DefaultTask; +import org.gradle.api.GradleException; +import org.gradle.api.InvalidUserDataException; +import 
org.gradle.api.Transformer; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; +import org.gradle.api.file.FileTreeElement; +import org.gradle.api.file.ProjectLayout; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.logging.Logging; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.provider.SetProperty; +import org.gradle.api.specs.Spec; +import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.CompileClasspath; import org.gradle.api.tasks.IgnoreEmptyDirectories; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.Optional; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.VerificationTask; +import org.gradle.api.tasks.util.PatternFilterable; +import org.gradle.api.tasks.util.PatternSet; +import org.gradle.workers.WorkAction; +import org.gradle.workers.WorkParameters; +import org.gradle.workers.WorkQueue; +import org.gradle.workers.WorkerExecutor; +import org.jetbrains.annotations.NotNull; -/** - * This implementation is used to fix gradle 8 compatibility of - * the CheckForbiddenApis task which is built with gradle 4 support - * in mind. 
- * */ -public class CheckForbiddenApisTask extends CheckForbiddenApis { +import java.io.File; +import java.io.IOException; +import java.lang.annotation.RetentionPolicy; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; + +import javax.inject.Inject; + +import static de.thetaphi.forbiddenapis.Checker.Option.DISABLE_CLASSLOADING_CACHE; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_MISSING_CLASSES; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_UNRESOLVABLE_SIGNATURES; +import static de.thetaphi.forbiddenapis.Checker.Option.FAIL_ON_VIOLATION; + +@CacheableTask +public abstract class CheckForbiddenApisTask extends DefaultTask implements PatternFilterable, VerificationTask, Constants { + + public static final Set BUNDLED_SIGNATURE_DEFAULTS = Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out"); + + private static final String NL = System.getProperty("line.separator", "\n"); + private final PatternSet patternSet = new PatternSet().include("**/*.class"); + private FileCollection classesDirs; + private FileCollection classpath; + private String targetCompatibility; + + private FileCollection signaturesFiles; + + private final ObjectFactory objectFactory; + private ProjectLayout projectLayout; + + private List signatures = new ArrayList<>(); + + private File resourcesDir; + + private boolean ignoreFailures = false; + + @Input + @Optional + abstract SetProperty getBundledSignatures(); + + /** + * List of a custom Java annotations (full class names) that are used in the checked + * code to suppress errors. Those annotations must have at least + * {@link RetentionPolicy#CLASS}. 
They can be applied to classes, their methods, + * or fields. By default, {@code @de.thetaphi.forbiddenapis.SuppressForbidden} + * can always be used, but needs the {@code forbidden-apis.jar} file in classpath + * of compiled project, which may not be wanted. + * Instead of a full class name, a glob pattern may be used (e.g., + * {@code **.SuppressForbidden}). + */ + @Input + @Optional + abstract SetProperty getSuppressAnnotations(); + + @Inject + public CheckForbiddenApisTask(ObjectFactory factory, ProjectLayout projectLayout) { + signaturesFiles = factory.fileCollection(); + this.objectFactory = factory; + this.projectLayout = projectLayout; + } + + @OutputFile + public File getSuccessMarker() { + return new File(projectLayout.getBuildDirectory().getAsFile().get(), "markers/" + this.getName()); + } + + /** + * Directories with the class files to check. + * Defaults to current sourseSet's output directory (Gradle 3) or output directories (Gradle 4.0+). + */ + @Internal + public FileCollection getClassesDirs() { + return classesDirs; + } + + /** @see #getClassesDirs() */ + public void setClassesDirs(FileCollection classesDirs) { + Objects.requireNonNull(classesDirs, "classesDirs"); + this.classesDirs = classesDirs; + } + + /** Returns the pattern set to match against class files in {@link #getClassesDirs()}. */ + @Internal + public PatternSet getPatternSet() { + return patternSet; + } + + /** @see #getPatternSet() */ + public void setPatternSet(PatternSet patternSet) { + patternSet.copyFrom(patternSet); + } + + /** + * A {@link FileCollection} used to configure the classpath. + * Defaults to current sourseSet's compile classpath. + */ + @CompileClasspath + public FileCollection getClasspath() { + return classpath; + } + + /** @see #getClasspath */ + public void setClasspath(FileCollection classpath) { + Objects.requireNonNull(classpath, "classpath"); + this.classpath = classpath; + } /** - * Add additional annotation to make this input gradle 8 compliant. 
- * Otherwise we see a deprecation warning here starting with gradle 7.4 - * */ + * A {@link FileCollection} containing all files, which contain signatures and comments for forbidden API calls. + * The signatures are resolved against {@link #getClasspath()}. + */ + @InputFiles + @Optional + @PathSensitive(PathSensitivity.RELATIVE) + public FileCollection getSignaturesFiles() { + return signaturesFiles; + } + + @InputDirectory + @PathSensitive(PathSensitivity.RELATIVE) + public File getResourcesDir() { + return resourcesDir; + } + + public void setResourcesDir(File resourcesDir) { + this.resourcesDir = resourcesDir; + } + + /** @see #getSignaturesFiles */ + public void setSignaturesFiles(FileCollection signaturesFiles) { + this.signaturesFiles = signaturesFiles; + } + + public void modifyBundledSignatures(Transformer, Set> transformer) { + getBundledSignatures().set(transformer.transform(getBundledSignatures().get())); + } + + public void replaceSignatureFiles(String... signatureFiles) { + List resources = new ArrayList<>(signatureFiles.length); + for (Object name : signatureFiles) { + resources.add(new File(resourcesDir, "forbidden/" + name + ".txt")); + } + setSignaturesFiles(objectFactory.fileCollection().from(resources)); + } + + public void addSignatureFiles(String... signatureFiles) { + List resources = new ArrayList<>(signatureFiles.length); + for (Object name : signatureFiles) { + resources.add(new File(resourcesDir, "forbidden/" + name + ".txt")); + } + setSignaturesFiles(objectFactory.fileCollection().from(getSignaturesFiles()).from(resources)); + + } + + /** + * Gives multiple API signatures that are joined with newlines and + * parsed like a single {@link #getSignaturesFiles()}. + * The signatures are resolved against {@link #getClasspath()}. 
+ */ + @Input + @Optional + public List getSignatures() { + return signatures; + } + + /** @see #getSignatures */ + public void setSignatures(List signatures) { + this.signatures = signatures; + } + + /** + * {@inheritDoc} + *

+ * This setting is to conform with {@link VerificationTask} interface. + * Default is {@code false}. + */ @Override + @Input + public boolean getIgnoreFailures() { + return ignoreFailures; + } + + @Override + public void setIgnoreFailures(boolean ignoreFailures) { + this.ignoreFailures = ignoreFailures; + } + + /** + * The default compiler target version used to expand references to bundled JDK signatures. + * E.g., if you use "jdk-deprecated", it will expand to this version. + * This setting should be identical to the target version used in the compiler task. + * Defaults to {@code project.targetCompatibility}. + */ + @Input + @Optional + public String getTargetCompatibility() { + return targetCompatibility; + } + + /** @see #getTargetCompatibility */ + public void setTargetCompatibility(String targetCompatibility) { + this.targetCompatibility = targetCompatibility; + } + + // PatternFilterable implementation: + + /** + * {@inheritDoc} + *

+ * Set of patterns matching all class files to be parsed from the classesDirectory. + * Can be changed to e.g. exclude several files (using excludes). + * The default is a single include with pattern '**/*.class' + */ + @Override + @Internal + public Set getIncludes() { + return getPatternSet().getIncludes(); + } + + @Override + public CheckForbiddenApisTask setIncludes(Iterable includes) { + getPatternSet().setIncludes(includes); + return this; + } + + /** + * {@inheritDoc} + *

+ * Set of patterns matching class files to be excluded from checking. + */ + @Override + @Internal + public Set getExcludes() { + return getPatternSet().getExcludes(); + } + + @Override + public CheckForbiddenApisTask setExcludes(Iterable excludes) { + getPatternSet().setExcludes(excludes); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(String... arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(Iterable arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(Spec arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask exclude(@SuppressWarnings("rawtypes") Closure arg0) { + getPatternSet().exclude(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(String... arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(Iterable arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(Spec arg0) { + getPatternSet().include(arg0); + return this; + } + + @Override + public CheckForbiddenApisTask include(@SuppressWarnings("rawtypes") Closure arg0) { + getPatternSet().include(arg0); + return this; + } + + /** Returns the classes to check. */ + @InputFiles + @SkipWhenEmpty @IgnoreEmptyDirectories + @PathSensitive(PathSensitivity.RELATIVE) public FileTree getClassFiles() { - return super.getClassFiles(); + return getClassesDirs().getAsFileTree().matching(getPatternSet()); + } + + @Inject + public abstract WorkerExecutor getWorkerExecutor(); + + /** Executes the forbidden apis task. 
*/ + @TaskAction + public void checkForbidden() { + WorkQueue workQueue = getWorkerExecutor().noIsolation(); + workQueue.submit(ForbiddenApisCheckWorkAction.class, parameters -> { + parameters.getClasspath().setFrom(getClasspath()); + parameters.getClassDirectories().setFrom(getClassesDirs()); + parameters.getClassFiles().from(getClassFiles().getFiles()); + parameters.getSuppressAnnotations().set(getSuppressAnnotations()); + parameters.getBundledSignatures().set(getBundledSignatures()); + parameters.getSignatures().set(getSignatures()); + parameters.getTargetCompatibility().set(getTargetCompatibility()); + parameters.getIgnoreFailures().set(getIgnoreFailures()); + parameters.getSuccessMarker().set(getSuccessMarker()); + }); + } + + abstract static class ForbiddenApisCheckWorkAction implements WorkAction { + + private final org.gradle.api.logging.Logger logger = Logging.getLogger(getClass()); + + @Inject + public ForbiddenApisCheckWorkAction() {} + + private boolean checkIsUnsupportedJDK(Checker checker) { + if (checker.isSupportedJDK == false) { + final String msg = String.format( + Locale.ENGLISH, + "Your Java runtime (%s %s) is not supported by the forbiddenapis plugin. 
Please run the checks with a supported JDK!", + System.getProperty("java.runtime.name"), + System.getProperty("java.runtime.version") + ); + logger.warn(msg); + return true; + } + return false; + } + + @Override + public void execute() { + + final URLClassLoader urlLoader = createClassLoader(getParameters().getClasspath(), getParameters().getClassDirectories()); + try { + final Checker checker = createChecker(urlLoader); + if (checkIsUnsupportedJDK(checker)) { + return; + } + + final Set suppressAnnotations = getParameters().getSuppressAnnotations().get(); + for (String a : suppressAnnotations) { + checker.addSuppressAnnotation(a); + } + + try { + final Set bundledSignatures = getParameters().getBundledSignatures().get(); + if (bundledSignatures.isEmpty() == false) { + final String bundledSigsJavaVersion = getParameters().getTargetCompatibility().get(); + if (bundledSigsJavaVersion == null) { + logger.warn( + "The 'targetCompatibility' project or task property is missing. " + + "Trying to read bundled JDK signatures without compiler target. " + + "You have to explicitly specify the version in the resource name." 
+ ); + } + for (String bs : bundledSignatures) { + checker.addBundledSignatures(bs, bundledSigsJavaVersion); + } + } + + final FileCollection signaturesFiles = getParameters().getSignaturesFiles(); + if (signaturesFiles != null) for (final File f : signaturesFiles) { + checker.parseSignaturesFile(f); + } + final List signatures = getParameters().getSignatures().get(); + if ((signatures != null) && !signatures.isEmpty()) { + final StringBuilder sb = new StringBuilder(); + for (String line : signatures) { + sb.append(line).append(NL); + } + checker.parseSignaturesString(sb.toString()); + } + } catch (IOException ioe) { + throw new GradleException("IO problem while reading files with API signatures.", ioe); + } catch (ParseException pe) { + throw new InvalidUserDataException("Parsing signatures failed: " + pe.getMessage(), pe); + } + + if (checker.hasNoSignatures()) { + if (checker.noSignaturesFilesParsed()) { + throw new InvalidUserDataException( + "No signatures were added to task; use properties 'signatures', 'bundledSignatures', 'signaturesURLs', and/or 'signaturesFiles' to define those!" 
+ ); + } else { + logger.info("Skipping execution because no API signatures are available."); + return; + } + } + + try { + checker.addClassesToCheck(getParameters().getClassFiles()); + } catch (IOException ioe) { + throw new GradleException("Failed to load one of the given class files.", ioe); + } + checker.run(); + writeMarker(getParameters().getSuccessMarker().getAsFile().get()); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + // Close the classloader to free resources: + try { + if (urlLoader != null) urlLoader.close(); + } catch (IOException ioe) { + // getLogger().warn("Cannot close classloader: ".concat(ioe.toString())); + } + } + } + + private void writeMarker(File successMarker) throws IOException { + Files.write(successMarker.toPath(), new byte[] {}, StandardOpenOption.CREATE); + } + + private URLClassLoader createClassLoader(FileCollection classpath, FileCollection classesDirs) { + if (classesDirs == null || classpath == null) { + throw new InvalidUserDataException("Missing 'classesDirs' or 'classpath' property."); + } + + final Set cpElements = new LinkedHashSet<>(); + cpElements.addAll(classpath.getFiles()); + cpElements.addAll(classesDirs.getFiles()); + final URL[] urls = new URL[cpElements.size()]; + try { + int i = 0; + for (final File cpElement : cpElements) { + urls[i++] = cpElement.toURI().toURL(); + } + assert i == urls.length; + } catch (MalformedURLException mfue) { + throw new InvalidUserDataException("Failed to build classpath URLs.", mfue); + } + + return URLClassLoader.newInstance(urls, ClassLoader.getSystemClassLoader()); + } + + @NotNull + private Checker createChecker(URLClassLoader urlLoader) { + final EnumSet options = EnumSet.noneOf(Checker.Option.class); + options.add(FAIL_ON_MISSING_CLASSES); + if (getParameters().getIgnoreFailures().get() == false) { + options.add(FAIL_ON_VIOLATION); + } + options.add(FAIL_ON_UNRESOLVABLE_SIGNATURES); + options.add(DISABLE_CLASSLOADING_CACHE); + final Checker checker = 
new Checker(new GradleForbiddenApiLogger(logger), urlLoader, options); + return checker; + } + + private static class GradleForbiddenApiLogger implements Logger { + + private final org.gradle.api.logging.Logger delegate; + + GradleForbiddenApiLogger(org.gradle.api.logging.Logger delegate) { + this.delegate = delegate; + } + + @Override + public void error(String msg) { + delegate.error(msg); + } + + @Override + public void warn(String msg) { + delegate.warn(msg); + } + + @Override + public void info(String msg) { + delegate.info(msg); + } + + @Override + public void debug(String msg) { + delegate.debug(msg); + } + }; } + + interface Parameters extends WorkParameters { + ConfigurableFileCollection getClassDirectories(); + + ConfigurableFileCollection getClassFiles(); + + ConfigurableFileCollection getClasspath(); + + SetProperty getSuppressAnnotations(); + + RegularFileProperty getSuccessMarker(); + + ConfigurableFileCollection getSignaturesFiles(); + + SetProperty getBundledSignatures(); + + Property getTargetCompatibility(); + + Property getIgnoreFailures(); + + ListProperty getSignatures(); + + } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java index 71de2626d5fc..092230a2b12e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/DependencyLicensesTask.java @@ -88,8 +88,6 @@ public class DependencyLicensesTask extends DefaultTask { private final Logger logger = Logging.getLogger(getClass()); - private static final String SHA_EXTENSION = ".sha1"; - // TODO: we should be able to default this to eg compile deps, but we need to move the licenses // check from distribution to core (ie this should only be run on java projects) /** diff 
--git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java index 96fb11214902..e24dd5ab2094 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java @@ -8,50 +8,37 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension; -import groovy.lang.Closure; - import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; import org.gradle.api.Project; import org.gradle.api.Task; -import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.plugins.JavaBasePlugin; -import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.specs.Specs; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskProvider; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; +import java.io.File; import java.util.Set; -import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_EXTENSION_NAME; import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; +import static org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask.BUNDLED_SIGNATURE_DEFAULTS; public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin { + @Override public TaskProvider createTask(Project project) { project.getPluginManager().apply(JavaBasePlugin.class); - // create Extension for defaults: - var checkForbiddenApisExtension = project.getExtensions() - .create(FORBIDDEN_APIS_EXTENSION_NAME, 
CheckForbiddenApisExtension.class, project); - // Create a convenience task for all checks (this does not conflict with extension, as it has higher priority in DSL): var forbiddenTask = project.getTasks() .register(FORBIDDEN_APIS_TASK_NAME, task -> { task.setDescription("Runs forbidden-apis checks."); }); - JavaPluginExtension javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class); - // Define our tasks (one for each SourceSet): - TaskProvider resourcesTask = project.getTasks() .register("forbiddenApisResources", ExportElasticsearchBuildResourcesTask.class); - Path resourcesDir = project.getBuildDir().toPath().resolve("forbidden-apis-config"); + File resourcesDir = project.getLayout().getBuildDirectory().dir("forbidden-apis-config").get().getAsFile(); resourcesTask.configure(t -> { - t.setOutputDir(resourcesDir.toFile()); + t.setOutputDir(resourcesDir); t.copy("forbidden/jdk-signatures.txt"); t.copy("forbidden/jdk-deprecated.txt"); t.copy("forbidden/es-all-signatures.txt"); @@ -65,60 +52,36 @@ public TaskProvider createTask(Project project) { String sourceSetTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); var sourceSetTask = project.getTasks().register(sourceSetTaskName, CheckForbiddenApisTask.class, t -> { t.setDescription("Runs forbidden-apis checks on '${sourceSet.name}' classes."); + t.setResourcesDir(resourcesDir); t.getOutputs().upToDateWhen(Specs.SATISFIES_ALL); t.setClassesDirs(sourceSet.getOutput().getClassesDirs()); t.dependsOn(resourcesTask); - t.setClasspath(sourceSet.getRuntimeClasspath().plus(sourceSet.getCompileClasspath()).plus(sourceSet.getOutput())); + t.setClasspath(sourceSet.getRuntimeClasspath().plus(sourceSet.getCompileClasspath())); t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); + t.getBundledSignatures().set(BUNDLED_SIGNATURE_DEFAULTS); t.setSignaturesFiles( project.files( - 
resourcesDir.resolve("forbidden/jdk-signatures.txt"), - resourcesDir.resolve("forbidden/es-all-signatures.txt"), - resourcesDir.resolve("forbidden/jdk-deprecated.txt") + resourcesDir.toPath().resolve("forbidden/jdk-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/es-all-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/jdk-deprecated.txt") ) ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + t.getSuppressAnnotations().set(Set.of("**.SuppressForbidden")); if (t.getName().endsWith("Test")) { t.setSignaturesFiles( t.getSignaturesFiles() .plus( project.files( - resourcesDir.resolve("forbidden/es-test-signatures.txt"), - resourcesDir.resolve("forbidden/http-signatures.txt") + resourcesDir.toPath().resolve("forbidden/es-test-signatures.txt"), + resourcesDir.toPath().resolve("forbidden/http-signatures.txt") ) ) ); } else { t.setSignaturesFiles( - t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) + t.getSignaturesFiles().plus(project.files(resourcesDir.toPath().resolve("forbidden/es-server-signatures.txt"))) ); } - ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); - ext.set("replaceSignatureFiles", new Closure(t) { - @Override - public Void call(Object... names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(project.files(resources)); - return null; - } - - }); - ext.set("addSignatureFiles", new Closure(t) { - @Override - public Void call(Object... 
names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); - return null; - } - }); - }); forbiddenTask.configure(t -> t.dependsOn(sourceSetTask)); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java index 0059913ad086..559d7536c310 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java @@ -52,7 +52,7 @@ public LoggerUsageTask(ObjectFactory objectFactory) { } @Inject - abstract public WorkerExecutor getWorkerExecutor(); + public abstract WorkerExecutor getWorkerExecutor(); @TaskAction public void runLoggerUsageTask() { diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 85d38b007e63..6006fae1c2d8 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.conventions.precommit.LicenseHeadersTask @@ -60,7 +60,7 @@ tasks.named("processResources").configure { ] } -tasks.withType(CheckForbiddenApis).configureEach { +tasks.withType(CheckForbiddenApisTask).configureEach { //client does not depend on server, so only jdk and http signatures should be checked replaceSignatureFiles('jdk-signatures', 'http-signatures') } @@ -71,8 +71,11 @@ tasks.named("forbiddenPatterns").configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { signatures -> + signatures -= 'jdk-non-portable' + signatures += 'jdk-internal' + signatures + } } // JarHell is part of es server, which we don't want to pull in diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 546e81445bb8..901917c7b25f 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -57,8 +57,12 @@ tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } + //client does not depend on server, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' } diff --git a/client/test/build.gradle b/client/test/build.gradle index 18eb16883ab1..9ee222b036cd 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -40,8 +40,11 @@ 
tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } //client does not depend on core, so only jdk signatures should be checked replaceSignatureFiles 'jdk-signatures' } diff --git a/distribution/tools/server-cli/build.gradle b/distribution/tools/server-cli/build.gradle index 3ab5e6e86f5b..623f9d40cd49 100644 --- a/distribution/tools/server-cli/build.gradle +++ b/distribution/tools/server-cli/build.gradle @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.build' @@ -20,7 +20,7 @@ tasks.named("test").configure { systemProperty "tests.security.manager", "false" } -tasks.withType(CheckForbiddenApis).configureEach { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 0b7de9a0996e..509d4d5012f5 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -186,8 +186,11 @@ tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisTest').configure { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } } // classes are 
missing, e.g. com.ibm.icu.lang.UCharacter diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 72c3abec8d3d..3e98dfe60ea2 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -1,4 +1,4 @@ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -52,7 +52,10 @@ if (BuildParams.inFipsJvm) { } // Forbiden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS. - tasks.withType(CheckForbiddenApis).configureEach { - bundledSignatures -= "jdk-non-portable" + tasks.withType(CheckForbiddenApisTask).configureEach { + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= "jdk-non-portable" + bundledSignatures + } } } diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index ac6600b09427..4a20e00666ea 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -23,8 +23,11 @@ tasks.named('forbiddenApisMain').configure { } tasks.named('forbiddenApisTest').configure { - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } } tasks.named("forbiddenPatterns").configure { From 6d6d972202732f0906812039b6a6411344f23442 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 1 Nov 2023 13:16:18 -0400 Subject: [PATCH 16/47] Drop ancient version checks from some Data Management code (#101672) --- .../datastreams/CreateDataStreamAction.java | 11 ++-------- 
.../action/MigrateToDataTiersResponse.java | 21 ++++++------------- .../xpack/core/ilm/AllocateAction.java | 7 ++----- .../SearchableSnapshotFeatureSetUsage.java | 15 ++++--------- 4 files changed, 14 insertions(+), 40 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index 68a4e0d0b04c..4ecb092f34d4 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.datastreams; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; @@ -66,20 +65,14 @@ public ActionRequestValidationException validate() { public Request(StreamInput in) throws IOException { super(in); this.name = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - this.startTime = in.readVLong(); - } else { - this.startTime = System.currentTimeMillis(); - } + this.startTime = in.readVLong(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(name); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeVLong(startTime); - } + out.writeVLong(startTime); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java index b9a5115a9fa1..ee2b81049b4d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.cluster.action; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -63,15 +62,9 @@ public MigrateToDataTiersResponse(StreamInput in) throws IOException { migratedPolicies = in.readStringCollectionAsList(); migratedIndices = in.readStringCollectionAsList(); dryRun = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_17_0)) { - migratedLegacyTemplates = in.readStringCollectionAsList(); - migratedComposableTemplates = in.readStringCollectionAsList(); - migratedComponentTemplates = in.readStringCollectionAsList(); - } else { - migratedLegacyTemplates = List.of(); - migratedComposableTemplates = List.of(); - migratedComponentTemplates = List.of(); - } + migratedLegacyTemplates = in.readStringCollectionAsList(); + migratedComposableTemplates = in.readStringCollectionAsList(); + migratedComponentTemplates = in.readStringCollectionAsList(); } @Override @@ -154,11 +147,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(migratedPolicies); out.writeStringCollection(migratedIndices); out.writeBoolean(dryRun); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_17_0)) { - out.writeStringCollection(migratedLegacyTemplates); - out.writeStringCollection(migratedComposableTemplates); - out.writeStringCollection(migratedComponentTemplates); - } + out.writeStringCollection(migratedLegacyTemplates); + out.writeStringCollection(migratedComposableTemplates); + out.writeStringCollection(migratedComponentTemplates); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java index 04a04b5ef4f4..311f3484900f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.TransportVersions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; @@ -123,7 +122,7 @@ public AllocateAction( public AllocateAction(StreamInput in) throws IOException { this( in.readOptionalVInt(), - in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0) ? in.readOptionalInt() : null, + in.readOptionalInt(), (Map) in.readGenericValue(), (Map) in.readGenericValue(), (Map) in.readGenericValue() @@ -153,9 +152,7 @@ public Map getRequire() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(numberOfReplicas); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeOptionalInt(totalShardsPerNode); - } + out.writeOptionalInt(totalShardsPerNode); out.writeGenericValue(include); out.writeGenericValue(exclude); out.writeGenericValue(require); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java index 250efe349d50..e1644e5113a2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotFeatureSetUsage.java @@ -28,13 +28,8 @@ public class SearchableSnapshotFeatureSetUsage extends 
XPackFeatureSet.Usage { public SearchableSnapshotFeatureSetUsage(StreamInput input) throws IOException { super(input); numberOfSearchableSnapshotIndices = input.readVInt(); - if (input.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - numberOfFullCopySearchableSnapshotIndices = input.readVInt(); - numberOfSharedCacheSearchableSnapshotIndices = input.readVInt(); - } else { - numberOfFullCopySearchableSnapshotIndices = 0; - numberOfSharedCacheSearchableSnapshotIndices = 0; - } + numberOfFullCopySearchableSnapshotIndices = input.readVInt(); + numberOfSharedCacheSearchableSnapshotIndices = input.readVInt(); } @Override @@ -46,10 +41,8 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(numberOfSearchableSnapshotIndices); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeVInt(numberOfFullCopySearchableSnapshotIndices); - out.writeVInt(numberOfSharedCacheSearchableSnapshotIndices); - } + out.writeVInt(numberOfFullCopySearchableSnapshotIndices); + out.writeVInt(numberOfSharedCacheSearchableSnapshotIndices); } public SearchableSnapshotFeatureSetUsage( From 382f338d75be1988187fdf0a593af46218573f68 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 1 Nov 2023 13:26:41 -0400 Subject: [PATCH 17/47] ESQL: Fix unreleased block in topn (#101648) This fixes a bug in the topn operator where it'll fail to de-track blocks if there's a failure while building the results from topn. 
Closes #101588 --- docs/changelog/101648.yaml | 6 +++ .../org/elasticsearch/compute/data/Block.java | 19 ++++++++ .../compute/operator/topn/TopNOperator.java | 18 +++++--- .../operator/TupleBlockSourceOperator.java | 31 +++++++------ .../operator/topn/TopNOperatorTests.java | 46 +++++++++++++++---- .../action/AbstractEsqlIntegTestCase.java | 5 +- .../esql/action/EsqlActionBreakerIT.java | 4 +- .../esql/planner/LocalExecutionPlanner.java | 6 ++- 8 files changed, 101 insertions(+), 34 deletions(-) create mode 100644 docs/changelog/101648.yaml diff --git a/docs/changelog/101648.yaml b/docs/changelog/101648.yaml new file mode 100644 index 000000000000..48e01739aabc --- /dev/null +++ b/docs/changelog/101648.yaml @@ -0,0 +1,6 @@ +pr: 101648 +summary: "ESQL: Fix unreleased block in topn" +area: ES|QL +type: bug +issues: + - 101588 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 8dca74109b2c..75b02ff911df 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; import java.util.List; @@ -208,6 +209,24 @@ interface Builder extends BlockLoader.Builder, Releasable { * Builds the block. This method can be called multiple times. */ Block build(); + + /** + * Build many {@link Block}s at once, releasing any partially built blocks + * if any fail. + */ + static Block[] buildAll(Block.Builder... 
builders) { + Block[] blocks = new Block[builders.length]; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.closeExpectNoException(blocks); + } + } + return blocks; + } } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 9657d6037676..2ebc9c82c6d9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -91,12 +91,6 @@ public long ramBytesUsed() { return SHALLOW_SIZE + keys.ramBytesUsed() + orderByCompositeKeyAscending.size() / Byte.SIZE + values.ramBytesUsed(); } - private void clear() { - keys.clear(); - orderByCompositeKeyAscending.clear(); - values.clear(); - } - @Override public void close() { Releasables.closeExpectNoException(keys, values); @@ -405,7 +399,17 @@ private Iterator toPages() { p++; if (p == size) { - result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + Block[] blocks = new Block[builders.length]; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.closeExpectNoException(blocks); + } + } + result.add(new Page(blocks)); Releasables.closeExpectNoException(builders); builders = null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index 92c6114d8689..e41c82b89772 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java 
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Tuple; @@ -44,23 +45,23 @@ public TupleBlockSourceOperator(BlockFactory blockFactory, List item = values.get(positionOffset + i); - if (item.v1() == null) { - blockBuilder1.appendNull(); - } else { - blockBuilder1.appendLong(item.v1()); - } - if (item.v2() == null) { - blockBuilder2.appendNull(); - } else { - blockBuilder2.appendLong(item.v2()); + try (var blockBuilder1 = blockFactory.newLongBlockBuilder(length); var blockBuilder2 = blockFactory.newLongBlockBuilder(length)) { + for (int i = 0; i < length; i++) { + Tuple item = values.get(positionOffset + i); + if (item.v1() == null) { + blockBuilder1.appendNull(); + } else { + blockBuilder1.appendLong(item.v1()); + } + if (item.v2() == null) { + blockBuilder2.appendNull(); + } else { + blockBuilder2.appendLong(item.v2()); + } } + currentPosition += length; + return new Page(Block.Builder.buildAll(blockBuilder1, blockBuilder2)); } - currentPosition += length; - return new Page(blockBuilder1.build(), blockBuilder2.build()); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 6c5bab9b8f78..f43873b4fdfd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.tests.util.RamUsageTester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; +import 
org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -36,6 +37,7 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.versionfield.Version; @@ -234,15 +236,29 @@ public long accumulateObject(Object o, long shallowSize, Map fiel public void testRandomTopN() { for (boolean asc : List.of(true, false)) { - int limit = randomIntBetween(1, 20); - List inputValues = randomList(0, 5000, ESTestCase::randomLong); - Comparator comparator = asc ? naturalOrder() : reverseOrder(); - List expectedValues = inputValues.stream().sorted(comparator).limit(limit).toList(); - List outputValues = topNLong(inputValues, limit, asc, false); - assertThat(outputValues, equalTo(expectedValues)); + testRandomTopN(asc, driverContext()); } } + public void testRandomTopNCranky() { + try { + testRandomTopN(randomBoolean(), crankyDriverContext()); + logger.info("cranky didn't break us"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testRandomTopN(boolean asc, DriverContext context) { + int limit = randomIntBetween(1, 20); + List inputValues = randomList(0, 5000, ESTestCase::randomLong); + Comparator comparator = asc ? 
naturalOrder() : reverseOrder(); + List expectedValues = inputValues.stream().sorted(comparator).limit(limit).toList(); + List outputValues = topNLong(context, inputValues, limit, asc, false); + assertThat(outputValues, equalTo(expectedValues)); + } + public void testBasicTopN() { List values = Arrays.asList(2L, 1L, 4L, null, 5L, 10L, null, 20L, 4L, 100L); assertThat(topNLong(values, 1, true, false), equalTo(Arrays.asList(1L))); @@ -267,8 +283,15 @@ public void testBasicTopN() { assertThat(topNLong(values, 100, false, true), equalTo(Arrays.asList(null, null, 100L, 20L, 10L, 5L, 4L, 4L, 2L, 1L))); } - private List topNLong(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { + private List topNLong( + DriverContext driverContext, + List inputValues, + int limit, + boolean ascendingOrder, + boolean nullsFirst + ) { return topNTwoColumns( + driverContext, inputValues.stream().map(v -> tuple(v, 0L)).toList(), limit, List.of(LONG, LONG), @@ -277,6 +300,10 @@ private List topNLong(List inputValues, int limit, boolean ascending ).stream().map(Tuple::v1).toList(); } + private List topNLong(List inputValues, int limit, boolean ascendingOrder, boolean nullsFirst) { + return topNLong(driverContext(), inputValues, limit, ascendingOrder, nullsFirst); + } + public void testCompareInts() { testCompare( new Page( @@ -422,6 +449,7 @@ public void testTopNTwoColumns() { List> values = Arrays.asList(tuple(1L, 1L), tuple(1L, 2L), tuple(null, null), tuple(null, 1L), tuple(1L, null)); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -432,6 +460,7 @@ public void testTopNTwoColumns() { ); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -442,6 +471,7 @@ public void testTopNTwoColumns() { ); assertThat( topNTwoColumns( + driverContext(), values, 5, List.of(LONG, LONG), @@ -613,13 +643,13 @@ public void testCollectAllValues_RandomMultiValues() { } private List> topNTwoColumns( + DriverContext 
driverContext, List> inputValues, int limit, List elementTypes, List encoder, List sortOrders ) { - DriverContext driverContext = driverContext(); List> outputValues = new ArrayList<>(); try ( Driver driver = new Driver( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 11c8b14fb76b..5134e05b4cc3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -48,7 +48,10 @@ public void ensureBlocksReleased() { CircuitBreakerService breakerService = internalCluster().getInstance(CircuitBreakerService.class, node); CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST); try { - assertBusy(() -> assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L))); + assertBusy(() -> { + logger.info("running tasks: {}", client().admin().cluster().prepareListTasks().get()); + assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); + }); } catch (Exception e) { assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 8b79d5df189c..342df5209ec9 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.esql.action; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -20,6 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.Collection; @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101588") +@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class EsqlActionBreakerIT extends EsqlActionIT { public static class InternalTransportSettingPlugin extends Plugin { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 963b92c04838..9a76bc086586 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -46,6 +46,8 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -111,6 +113,7 @@ * drivers that are used to execute the given plan. 
*/ public class LocalExecutionPlanner { + private static final Logger logger = LogManager.getLogger(LocalExecutionPlanner.class); private final String sessionId; private final CancellableTask parentTask; @@ -813,6 +816,7 @@ public List createDrivers(String sessionId) { try { for (DriverFactory df : driverFactories) { for (int i = 0; i < df.driverParallelism.instanceCount; i++) { + logger.trace("building {} {}", i, df); drivers.add(df.driverSupplier.apply(sessionId)); } } @@ -820,7 +824,7 @@ public List createDrivers(String sessionId) { return drivers; } finally { if (success == false) { - Releasables.close(() -> Releasables.close(drivers)); + Releasables.close(Releasables.wrap(drivers)); } } } From ef64936119913e37c77b582e39c0c8e669119b8a Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 1 Nov 2023 18:47:45 +0100 Subject: [PATCH 18/47] Increase timeout for creating a new index (#101215) `executeAndCancelCommittedPublication` uses 30s timeout for publication task, it makes also to use 30s timeout for request task Fixes #96693 --- .../cluster/coordination/RareClusterStateIT.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index 3a2c6b5ebd0f..80bba57270aa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -193,7 +193,7 @@ public void testDeleteCreateInOneBulk() throws Exception { refresh(); disruption.startDisrupting(); logger.info("--> delete index"); - executeAndCancelCommittedPublication(indicesAdmin().prepareDelete("test").setTimeout("0s")).get(10, TimeUnit.SECONDS); + 
executeAndCancelCommittedPublication(indicesAdmin().prepareDelete("test").setTimeout("0s")).get(30, TimeUnit.SECONDS); logger.info("--> and recreate it"); executeAndCancelCommittedPublication( prepareCreate("test").setSettings( @@ -201,7 +201,7 @@ public void testDeleteCreateInOneBulk() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey(), "0") ).setTimeout("0s") - ).get(10, TimeUnit.SECONDS); + ).get(30, TimeUnit.SECONDS); logger.info("--> letting cluster proceed"); @@ -295,7 +295,7 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { // Now make sure the indexing request finishes successfully disruption.stopDisrupting(); - assertTrue(putMappingResponse.get(10, TimeUnit.SECONDS).isAcknowledged()); + assertTrue(putMappingResponse.get(30, TimeUnit.SECONDS).isAcknowledged()); assertThat(docIndexResponse.get(10, TimeUnit.SECONDS), instanceOf(IndexResponse.class)); assertEquals(1, docIndexResponse.get(10, TimeUnit.SECONDS).getShardInfo().getTotal()); } @@ -408,11 +408,11 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { // Now make sure the indexing request finishes successfully disruption.stopDisrupting(); - assertTrue(putMappingResponse.get(10, TimeUnit.SECONDS).isAcknowledged()); + assertTrue(putMappingResponse.get(30, TimeUnit.SECONDS).isAcknowledged()); assertThat(docIndexResponse.get(10, TimeUnit.SECONDS), instanceOf(IndexResponse.class)); assertEquals(2, docIndexResponse.get(10, TimeUnit.SECONDS).getShardInfo().getTotal()); // both shards should have succeeded - assertThat(dynamicMappingsFut.get(10, TimeUnit.SECONDS).getResult(), equalTo(CREATED)); + assertThat(dynamicMappingsFut.get(30, TimeUnit.SECONDS).getResult(), equalTo(CREATED)); } } From 2d144babfb7f584c92438bdc3e95458a357fd1e9 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 1 Nov 2023 20:13:50 +0200 Subject: [PATCH 19/47] Yaml test for 
https://github.com/elastic/elasticsearch/issues/101489 (#101685) --- .../rest-api-spec/test/100_bug_fix.yml | 76 +++++++++++++++++-- 1 file changed, 70 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml index 720914b579f3..d5f5bee46f50 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/100_bug_fix.yml @@ -1,5 +1,7 @@ --- -setup: +"Bug fix https://github.com/elastic/elasticsearch/issues/99472": + - skip: + features: warnings - do: bulk: index: test @@ -9,11 +11,6 @@ setup: - { "emp_no": 10, "ip1": "127.0", "ip2": "0.1" } - { "index": { } } - { "emp_no": 20 } - ---- -"Bug fix https://github.com/elastic/elasticsearch/issues/99472": - - skip: - features: warnings - do: warnings: - "Line 1:37: evaluation of [to_ip(coalesce(ip1.keyword, \"255.255.255.255\"))] failed, treating result as null. Only first 20 failures recorded." 
@@ -55,3 +52,70 @@ setup: - length: { values: 2 } - match: { values.0: [ 10, "127.00.1", "127.00.1", null ] } - match: { values.1: [ 20, null, "255.255.255.255", "255.255.255.255"] } + +--- +"Bug fix https://github.com/elastic/elasticsearch/issues/101489": + - do: + indices.create: + index: index1 + body: + mappings: + properties: + http: + properties: + headers: + type: flattened + - do: + indices.create: + index: index2 + body: + mappings: + properties: + http: + properties: + headers: + properties: + location: + type: keyword + - do: + indices.create: + index: index3 + body: + mappings: + properties: + http: + properties: + headers: + properties: + location: + type: text + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "index1" } } + - { "http.headers": { "location": "RO","code": 123 } } + - { "index": { "_index": "index2" } } + - { "http.headers.location": "US" } + - { "index": { "_index": "index3" } } + - { "http.headers.location": "CN" } + - do: + esql.query: + body: + query: 'from index* [metadata _index] | limit 5 | sort _index desc' + - match: { columns.0.name: http.headers } + - match: { columns.0.type: unsupported } + - match: { columns.1.name: http.headers.location } + - match: { columns.1.type: unsupported } + - match: { columns.2.name: _index } + - match: { columns.2.type: keyword } + - length: { values: 3 } + - match: { values.0.0: null } + - match: { values.0.1: null } + - match: { values.0.2: index3 } + - match: { values.1.0: null } + - match: { values.1.1: null } + - match: { values.1.2: index2 } + - match: { values.2.0: null } + - match: { values.2.1: null } + - match: { values.2.2: index1 } From 41f09fb518a6fc8fdf9b3652e566df05479a9e1e Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 1 Nov 2023 14:14:51 -0400 Subject: [PATCH 20/47] Drop HLRC from this test (#101681) --- .../AbstractMultiClusterRemoteTestCase.java | 25 ++-- .../cluster/remote/test/RemoteClustersIT.java | 130 ++++++++++-------- 2 files changed, 78 
insertions(+), 77 deletions(-) diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java index 824f4db5c4cf..d9be4045c37e 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -26,10 +25,8 @@ import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; import java.util.function.Consumer; -@SuppressWarnings("removal") public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase { private static final String USER = "x_pack_rest_user"; @@ -40,8 +37,8 @@ protected boolean preserveClusterUponCompletion() { return true; } - private static RestHighLevelClient cluster1Client; - private static RestHighLevelClient cluster2Client; + private static RestClient cluster1Client; + private static RestClient cluster2Client; private static boolean initialized = false; @Override @@ -62,8 +59,8 @@ public void initClientsAndConfigureClusters() throws Exception { request.addParameter("wait_for_status", "yellow"); request.addParameter("wait_for_nodes", "1"); }; - ensureHealth(cluster1Client().getLowLevelClient(), waitForYellowRequest); - ensureHealth(cluster2Client().getLowLevelClient(), waitForYellowRequest); + ensureHealth(cluster1Client, waitForYellowRequest); + 
ensureHealth(cluster2Client, waitForYellowRequest); initialized = true; } @@ -86,28 +83,22 @@ public static void destroyClients() throws IOException { } } - protected static RestHighLevelClient cluster1Client() { + protected static RestClient cluster1Client() { return cluster1Client; } - protected static RestHighLevelClient cluster2Client() { + protected static RestClient cluster2Client() { return cluster2Client; } - private static class HighLevelClient extends RestHighLevelClient { - private HighLevelClient(RestClient restClient) { - super(restClient, RestClient::close, Collections.emptyList()); - } - } - - private RestHighLevelClient buildClient(final String url) throws IOException { + private RestClient buildClient(final String url) throws IOException { int portSeparator = url.lastIndexOf(':'); HttpHost httpHost = new HttpHost( url.substring(0, portSeparator), Integer.parseInt(url.substring(portSeparator + 1)), getProtocol() ); - return new HighLevelClient(buildClient(restAdminSettings(), new HttpHost[] { httpHost })); + return buildClient(restAdminSettings(), new HttpHost[] { httpHost }); } protected boolean isOss() { diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java index 9a0303ab6071..78ffb9cb7b7b 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java @@ -7,13 +7,10 @@ */ package org.elasticsearch.cluster.remote.test; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; 
-import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.test.rest.ObjectPath; import org.junit.After; import org.junit.Before; @@ -28,44 +25,53 @@ public class RemoteClustersIT extends AbstractMultiClusterRemoteTestCase { @Before public void setupIndices() throws IOException { - RestClient cluster1Client = cluster1Client().getLowLevelClient(); - assertTrue(createIndex(cluster1Client, "test1", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); - cluster1Client().index( - new IndexRequest("test1").id("id1") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - - RestClient cluster2Client = cluster2Client().getLowLevelClient(); - assertTrue(createIndex(cluster2Client, "test2", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); - cluster2Client().index( - new IndexRequest("test2").id("id1").source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - cluster2Client().index( - new IndexRequest("test2").id("id2") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()), - RequestOptions.DEFAULT - ); - assertEquals(1L, cluster1Client().search(new SearchRequest("test1"), RequestOptions.DEFAULT).getHits().getTotalHits().value); - assertEquals(2L, cluster2Client().search(new SearchRequest("test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value); + assertTrue(createIndex(cluster1Client(), "test1", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); + { + Request createDoc = new Request("POST", "/test1/_doc/id1?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster1Client().performRequest(createDoc)); + } + { + Request searchRequest = new Request("POST", 
"/test1/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(1, (int) doc.evaluate("hits.total.value")); + } + + assertTrue(createIndex(cluster2Client(), "test2", Settings.builder().put("index.number_of_replicas", 0).build()).isAcknowledged()); + { + Request createDoc = new Request("POST", "/test2/_doc/id1?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster2Client().performRequest(createDoc)); + } + { + Request createDoc = new Request("POST", "/test2/_doc/id2?refresh=true"); + createDoc.setJsonEntity(""" + { "foo": "bar" } + """); + assertOK(cluster2Client().performRequest(createDoc)); + } + { + Request searchRequest = new Request("POST", "/test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster2Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } @After public void clearIndices() throws IOException { - RestClient cluster1Client = cluster1Client().getLowLevelClient(); - assertTrue(deleteIndex(cluster1Client, "*").isAcknowledged()); - RestClient cluster2Client = cluster2Client().getLowLevelClient(); - assertTrue(deleteIndex(cluster2Client, "*").isAcknowledged()); + assertTrue(deleteIndex(cluster1Client(), "*").isAcknowledged()); + assertTrue(deleteIndex(cluster2Client(), "*").isAcknowledged()); } @After public void clearRemoteClusterSettings() throws IOException { Settings setting = Settings.builder().putNull("cluster.remote.*").build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), setting); - updateClusterSettings(cluster2Client().getLowLevelClient(), setting); + updateClusterSettings(cluster1Client(), setting); + updateClusterSettings(cluster2Client(), setting); } public void testProxyModeConnectionWorks() throws IOException { @@ -76,14 +82,15 @@ public void testProxyModeConnectionWorks() throws IOException { .put("cluster.remote.cluster2.proxy_address", 
cluster2RemoteClusterSeed) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("cluster2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/cluster2:test2/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } public void testSniffModeConnectionFails() throws IOException { @@ -93,9 +100,9 @@ public void testSniffModeConnectionFails() throws IOException { .put("cluster.remote.cluster2alt.mode", "sniff") .put("cluster.remote.cluster2alt.seeds", cluster2RemoteClusterSeed) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertFalse(isConnected(cluster1Client().getLowLevelClient())); + assertFalse(isConnected(cluster1Client())); } public void testHAProxyModeConnectionWorks() throws IOException { @@ -105,14 +112,15 @@ public void testHAProxyModeConnectionWorks() throws IOException { .put("cluster.remote.haproxynosn.mode", "proxy") .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("haproxynosn:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxynosn:test2/_search"); + ObjectPath doc = 
ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException { @@ -123,14 +131,15 @@ public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") .put("cluster.remote.haproxysni1.server_name", "application1.example.com") .build(); - updateClusterSettings(cluster2Client().getLowLevelClient(), settings); + updateClusterSettings(cluster2Client(), settings); - assertTrue(isConnected(cluster2Client().getLowLevelClient())); + assertTrue(isConnected(cluster2Client())); - assertEquals( - 1L, - cluster2Client().search(new SearchRequest("haproxysni1:test1"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxysni1:test1/_search"); + ObjectPath doc = ObjectPath.createFromResponse(cluster2Client().performRequest(searchRequest)); + assertEquals(1, (int) doc.evaluate("hits.total.value")); + } } public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException { @@ -141,14 +150,15 @@ public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException .put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") .put("cluster.remote.haproxysni2.server_name", "application2.example.com") .build(); - updateClusterSettings(cluster1Client().getLowLevelClient(), settings); + updateClusterSettings(cluster1Client(), settings); - assertTrue(isConnected(cluster1Client().getLowLevelClient())); + assertTrue(isConnected(cluster1Client())); - assertEquals( - 2L, - cluster1Client().search(new SearchRequest("haproxysni2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value - ); + { + Request searchRequest = new Request("POST", "/haproxysni2:test2/_search"); + ObjectPath doc = 
ObjectPath.createFromResponse(cluster1Client().performRequest(searchRequest)); + assertEquals(2, (int) doc.evaluate("hits.total.value")); + } } @SuppressWarnings("unchecked") From 61c7483fc930268cc1ec2b791abfc253be5d9171 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 1 Nov 2023 14:21:40 -0400 Subject: [PATCH 21/47] Make knn search a query (#98916) This introduced a new knn query: - knn query is executed during the Query phase similar to all other queries. - No k parameter, k defaults to size - num_candidates is a size of queue for candidates to consider while search a graph on each shard - For aggregations: "size" results are collected with total = size * shards. Aggregations will see size * shards results. - All filters from DSL are applied as post-filters, except: 1) alias filter is applied as pre-filter or 2) a filter provided as a parameter inside knn query. --- docs/changelog/98916.yaml | 5 + docs/reference/query-dsl/knn-query.asciidoc | 222 ++++++++++++++ .../query-dsl/special-queries.asciidoc | 6 + .../search-your-data/knn-search.asciidoc | 5 +- .../percolator/PercolatorQuerySearchIT.java | 33 +++ .../percolator/PercolatorFieldMapper.java | 3 + .../110_knn_query_with_filter.yml | 274 ++++++++++++++++++ .../120_knn_query_multiple_shards.yml | 216 ++++++++++++++ .../130_knn_query_nested_search.yml | 213 ++++++++++++++ .../140_knn_query_with_other_queries.yml | 127 ++++++++ .../test/search.vectors/40_knn_search.yml | 17 -- .../search.vectors/45_knn_search_byte.yml | 14 - .../org/elasticsearch/TransportVersions.java | 1 + .../index/query/SearchExecutionContext.java | 11 + .../elasticsearch/search/SearchModule.java | 4 +- .../elasticsearch/search/SearchService.java | 1 + .../search/vectors/KnnVectorQueryBuilder.java | 205 ++++++------- ...AbstractKnnVectorQueryBuilderTestCase.java | 85 ++---- .../search-business-rules/10_pinned_query.yml | 97 +++++++ 19 files changed, 1346 insertions(+), 193 deletions(-) create mode 100644 docs/changelog/98916.yaml 
create mode 100644 docs/reference/query-dsl/knn-query.asciidoc create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml diff --git a/docs/changelog/98916.yaml b/docs/changelog/98916.yaml new file mode 100644 index 000000000000..a466e3deba00 --- /dev/null +++ b/docs/changelog/98916.yaml @@ -0,0 +1,5 @@ +pr: 98916 +summary: Make knn search a query +area: Vector Search +type: feature +issues: [] diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc new file mode 100644 index 000000000000..f9cc31748ef7 --- /dev/null +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -0,0 +1,222 @@ +[[query-dsl-knn-query]] +=== Knn query +++++ +Knn +++++ + +Finds the _k_ nearest vectors to a query vector, as measured by a similarity +metric. _knn_ query finds nearest vectors through approximate search on indexed +dense_vectors. The preferred way to do approximate kNN search is through the +<> of a search request. _knn_ query is reserved for +expert cases, where there is a need to combine this query with other queries. + +[[knn-query-ex-request]] +==== Example request + +[source,console] +---- +PUT my-image-index +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm" + }, + "file-type": { + "type": "keyword" + } + } + } +} +---- + +. Index your data. 
++ +[source,console] +---- +POST my-image-index/_bulk?refresh=true +{ "index": { "_id": "1" } } +{ "image-vector": [1, 5, -20], "file-type": "jpg" } +{ "index": { "_id": "2" } } +{ "image-vector": [42, 8, -15], "file-type": "png" } +{ "index": { "_id": "3" } } +{ "image-vector": [15, 11, 23], "file-type": "jpg" } +---- +//TEST[continued] + +. Run the search using the `knn` query, asking for the top 3 nearest vectors. ++ +[source,console] +---- +POST my-image-index/_search +{ + "size" : 3, + "query" : { + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "num_candidates": 10 + } + } +} +---- +//TEST[continued] + +NOTE: `knn` query doesn't have a separate `k` parameter. `k` is defined by +`size` parameter of a search request similar to other queries. `knn` query +collects `num_candidates` results from each shard, then merges them to get +the top `size` results. + + +[[knn-query-top-level-parameters]] +==== Top-level parameters for `knn` + +`field`:: ++ +-- +(Required, string) The name of the vector field to search against. Must be a +<>. +-- + +`query_vector`:: ++ +-- +(Required, array of floats) Query vector. Must have the same number of dimensions +as the vector field you are searching against. +-- + +`num_candidates`:: ++ +-- +(Required, integer) The number of nearest neighbor candidates to consider per shard. +Cannot exceed 10,000. {es} collects `num_candidates` results from each shard, then +merges them to find the top results. Increasing `num_candidates` tends to improve the +accuracy of the final results. +-- + +`filter`:: ++ +-- +(Optional, query object) Query to filter the documents that can match. +The kNN search will return the top documents that also match this filter. +The value can be a single query or a list of queries. If `filter` is not provided, +all documents are allowed to match. 
+ +The filter is a pre-filter, meaning that it is applied **during** the approximate +kNN search to ensure that `num_candidates` matching documents are returned. +-- + +`similarity`:: ++ +-- +(Optional, float) The minimum similarity required for a document to be considered +a match. The similarity value calculated relates to the raw +<> used. Not the document score. The matched +documents are then scored according to <> +and the provided `boost` is applied. +-- + +`boost`:: ++ +-- +(Optional, float) Floating point number used to multiply the +scores of matched documents. This value cannot be negative. Defaults to `1.0`. +-- + +`_name`:: ++ +-- +(Optional, string) Name field to identify the query +-- + +[[knn-query-filtering]] +==== Pre-filters and post-filters in knn query + +There are two ways to filter documents that match a kNN query: + +. **pre-filtering** – filter is applied during the approximate kNN search +to ensure that `k` matching documents are returned. +. **post-filtering** – filter is applied after the approximate kNN search +completes, which results in fewer than k results, even when there are enough +matching documents. + +Pre-filtering is supported through the `filter` parameter of the `knn` query. +Also filters from <> are applied as pre-filters. + +All other filters found in the Query DSL tree are applied as post-filters. +For example, `knn` query finds the top 3 documents with the nearest vectors +(num_candidates=3), which are combined with `term` filter, that is +post-filtered. The final set of documents will contain only a single document +that passes the post-filter. 
+ + +[source,console] +---- +POST my-image-index/_search +{ + "size" : 10, + "query" : { + "bool" : { + "must" : { + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "num_candidates": 3 + } + }, + "filter" : { + "term" : { "file-type" : "png" } + } + } + } +} +---- +//TEST[continued] + +[[knn-query-with-nested-query]] +==== Knn query inside a nested query + +`knn` query can be used inside a nested query. The behaviour here is similar +to <>: + +* kNN search over nested dense_vectors diversifies the top results over +the top-level document +* `filter` over the top-level document metadata is supported and acts as a +post-filter +* `filter` over `nested` field metadata is not supported + +A sample query can look like below: + +[source,js] +---- +{ + "query" : { + "nested" : { + "path" : "paragraph", + "query" : { + "knn": { + "query_vector": [ + 0.45, + 45 + ], + "field": "paragraph.vector", + "num_candidates": 2 + } + } + } + } +} +---- +// NOTCONSOLE + +[[knn-query-aggregations]] +==== Knn query with aggregations +`knn` query calculates aggregations on `num_candidates` from each shard. +Thus, the final results from aggregations contain +`num_candidates * number_of_shards` documents. This is different from +the <> where aggregations are +calculated on the global top k nearest documents. + diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc index a6d35d4f9b70..d46377f69835 100644 --- a/docs/reference/query-dsl/special-queries.asciidoc +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -17,6 +17,10 @@ or collection of documents. This query finds queries that are stored as documents that match with the specified document. +<>:: +A query that finds the _k_ nearest vectors to a query +vector, as measured by a similarity metric. + <>:: A query that computes scores based on the values of numeric features and is able to efficiently skip non-competitive hits. 
@@ -43,6 +47,8 @@ include::mlt-query.asciidoc[] include::percolate-query.asciidoc[] +include::knn-query.asciidoc[] + include::rank-feature-query.asciidoc[] include::script-query.asciidoc[] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 8c676a5515ca..4bf1ceabe08d 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -43,7 +43,7 @@ based on a similarity metric, the better its match. {es} supports two methods for kNN search: * <> using the `knn` search -option +option or `knn` query * <> using a `script_score` query with a vector function @@ -129,7 +129,8 @@ POST image-index/_bulk?refresh=true //TEST[continued] //TEST[s/\.\.\.//] -. Run the search using the <>. +. Run the search using the <> or the +<> (expert case). + [source,console] ---- diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 9362080c9cb3..cad976411b8d 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -9,6 +9,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; @@ -22,10 +23,12 @@ import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; +import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -1295,4 +1298,34 @@ public void testWithWildcardFieldNames() throws Exception { ).get(); assertEquals(1, response.getHits().getTotalHits().value); } + + public void testKnnQueryNotSupportedInPercolator() throws IOException { + String mappings = org.elasticsearch.common.Strings.format(""" + { + "properties": { + "my_query" : { + "type" : "percolator" + }, + "my_vector" : { + "type" : "dense_vector", + "dims" : 5, + "index" : true, + "similarity" : "l2_norm" + } + + } + } + """); + indicesAdmin().prepareCreate("index1").setMapping(mappings).get(); + ensureGreen(); + QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, null); + + IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index1") + .setId("knn_query1") + .setSource(jsonBuilder().startObject().field("my_query", knnVectorQueryBuilder).endObject()); + + DocumentParsingException exception = expectThrows(DocumentParsingException.class, () -> indexRequestBuilder.get()); + assertThat(exception.getMessage(), containsString("the [knn] query is unsupported inside a percolator")); + } + } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index c00eaa894dd6..e21226428793 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -61,6 +61,7 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.ByteArrayOutputStream; @@ -438,6 +439,8 @@ static QueryBuilder parseQueryBuilder(DocumentParserContext context) { throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query"); } else if (queryName.equals("has_parent")) { throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query"); + } else if (queryName.equals(KnnVectorQueryBuilder.NAME)) { + throw new IllegalArgumentException("the [knn] query is unsupported inside a percolator query"); } }); } catch (IOException e) { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml new file mode 100644 index 000000000000..849df86a3056 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml @@ -0,0 +1,274 @@ +# test how knn query interacts with filters +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": 
"v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + +--- +"Simple knn query": + + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0.fields.my_name.0: v1 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2.fields.my_name.0: v1 } +--- +"PRE_FILTER: knn query with alias filter as pre-filter": + - do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + + # alias prefilter is combined with internal filter + - do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + 
knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + term: + my_name: v1 + + # both alias filter and internal filter are applied as pre-filter resulting in 0 hits for knn search + - match: { hits.total.value: 0 } + - length: { hits.hits: 0 } + + # alias prefilter is applied when knn is a part of another query + - do: + search: + index: my_alias + body: + size: 3 + fields: [ my_name ] + query: + bool: + should: + - wildcard: + my_name: + value: "v*" + - knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 5 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + +--- +"PRE_FILTER: pre-filter across multiple internal filters": +- do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + - term: + my_name: v1 + - term: + my_name: v2 +- match: { hits.total.value: 0 } +- length: { hits.hits: 0 } + +--- +"PRE_FILTER: pre-filter across multiple aliases": + - do: + search: + index: my_alias,my_alias1 + body: + size: 6 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 100 + + - match: { hits.total.value: 10 } # 5 docs from each alias + - length: {hits.hits: 6} + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0.fields.my_name.0: v1 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2.fields.my_name.0: v1 } + - match: { hits.hits.3._id: "4" } + - match: { hits.hits.3.fields.my_name.0: v2 } + - match: { hits.hits.4._id: "5" } + - match: { hits.hits.4.fields.my_name.0: v1 } + - match: { hits.hits.5._id: 
"6" } + - match: { hits.hits.5.fields.my_name.0: v2 } + +--- +"PRE_FILTER: knn query with internal filter as pre-filter": + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + filter: + term: + my_name: v2 + + - match: { hits.total.value: 5 } + - length: {hits.hits: 3} + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + - match: { hits.hits.2._id: "6" } + - match: { hits.hits.2.fields.my_name.0: v2 } + +--- +"POST_FILTER: knn query with filter from a parent bool query as post-filter": + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + bool: + must: + - term: + my_name: v2 + - knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 5 + + - match: { hits.total.value: 2 } + - length: {hits.hits: 2} # knn query returns top 5 docs, but they are post-filtered to 2 docs + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.1.fields.my_name.0: v2 } + + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + bool: + must: + - term: + my_name: v2 + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + filter: + term: + my_name: v1 + + - match: { hits.total.value: 0} + - length: { hits.hits: 0 } # knn query returns top 5 docs, but they are post-filtered to 0 docs diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml new file mode 100644 index 000000000000..b1c0fd948481 --- /dev/null +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -0,0 +1,216 @@ +# test how knn query interacts with filters +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + features: close_to + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 2 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + - '{"index": {"_id": "11"}}' + - '{"my_vector": [1, 1, 1, 11], "my_name": "v1"}' + - '{"index": {"_id": "12"}}' + - '{"my_vector": [1, 1, 1, 12], "my_name": "v2"}' + + +--- +"Search for 2 knn queries combines scores from them": + - do: + search: + index: my_index + body: + size: 6 + query: + bool: + should: + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 100 + boost: 1.1 + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 12 ] + num_candidates: 100 + + - length: {hits.hits: 6} + - match: {hits.total.value: 12} + - match: {hits.hits.0._id: '1'} + - match: 
{hits.hits.1._id: '12'} + - match: {hits.hits.2._id: '2'} + - match: { hits.hits.3._id: '11' } + - match: { hits.hits.4._id: '3' } + - match: { hits.hits.5._id: '10' } + + +--- +"Hybrid search combines scores from knn and other queries": + - do: + search: + include_named_queries_score: true + index: my_index + body: + size: 3 + query: + bool: + should: + - wildcard: + my_name: + value: "v*" # produces scores 1.0 + _name: "bm25_query" + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 + _name: "knn_query" + + - length: {hits.hits: 3} + - match: {hits.total.value: 12} + - match: {hits.hits.0._id: '1'} + - match: {hits.hits.1._id: '2'} + - match: {hits.hits.2._id: '3'} + + - close_to: {hits.hits.0._score: { value: 2.0, error: 0.00001 } } + - close_to: {hits.hits.0.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: {hits.hits.0.matched_queries.knn_query: { value: 1.0, error: 0.00001 } } + + - close_to: {hits.hits.1._score: { value: 1.5, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.knn_query: { value: 0.5, error: 0.00001 } } + + - close_to: {hits.hits.2._score: { value: 1.2, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.bm25_query: { value: 1.0, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.knn_query: { value: 0.2, error: 0.00001 } } + + # the same query with boosts + - do: + search: + include_named_queries_score: true + index: my_index + body: + size: 3 + query: + bool: + should: + - wildcard: + my_name: + value: "v*" # produces scores 1.0 + boost: 100 + _name: "bm25_query" + - knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 + boost: 100 + _name: "knn_query" + + - length: { hits.hits: 3 } + - match: { hits.total.value: 12 } + - match: { hits.hits.0._id: '1' } + - match: { hits.hits.1._id: '2' } + - match: { hits.hits.2._id: '3' } + + - close_to: { 
hits.hits.0._score: { value: 200.0, error: 0.00001 } } + - close_to: { hits.hits.0.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.0.matched_queries.knn_query: { value: 100.0, error: 0.00001 } } + + - close_to: { hits.hits.1._score: { value: 150.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.1.matched_queries.knn_query: { value: 50.0, error: 0.00001 } } + + - close_to: { hits.hits.2._score: { value: 120, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.bm25_query: { value: 100.0, error: 0.00001 } } + - close_to: { hits.hits.2.matched_queries.knn_query: { value: 20.0, error: 0.00001 } } + +--- +"Aggregations with collected number of docs depends on num_candidates": + - do: + search: + index: my_index + body: + size: 2 + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + num_candidates: 100 # collect up to 100 candidates from each shard + aggs: + my_agg: + terms: + field: my_name + order: + _key: asc + + - length: {hits.hits: 2} + - match: {hits.total.value: 12} + - match: {aggregations.my_agg.buckets.0.key: 'v1'} + - match: {aggregations.my_agg.buckets.1.key: 'v2'} + - match: {aggregations.my_agg.buckets.0.doc_count: 6} + - match: {aggregations.my_agg.buckets.1.doc_count: 6} + + - do: + search: + index: my_index + body: + size: 2 + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 3 # collect 3 candidates from each shard + aggs: + my_agg2: + terms: + field: my_name + order: + _key: asc + my_sum_buckets: + sum_bucket: + buckets_path: "my_agg2>_count" + + - length: { hits.hits: 2 } + - match: { hits.total.value: 6 } + - match: { aggregations.my_agg2.buckets.0.key: 'v1' } + - match: { aggregations.my_agg2.buckets.1.key: 'v2' } + - match: { aggregations.my_sum_buckets.value: 6.0 } diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml new file mode 100644 index 000000000000..435291b454d0 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -0,0 +1,213 @@ +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + - do: + indices.create: + index: test + body: + settings: + index: + number_of_shards: 1 + mappings: + properties: + name: + type: keyword + nested: + type: nested + properties: + paragraph_id: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + aliases: + my_alias: + filter: + term: + name: "rabbit.jpg" + + - do: + index: + index: test + id: "1" + body: + name: cow.jpg + nested: + - paragraph_id: 0 + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + - paragraph_id: 1 + vector: [240.0, 300, -3, 1, -20] + + - do: + index: + index: test + id: "2" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [-0.5, 100.0, -13, 14.8, -156.0] + - paragraph_id: 2 + vector: [0, 100.0, 0, 14.8, -156.0] + - paragraph_id: 3 + vector: [0, 1.0, 0, 1.8, -15.0] + + - do: + index: + index: test + id: "3" + body: + name: rabbit.jpg + nested: + - paragraph_id: 0 + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + +--- +"nested kNN search that returns diverse parents docs": + - do: + search: + index: test + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: test + body: + fields: [ "name" ] + query: + 
nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 3 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 3} + + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.name.0: "moose.jpg" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1.fields.name.0: "rabbit.jpg" } + - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2.fields.name.0: "cow.jpg" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + +--- +"nested kNN search pre-filtered on alias with filter on top level fields": + - do: + search: + index: my_alias # filter on name: "rabbit.jpg" + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 1} # as alias is passed as pre-filter, we get a single result + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + +--- +"nested kNN search post-filtered on top level fields": + - do: + search: + index: test + body: + fields: [ "name" ] + query: + bool: + must: + - term: + name: "rabbit.jpg" + - nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + - match: { hits.total.value: 0 } # no hits because returned single vector did not pass post-filter + + - do: + search: + index: test + body: + fields: [ "name" ] + query: + bool: + must: + - term: + name: 
"rabbit.jpg" + - nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 3 + inner_hits: { size: 1, fields: [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } +--- + +"nested kNN search post-filtered on nested fields DOES NOT work": + - do: + search: + index: test + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + bool: + must: + - term: + nested.paragraph_id: 3 + - knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 6 + inner_hits: { size: 1, "fields": [ "nested.paragraph_id" ], _source: false } + # no hits because, regardless of num_candidates knn returns top 3 child vectors from distinct parents + # and they don't pass the post-filter + # TODO: fix it on Lucene level so nested knn respects num_candidates + # or do pre-filtering + - match: {hits.total.value: 0} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml new file mode 100644 index 000000000000..8f52a72cce01 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -0,0 +1,127 @@ +# test how knn query interact with other queries +setup: + - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + features: close_to + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + 
filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + +--- +"Function score query with knn query": + # find top 5 knn docs, then boost docs with name v1 by 10 and docs with name v2 by 100 + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + function_score: + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + num_candidates: 5 + functions: + - filter: { match: { my_name: v1 } } + weight: 10 + - filter: { match: { my_name: v2 } } + weight: 100 + boost_mode: multiply + + - match: { hits.total.value: 5 } # collector sees num_candidates docs + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - close_to: { hits.hits.0._score: { value: 50.0, error: 0.001 } } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - close_to: { hits.hits.1._score: { value: 10.0, error: 0.001 } } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.fields.my_name.0: v2 } + - close_to: { hits.hits.2._score: { value: 10.0, error: 0.001 } } + +--- +"dis_max query with 
knn query": + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + dis_max: + queries: + - knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { my_name: v2 } + tie_breaker: 0.8 + + - match: { hits.total.value: 8 } # 5 knn results + extra results from match query + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.fields.my_name.0: v2 } + - match: { hits.hits.3._id: "6" } + - match: { hits.hits.3.fields.my_name.0: v2 } + - match: { hits.hits.4._id: "8" } + - match: { hits.hits.4.fields.my_name.0: v2 } + - match: { hits.hits.5._id: "10" } + - match: { hits.hits.5.fields.my_name.0: v2 } + - match: { hits.hits.6._id: "3" } + - match: { hits.hits.6.fields.my_name.0: v1 } + - match: { hits.hits.7._id: "5" } + - match: { hits.hits.7.fields.my_name.0: v1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 340cd8f8d0f7..57f8603f1e06 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -294,23 +294,6 @@ setup: - match: { error.root_cause.0.reason: "failed to create query: field [nonexistent] does not exist in the mapping" } --- -"Direct kNN queries are disallowed": - - skip: - version: ' - 8.3.99' - reason: 'error message changed in 8.4' - - do: - catch: bad_request - search: - index: test-index - body: - query: - knn: - field: vector - query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] - num_candidates: 1 - - match: { error.root_cause.0.type: "illegal_argument_exception" } - - match: { error.root_cause.0.reason: "[knn] queries 
cannot be provided directly, use the [knn] body parameter instead" } ---- "KNN Vector similarity search only": - skip: version: ' - 8.7.99' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml index 873b6d87cac6..ea21bb69a77b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml @@ -163,20 +163,6 @@ setup: - match: { error.root_cause.0.reason: "failed to create query: field [nonexistent] does not exist in the mapping" } --- -"Direct kNN queries are disallowed": - - do: - catch: bad_request - search: - index: test - body: - query: - knn: - field: vector - query_vector: [ -1, 0, 1, 2, 3 ] - num_candidates: 1 - - match: { error.root_cause.0.type: "illegal_argument_exception" } - - match: { error.root_cause.0.reason: "[knn] queries cannot be provided directly, use the [knn] body parameter instead" } ---- "Vector similarity search only": - skip: version: ' - 8.7.99' diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d09be93772e7..5321d7a0a608 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -157,6 +157,7 @@ static TransportVersion def(int id) { public static final TransportVersion CLUSTER_FEATURES_ADDED = def(8_526_00_0); public static final TransportVersion DSL_ERROR_STORE_INFORMATION_ENHANCED = def(8_527_00_0); public static final TransportVersion INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED = def(8_528_00_0); + public static final TransportVersion KNN_AS_QUERY_ADDED = def(8_529_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 8186c9c2d9a0..c4806dbd3a0a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -98,6 +98,8 @@ public class SearchExecutionContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private NestedScope nestedScope; + private QueryBuilder aliasFilter; + /** * Build a {@linkplain SearchExecutionContext}. */ @@ -228,6 +230,15 @@ private void reset() { this.nestedScope = new NestedScope(); } + // Set alias filter, so it can be applied for queries that need it (e.g. knn query) + public void setAliasFilter(QueryBuilder aliasFilter) { + this.aliasFilter = aliasFilter; + } + + public QueryBuilder getAliasFilter() { + return aliasFilter; + } + /** * The similarity to use in searches, which takes into account per-field configuration. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 111bec2c8850..8ba48563c8f5 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -1125,9 +1125,7 @@ private void registerQueryParsers(List plugins) { ); registerQuery(new QuerySpec<>(GeoShapeQueryBuilder.NAME, GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(KnnVectorQueryBuilder.NAME, KnnVectorQueryBuilder::new, parser -> { - throw new IllegalArgumentException("[knn] queries cannot be provided directly, use the [knn] body parameter instead"); - })); + registerQuery(new QuerySpec<>(KnnVectorQueryBuilder.NAME, KnnVectorQueryBuilder::new, KnnVectorQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(KnnScoreDocQueryBuilder.NAME, KnnScoreDocQueryBuilder::new, parser -> { throw new IllegalArgumentException("[score_doc] queries cannot be provided directly"); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 6919cfdbc00b..44a8f641fae9 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1249,6 +1249,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc QueryBuilder query = source.query(); if (query != null) { InnerHitContextBuilder.extractInnerHits(query, innerHitBuilders); + searchExecutionContext.setAliasFilter(context.request().getAliasFilter().getQueryBuilder()); context.parsedQuery(searchExecutionContext.toQuery(query)); } if (source.postFilter() != null) { diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index 
6c261c040266..3571d77d7376 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -28,7 +28,11 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -36,48 +40,73 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * A query that performs kNN search using Lucene's {@link org.apache.lucene.search.KnnFloatVectorQuery} or * {@link org.apache.lucene.search.KnnByteVectorQuery}. * - * NOTE: this is an internal class and should not be used outside of core Elasticsearch code. 
*/ public class KnnVectorQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "knn"; + private static final int NUM_CANDS_LIMIT = 10000; + public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final ParseField NUM_CANDS_FIELD = new ParseField("num_candidates"); + public static final ParseField QUERY_VECTOR_FIELD = new ParseField("query_vector"); + public static final ParseField VECTOR_SIMILARITY_FIELD = new ParseField("similarity"); + public static final ParseField FILTER_FIELD = new ParseField("filter"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("knn", args -> { + List vector = (List) args[1]; + final float[] vectorArray; + if (vector != null) { + vectorArray = new float[vector.size()]; + for (int i = 0; i < vector.size(); i++) { + vectorArray[i] = vector.get(i); + } + } else { + vectorArray = null; + } + return new KnnVectorQueryBuilder((String) args[0], vectorArray, (int) args[2], (Float) args[3]); + }); + + static { + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareFloatArray(constructorArg(), QUERY_VECTOR_FIELD); + // TODO: make num_candidates optional + PARSER.declareInt(constructorArg(), NUM_CANDS_FIELD); + PARSER.declareFloat(optionalConstructorArg(), VECTOR_SIMILARITY_FIELD); + PARSER.declareFieldArray( + KnnVectorQueryBuilder::addFilterQueries, + (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), + FILTER_FIELD, + ObjectParser.ValueType.OBJECT_ARRAY + ); + declareStandardFields(PARSER); + } + + public static KnnVectorQueryBuilder fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } private final String fieldName; private final float[] queryVector; - private final byte[] byteQueryVector; private final int numCands; - private final List filterQueries; + private final List filterQueries = new ArrayList<>(); private final Float vectorSimilarity; public 
KnnVectorQueryBuilder(String fieldName, float[] queryVector, int numCands, Float vectorSimilarity) { + if (numCands > NUM_CANDS_LIMIT) { + throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot exceed [" + NUM_CANDS_LIMIT + "]"); + } + if (queryVector == null) { + throw new IllegalArgumentException("[" + QUERY_VECTOR_FIELD.getPreferredName() + "] must be provided"); + } this.fieldName = fieldName; - this.queryVector = Objects.requireNonNull(queryVector); - this.byteQueryVector = null; - this.numCands = numCands; - this.filterQueries = new ArrayList<>(); - this.vectorSimilarity = vectorSimilarity; - } - - public KnnVectorQueryBuilder(String fieldName, byte[] queryVector, int numCands, Float vectorSimilarity) { - this.fieldName = fieldName; - this.queryVector = null; - this.byteQueryVector = Objects.requireNonNull(queryVector); - this.numCands = numCands; - this.filterQueries = new ArrayList<>(); - this.vectorSimilarity = vectorSimilarity; - } - - // Tests only - KnnVectorQueryBuilder(String fieldName, byte[] queryVector, float[] floatQueryVector, int numCands, Float vectorSimilarity) { - assert queryVector != null ^ floatQueryVector != null; - this.fieldName = fieldName; - this.queryVector = floatQueryVector; - this.byteQueryVector = queryVector; + this.queryVector = queryVector; this.numCands = numCands; - this.filterQueries = new ArrayList<>(); this.vectorSimilarity = vectorSimilarity; } @@ -85,17 +114,16 @@ public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); this.numCands = in.readVInt(); - if (in.getTransportVersion().before(TransportVersions.V_8_7_0)) { + if (in.getTransportVersion().before(TransportVersions.V_8_7_0) + || in.getTransportVersion().onOrAfter(TransportVersions.KNN_AS_QUERY_ADDED)) { this.queryVector = in.readFloatArray(); - this.byteQueryVector = null; } else { - this.queryVector = in.readBoolean() ? 
in.readFloatArray() : null; - this.byteQueryVector = in.readBoolean() ? in.readByteArray() : null; + in.readBoolean(); + this.queryVector = in.readFloatArray(); + in.readBoolean(); // used for byteQueryVector, which was always null } - if (in.getTransportVersion().before(TransportVersions.V_8_2_0)) { - this.filterQueries = new ArrayList<>(); - } else { - this.filterQueries = readQueries(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { + this.filterQueries.addAll(readQueries(in)); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { this.vectorSimilarity = in.readOptionalFloat(); @@ -113,11 +141,6 @@ public float[] queryVector() { return queryVector; } - @Nullable - public byte[] getByteQueryVector() { - return byteQueryVector; - } - @Nullable public Float getVectorSimilarity() { return vectorSimilarity; @@ -147,28 +170,14 @@ public KnnVectorQueryBuilder addFilterQueries(List filterQueries) protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeVInt(numCands); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { - boolean queryVectorNotNull = queryVector != null; - out.writeBoolean(queryVectorNotNull); - if (queryVectorNotNull) { - out.writeFloatArray(queryVector); - } - boolean byteVectorNotNull = byteQueryVector != null; - out.writeBoolean(byteVectorNotNull); - if (byteVectorNotNull) { - out.writeByteArray(byteQueryVector); - } + + if (out.getTransportVersion().before(TransportVersions.V_8_7_0) + || out.getTransportVersion().onOrAfter(TransportVersions.KNN_AS_QUERY_ADDED)) { + out.writeFloatArray(queryVector); } else { - final float[] f; - if (queryVector != null) { - f = queryVector; - } else { - f = new float[byteQueryVector.length]; - for (int i = 0; i < byteQueryVector.length; i++) { - f[i] = byteQueryVector[i]; - } - } - out.writeFloatArray(f); + out.writeBoolean(true); + out.writeFloatArray(queryVector); + out.writeBoolean(false); // used for 
byteQueryVector, which was always null } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { writeQueries(out, filterQueries); @@ -180,21 +189,21 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME) - .field("field", fieldName) - .field("vector", queryVector != null ? queryVector : byteQueryVector) - .field("num_candidates", numCands); + builder.startObject(NAME); + builder.field(FIELD_FIELD.getPreferredName(), fieldName); + builder.field(QUERY_VECTOR_FIELD.getPreferredName(), queryVector); + builder.field(NUM_CANDS_FIELD.getPreferredName(), numCands); if (vectorSimilarity != null) { - builder.field("similarity", vectorSimilarity); + builder.field(VECTOR_SIMILARITY_FIELD.getPreferredName(), vectorSimilarity); } if (filterQueries.isEmpty() == false) { - builder.startArray("filters"); + builder.startArray(FILTER_FIELD.getPreferredName()); for (QueryBuilder filterQuery : filterQueries) { filterQuery.toXContent(builder, params); } builder.endArray(); } - + boostAndQueryNameToXContent(builder); builder.endObject(); } @@ -204,11 +213,11 @@ public String getWriteableName() { } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException { boolean changed = false; List rewrittenQueries = new ArrayList<>(filterQueries.size()); for (QueryBuilder query : filterQueries) { - QueryBuilder rewrittenQuery = query.rewrite(queryRewriteContext); + QueryBuilder rewrittenQuery = query.rewrite(ctx); if (rewrittenQuery instanceof MatchNoneQueryBuilder) { return rewrittenQuery; } @@ -218,9 +227,9 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws rewrittenQueries.add(rewrittenQuery); } if (changed) { - return byteQueryVector != null - ? 
new KnnVectorQueryBuilder(fieldName, byteQueryVector, numCands, vectorSimilarity).addFilterQueries(rewrittenQueries) - : new KnnVectorQueryBuilder(fieldName, queryVector, numCands, vectorSimilarity).addFilterQueries(rewrittenQueries); + return new KnnVectorQueryBuilder(fieldName, queryVector, numCands, vectorSimilarity).boost(boost) + .queryName(queryName) + .addFilterQueries(rewrittenQueries); } return this; } @@ -238,59 +247,53 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { ); } - String parentPath = context.nestedLookup().getNestedParent(fieldName); final BitSetProducer parentFilter; BooleanQuery.Builder builder = new BooleanQuery.Builder(); for (QueryBuilder query : this.filterQueries) { builder.add(query.toQuery(context), BooleanClause.Occur.FILTER); } + if (context.getAliasFilter() != null) { + builder.add(context.getAliasFilter().toQuery(context), BooleanClause.Occur.FILTER); + } BooleanQuery booleanQuery = builder.build(); Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery; DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType; + String parentPath = context.nestedLookup().getNestedParent(fieldName); if (parentPath != null) { - NestedObjectMapper mapper = context.nestedLookup().getNestedMappers().get(parentPath); - NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper(); - if (objectMapper == null) { - parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); + NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper(); + if (originalObjectMapper != null) { + try { + // we are in a nested context, to get the parent filter we need to go up one level + context.nestedScope().previousLevel(); + NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper(); + parentFilter = objectMapper == null + ? 
context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())) + : context.bitsetFilter(objectMapper.nestedTypeFilter()); + } finally { + context.nestedScope().nextLevel(originalObjectMapper); + } } else { - parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); + // we are NOT in a nested context, coming from the top level knn search + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); } - try { - context.nestedScope().nextLevel(mapper); - if (filterQuery != null) { - filterQuery = new ToChildBlockJoinQuery(filterQuery, parentFilter); - } - return queryVector != null - ? vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, parentFilter) - : vectorFieldType.createKnnQuery(byteQueryVector, numCands, filterQuery, vectorSimilarity, parentFilter); - } finally { - context.nestedScope().previousLevel(); + if (filterQuery != null) { + filterQuery = new ToChildBlockJoinQuery(filterQuery, parentFilter); } + return vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, parentFilter); } - - return queryVector != null - ? 
vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, null) - : vectorFieldType.createKnnQuery(byteQueryVector, numCands, filterQuery, vectorSimilarity, null); + return vectorFieldType.createKnnQuery(queryVector, numCands, filterQuery, vectorSimilarity, null); } @Override protected int doHashCode() { - return Objects.hash( - fieldName, - Arrays.hashCode(queryVector), - Arrays.hashCode(byteQueryVector), - numCands, - filterQueries, - vectorSimilarity - ); + return Objects.hash(fieldName, Arrays.hashCode(queryVector), numCands, filterQueries, vectorSimilarity); } @Override protected boolean doEquals(KnnVectorQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Arrays.equals(queryVector, other.queryVector) - && Arrays.equals(byteQueryVector, other.byteQueryVector) && numCands == other.numCands && Objects.equals(filterQueries, other.filterQueries) && Objects.equals(vectorSimilarity, other.vectorSimilarity); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java index f52f8ecc1aff..0bb170ed0443 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java @@ -76,19 +76,12 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected KnnVectorQueryBuilder doCreateTestQueryBuilder() { String fieldName = randomBoolean() ? VECTOR_FIELD : VECTOR_ALIAS_FIELD; - byte[] byteVector = new byte[VECTOR_DIMENSION]; float[] vector = new float[VECTOR_DIMENSION]; for (int i = 0; i < vector.length; i++) { - vector[i] = randomFloat(); - byteVector[i] = randomByte(); + vector[i] = elementType().equals(DenseVectorFieldMapper.ElementType.BYTE) ? 
randomByte() : randomFloat(); } int numCands = randomIntBetween(1, 1000); - - KnnVectorQueryBuilder queryBuilder = switch (elementType()) { - case BYTE -> new KnnVectorQueryBuilder(fieldName, byteVector, numCands, randomBoolean() ? null : randomFloat()); - case FLOAT -> new KnnVectorQueryBuilder(fieldName, vector, numCands, randomBoolean() ? null : randomFloat()); - }; - + KnnVectorQueryBuilder queryBuilder = new KnnVectorQueryBuilder(fieldName, vector, numCands, randomBoolean() ? null : randomFloat()); if (randomBoolean()) { List filters = new ArrayList<>(); int numFilters = randomIntBetween(1, 5); @@ -126,7 +119,12 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query filterQuery = booleanQuery.clauses().isEmpty() ? null : booleanQuery; // The field should always be resolved to the concrete field Query knnVectorQueryBuilt = switch (elementType()) { - case BYTE -> new KnnByteVectorQuery(VECTOR_FIELD, queryBuilder.getByteQueryVector(), queryBuilder.numCands(), filterQuery); + case BYTE -> new KnnByteVectorQuery( + VECTOR_FIELD, + getByteQueryVector(queryBuilder.queryVector()), + queryBuilder.numCands(), + filterQuery + ); case FLOAT -> new KnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); }; if (query instanceof VectorSimilarityQuery vectorSimilarityQuery) { @@ -168,7 +166,7 @@ public void testValidOutput() { { "knn" : { "field" : "vector", - "vector" : [ + "query_vector" : [ 1.0, 2.0, 3.0 @@ -195,26 +193,15 @@ public void testMustRewrite() throws IOException { } public void testBWCVersionSerializationFilters() throws IOException { - float[] bwcFloat = new float[VECTOR_DIMENSION]; KnnVectorQueryBuilder query = createTestQueryBuilder(); - if (query.queryVector() != null) { - bwcFloat = query.queryVector(); - } else { - for (int i = 0; i < query.getByteQueryVector().length; i++) { - bwcFloat[i] = query.getByteQueryVector()[i]; - } - } - - KnnVectorQueryBuilder queryNoFilters = new 
KnnVectorQueryBuilder(query.getFieldName(), bwcFloat, query.numCands(), null).queryName( - query.queryName() - ).boost(query.boost()); - + KnnVectorQueryBuilder queryNoFilters = new KnnVectorQueryBuilder(query.getFieldName(), query.queryVector(), query.numCands(), null) + .queryName(query.queryName()) + .boost(query.boost()); TransportVersion beforeFilterVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_8_0_0, TransportVersions.V_8_1_0 ); - assertBWCSerialization(query, queryNoFilters, beforeFilterVersion); } @@ -222,7 +209,6 @@ public void testBWCVersionSerializationSimilarity() throws IOException { KnnVectorQueryBuilder query = createTestQueryBuilder(); KnnVectorQueryBuilder queryNoSimilarity = new KnnVectorQueryBuilder( query.getFieldName(), - query.getByteQueryVector(), query.queryVector(), query.numCands(), null @@ -230,27 +216,21 @@ public void testBWCVersionSerializationSimilarity() throws IOException { assertBWCSerialization(query, queryNoSimilarity, TransportVersions.V_8_7_0); } - public void testBWCVersionSerializationByteQuery() throws IOException { - float[] bwcFloat = new float[VECTOR_DIMENSION]; + public void testBWCVersionSerializationQuery() throws IOException { KnnVectorQueryBuilder query = createTestQueryBuilder(); - if (query.queryVector() != null) { - bwcFloat = query.queryVector(); - } else { - for (int i = 0; i < query.getByteQueryVector().length; i++) { - bwcFloat[i] = query.getByteQueryVector()[i]; - } - } - KnnVectorQueryBuilder queryNoByteQuery = new KnnVectorQueryBuilder(query.getFieldName(), bwcFloat, query.numCands(), null) - .queryName(query.queryName()) - .boost(query.boost()) - .addFilterQueries(query.filterQueries()); - - TransportVersion beforeByteQueryVersion = TransportVersionUtils.randomVersionBetween( + TransportVersion differentQueryVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_8_2_0, - TransportVersions.V_8_6_0 + TransportVersions.KNN_AS_QUERY_ADDED ); - 
assertBWCSerialization(query, queryNoByteQuery, beforeByteQueryVersion); + Float similarity = differentQueryVersion.before(TransportVersions.V_8_8_0) ? null : query.getVectorSimilarity(); + KnnVectorQueryBuilder queryOlderVersion = new KnnVectorQueryBuilder( + query.getFieldName(), + query.queryVector(), + query.numCands(), + similarity + ).queryName(query.queryName()).boost(query.boost()).addFilterQueries(query.filterQueries()); + assertBWCSerialization(query, queryOlderVersion, differentQueryVersion); } private void assertBWCSerialization(QueryBuilder newQuery, QueryBuilder bwcQuery, TransportVersion version) throws IOException { @@ -267,18 +247,11 @@ private void assertBWCSerialization(QueryBuilder newQuery, QueryBuilder bwcQuery } } - @Override - public void testUnknownObjectException() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); - } - - @Override - public void testFromXContent() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); - } - - @Override - public void testUnknownField() { - assumeTrue("Test isn't relevant, since query is never parsed from xContent", false); + private static byte[] getByteQueryVector(float[] queryVector) { + byte[] byteQueryVector = new byte[queryVector.length]; + for (int i = 0; i < queryVector.length; i++) { + byteQueryVector[i] = (byte) queryVector[i]; + } + return byteQueryVector; } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml index 5726d75422e2..ca07b888ab5c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml @@ -124,3 +124,100 @@ setup: match: title: query: "title" + +--- +"Test pinned query with knn query": 
+ - skip: + version: ' - 8.11.99' + reason: 'knn as query added in 8.12' + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + my_name: + type: keyword + store: true + aliases: + my_alias: + filter: + term: + my_name: v2 + my_alias1: + filter: + term: + my_name: v1 + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], "my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + pinned: + ids: [ 8, 9, 10 ] + organic: + knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { hits.total.value: 8 } # 5 knn results + extra results from pinned query + - match: { hits.hits.0._id: "8" } + - match: { hits.hits.1._id: "9" } + - match: { hits.hits.2._id: "10" } + - match: { hits.hits.3._id: "1" } + - match: { hits.hits.4._id: "2" } + - match: { hits.hits.5._id: "3" } + - match: { hits.hits.6._id: "4" } + - match: { hits.hits.7._id: "5" } + + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + pinned: + ids: [ 3, 4, 5 ] + 
organic: + knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], num_candidates: 5 } + - match: { hits.total.value: 5 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "4" } + - match: { hits.hits.2._id: "5" } + - match: { hits.hits.3._id: "1" } + - match: { hits.hits.4._id: "2" } From df9002acca1d56bcfc7429f6b4ba59bc613b82af Mon Sep 17 00:00:00 2001 From: sunyoung-dev Date: Thu, 2 Nov 2023 03:31:03 +0900 Subject: [PATCH 22/47] [DOCS] Fix painless context _id variable (#101316) --- .../painless-contexts/painless-reindex-context.asciidoc | 2 +- .../painless-contexts/painless-update-by-query-context.asciidoc | 2 +- .../painless/painless-contexts/painless-update-context.asciidoc | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/painless/painless-contexts/painless-reindex-context.asciidoc b/docs/painless/painless-contexts/painless-reindex-context.asciidoc index 13b216bac634..9aae1ae70c5a 100644 --- a/docs/painless/painless-contexts/painless-reindex-context.asciidoc +++ b/docs/painless/painless-contexts/painless-reindex-context.asciidoc @@ -19,7 +19,7 @@ reindexed into a target index. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`):: The name of the index. -{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`):: The unique document id. `ctx['_version']` (`int`):: diff --git a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc index d8f9d4d7bae7..78a8b8d36d6b 100644 --- a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc @@ -20,7 +20,7 @@ result of query. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: The name of the index. 
-{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`, read-only):: The unique document id. `ctx['_version']` (`int`, read-only):: diff --git a/docs/painless/painless-contexts/painless-update-context.asciidoc b/docs/painless/painless-contexts/painless-update-context.asciidoc index f9ae3434827d..53b1008cfebf 100644 --- a/docs/painless/painless-contexts/painless-update-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-context.asciidoc @@ -18,7 +18,7 @@ add, modify, or delete fields within a single document. {ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: The name of the index. -{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: +{ref}/mapping-id-field.html[`ctx['_id']`] (`String`, read-only):: The unique document id. `ctx['_version']` (`int`, read-only):: From 2ea782b04b640b9f3291efe39381c82310fd5c8e Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 1 Nov 2023 21:50:11 +0100 Subject: [PATCH 23/47] Annotate build scan urls in BK for failed builds as error (#101674) * Annotate build scan urls in BK for failed builds as error * Tweak bk ui for failed builds and buildscans --- .../groovy/elasticsearch.build-scan.gradle | 37 ++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index e6bbaeb19e49..366a2954c670 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -129,19 +129,30 @@ buildScan { link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" } - buildScanPublished { scan -> - // Attach build scan link as build metadata - // See: https://buildkite.com/docs/pipelines/build-meta-data - new ProcessBuilder('buildkite-agent', 'meta-data', 
'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") - .start() - .waitFor() - - // Add a build annotation - // See: https://buildkite.com/docs/agent/v3/cli-annotate - def body = """

${System.getenv('BUILDKITE_LABEL')} :gradle: build ran: gradle ${gradle.startParameter.taskNames.join(' ')}
""" - new ProcessBuilder('buildkite-agent', 'annotate', '--context', 'gradle-build-scans', '--append', '--style', 'info', body) - .start() - .waitFor() + buildFinished { result -> + buildScanPublished { scan -> + // Attach build scan link as build metadata + // See: https://buildkite.com/docs/pipelines/build-meta-data + new ProcessBuilder('buildkite-agent', 'meta-data', 'set', "build-scan-${System.getenv('BUILDKITE_JOB_ID')}", "${scan.buildScanUri}") + .start() + .waitFor() + + // Add a build annotation + // See: https://buildkite.com/docs/agent/v3/cli-annotate + def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failure ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + new ProcessBuilder( + 'buildkite-agent', + 'annotate', + '--context', + 'gradle-build-scans', + '--append', + '--style', + result.failure ? 'error' : 'info', + body + ) + .start() + .waitFor() + } } } else { tag 'LOCAL' From 680a7f22567610dd040c4de2dd33059f0e2d24ca Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 1 Nov 2023 21:11:26 +0000 Subject: [PATCH 24/47] AwaitsFix for #101695 --- .../aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java index 1bdc39fdc8e5..552a3596916a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java @@ -143,6 +143,7 @@ public void testMultiValuedDocs() throws IOException { testWithSeveralDocs(LuceneTestCase::rarely, null, randomPrecision()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101695") public void testBoundedMultiValuedDocs() throws IOException { int precision = randomPrecision(); testWithSeveralDocs(LuceneTestCase::rarely, randomBBox(precision), precision); From c10f94f3ace1677eb3d3800f8ef399578a214541 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 2 Nov 2023 10:25:21 +0200 Subject: [PATCH 25/47] Remove unused function (#101678) Small clean up of unused method in `ShardsAvailabilityHealthIndicatorServiceTests.java`. 
--- ...vailabilityHealthIndicatorServiceTests.java | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java index 45e8fe4e525c..11adfc4f8136 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java @@ -1944,24 +1944,6 @@ private static IndexRoutingTable index(String name, ShardAllocation primaryState ); } - private static IndexRoutingTable frozenIndex(String name, ShardAllocation primaryState, String originalIndex) { - return index( - IndexMetadata.builder(name) - .settings( - Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, originalIndex) - .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE) - .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, randomBoolean()) - .build() - ) - .numberOfShards(1) - .numberOfReplicas(0) - .build(), - primaryState - ); - } - private static IndexRoutingTable index(IndexMetadata indexMetadata, ShardAllocation primaryState, ShardAllocation... 
replicaStates) { var index = indexMetadata.getIndex(); var shardId = new ShardId(index, 0); From f02bbc356de75393bba70dbfbbb6f6a0020ec1fd Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 2 Nov 2023 09:33:12 +0100 Subject: [PATCH 26/47] Make DockerSupportService configuration cache compatible (#101140) Allow filtering docker command output for better cc compatibility --- .../gradle/internal/docker/DockerResult.java | 70 ++++++++++++++++ .../internal/docker/DockerSupportService.java | 84 +++++++------------ .../internal/docker/DockerValueSource.java | 72 ++++++++++++++++ 3 files changed, 171 insertions(+), 55 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java new file mode 100644 index 000000000000..d101c0046f92 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerResult.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.docker; + +import java.util.Objects; + +/** + * This class models the result of running a command. It captures the exit code, standard output and standard error and allows + * applying String filter for stdout as this is intended to create configuration cache compatible output which + * aims to be agnostic. 
+ */ +public class DockerResult { + + private int exitCode; + private String stdout; + private String stderr; + + public DockerResult(int exitCode, String stdout, String stderr) { + this.exitCode = exitCode; + this.stdout = stdout; + this.stderr = stderr; + } + + public int getExitCode() { + return exitCode; + } + + public String getStdout() { + return stdout; + } + + public String getStderr() { + return stderr; + } + + public void setExitCode(int exitCode) { + this.exitCode = exitCode; + } + + public void setStdout(String stdout) { + this.stdout = stdout; + } + + public void setStderr(String stderr) { + this.stderr = stderr; + } + + public boolean isSuccess() { + return exitCode == 0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DockerResult that = (DockerResult) o; + return exitCode == that.exitCode && Objects.equals(stdout, that.stdout) && Objects.equals(stderr, that.stderr); + } + + @Override + public int hashCode() { + return Objects.hash(exitCode, stdout, stderr); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java index 2f702b340130..84728d031c40 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java @@ -14,12 +14,10 @@ import org.gradle.api.GradleException; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; -import org.gradle.process.ExecOperations; -import org.gradle.process.ExecResult; -import java.io.ByteArrayOutputStream; import java.io.File; 
import java.io.IOException; import java.nio.file.Files; @@ -56,12 +54,12 @@ public abstract class DockerSupportService implements BuildService ""); - // If docker all checks out, see if docker-compose is available and working Optional composePath = getDockerComposePath(); if (lastResult.isSuccess() && composePath.isPresent()) { isComposeAvailable = runCommand(composePath.get(), "version").isSuccess(); @@ -109,9 +103,12 @@ public DockerAvailability getDockerAvailability() { // Now let's check if buildx is available and what supported platforms exist if (lastResult.isSuccess()) { - Result buildxResult = runCommand(dockerPath, "buildx", "inspect", "--bootstrap"); + DockerResult buildxResult = runCommand( + Arrays.asList(dockerPath, "buildx", "inspect", "--bootstrap"), + input -> input.lines().filter(l -> l.startsWith("Platforms:")).collect(Collectors.joining("\n")) + ); if (buildxResult.isSuccess()) { - supportedArchitectures = buildxResult.stdout() + supportedArchitectures = buildxResult.getStdout() .lines() .filter(l -> l.startsWith("Platforms:")) .map(l -> l.substring(10)) @@ -127,6 +124,8 @@ public DockerAvailability getDockerAvailability() { } } } + } else { + dockerPath = null; } boolean isAvailable = isVersionHighEnough && lastResult != null && lastResult.isSuccess(); @@ -146,6 +145,17 @@ public DockerAvailability getDockerAvailability() { return this.dockerAvailability; } + private DockerResult runCommand(List args, DockerValueSource.OutputFilter outputFilter) { + return providerFactory.of(DockerValueSource.class, params -> { + params.getParameters().getArgs().addAll(args); + params.getParameters().getOutputFilter().set(outputFilter); + }).get(); + } + + private DockerResult runCommand(String... 
args) { + return runCommand(Arrays.asList(args), input -> input); + } + private boolean dockerDaemonIsRunning(String lastResultOutput) { return lastResultOutput.contains("Cannot connect to the Docker daemon") == false; } @@ -198,8 +208,8 @@ void failIfDockerUnavailable(List tasks) { availability.version == null ? "" : " v" + availability.version, tasks.size() > 1 ? "s" : "", String.join("\n", tasks), - availability.lastCommand.exitCode, - availability.lastCommand.stderr.trim() + availability.lastCommand.getExitCode(), + availability.lastCommand.getStderr().trim() ); throwDockerRequiredException(message); } @@ -319,32 +329,6 @@ private void throwDockerRequiredException(final String message, Exception e) { ); } - /** - * Runs a command and captures the exit code, standard output and standard error. - * - * @param args the command and any arguments to execute - * @return a object that captures the result of running the command. If an exception occurring - * while running the command, or the process was killed after reaching the 10s timeout, - * then the exit code will be -1. - */ - private Result runCommand(String... args) { - if (args.length == 0) { - throw new IllegalArgumentException("Cannot execute with no command"); - } - - ByteArrayOutputStream stdout = new ByteArrayOutputStream(); - ByteArrayOutputStream stderr = new ByteArrayOutputStream(); - - final ExecResult execResult = execOperations.exec(spec -> { - // The redundant cast is to silence a compiler warning. - spec.setCommandLine((Object[]) args); - spec.setStandardOutput(stdout); - spec.setErrorOutput(stderr); - spec.setIgnoreExitValue(true); - }); - return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); - } - /** * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. */ @@ -377,22 +361,12 @@ public record DockerAvailability( Version version, // Information about the last command executes while probing Docker, or null. 
- Result lastCommand, + DockerResult lastCommand, // Supported build architectures Set supportedArchitectures ) {} - /** - * This class models the result of running a command. It captures the exit code, standard output and standard error. - */ - private record Result(int exitCode, String stdout, String stderr) { - - boolean isSuccess() { - return exitCode == 0; - } - } - interface Parameters extends BuildServiceParameters { File getExclusionsFile(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java new file mode 100644 index 000000000000..d71208b624d7 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerValueSource.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.docker; + +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.gradle.process.ExecOperations; +import org.gradle.process.ExecResult; + +import java.io.ByteArrayOutputStream; +import java.util.List; + +import javax.inject.Inject; + +public abstract class DockerValueSource implements ValueSource { + public interface OutputFilter { + String filter(String input); + } + + interface Parameters extends ValueSourceParameters { + ListProperty getArgs(); + + Property getOutputFilter(); + } + + @Inject + abstract protected ExecOperations getExecOperations(); + + @Override + public DockerResult obtain() { + return runCommand(getParameters().getArgs().get()); + } + + /** + * Runs a command and captures the exit code, standard output and standard error. + * + * @param args the command and any arguments to execute + * @return a object that captures the result of running the command. If an exception occurring + * while running the command, or the process was killed after reaching the 10s timeout, + * then the exit code will be -1. + */ + private DockerResult runCommand(List args) { + if (args.size() == 0) { + throw new IllegalArgumentException("Cannot execute with no command"); + } + + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + + final ExecResult execResult = getExecOperations().exec(spec -> { + // The redundant cast is to silence a compiler warning. 
+ spec.setCommandLine(args); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + return new DockerResult(execResult.getExitValue(), filtered(stdout.toString()), stderr.toString()); + } + + private String filtered(String input) { + return getParameters().getOutputFilter().get().filter(input); + } + +} From 70ecb1255666ae63631c89e56b0b62804aec1059 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Thu, 2 Nov 2023 09:44:48 +0100 Subject: [PATCH 27/47] Add undesired shard count (#101426) This change add undesired shard (ones that are allocated not on the desired node) counts to the api output. --- docs/changelog/101426.yaml | 5 ++ .../cluster/get-desired-balance.asciidoc | 26 +++++- .../30_desired_balance.yml | 38 +++++++++ .../test/cluster.desired_balance/10_basic.yml | 38 +++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../TransportGetDesiredBalanceAction.java | 2 +- .../allocator/ClusterBalanceStats.java | 78 ++++++++++++++++-- .../DesiredBalanceResponseTests.java | 49 +++++++++-- .../allocator/ClusterBalanceStatsTests.java | 82 ++++++++++++++----- 9 files changed, 280 insertions(+), 39 deletions(-) create mode 100644 docs/changelog/101426.yaml diff --git a/docs/changelog/101426.yaml b/docs/changelog/101426.yaml new file mode 100644 index 000000000000..f9053ba1c1ec --- /dev/null +++ b/docs/changelog/101426.yaml @@ -0,0 +1,5 @@ +pr: 101426 +summary: Add undesired shard count +area: Allocation +type: enhancement +issues: [] diff --git a/docs/reference/cluster/get-desired-balance.asciidoc b/docs/reference/cluster/get-desired-balance.asciidoc index bd99f1d737bd..2628b5abca9f 100644 --- a/docs/reference/cluster/get-desired-balance.asciidoc +++ b/docs/reference/cluster/get-desired-balance.asciidoc @@ -6,7 +6,12 @@ NOTE: {cloud-only} -Exposes the desired balance and basic metrics. 
+Exposes: +* the desired balance computation and reconciliation stats +* balancing stats such as distribution of shards, disk and ingest forecasts + across nodes and data tiers (based on the current cluster state) +* routing table with each shard current and desired location +* cluster info with nodes disk usages [[get-desired-balance-request]] ==== {api-request-title} @@ -33,6 +38,8 @@ The API returns the following result: "reconciliation_time_in_millis": 0 }, "cluster_balance_stats" : { + "shard_count": 37, + "undesired_shard_allocation_count": 0, "tiers": { "data_hot" : { "shard_count" : { @@ -42,6 +49,13 @@ The API returns the following result: "average" : 2.3333333333333335, "std_dev" : 0.4714045207910317 }, + "undesired_shard_allocation_count" : { + "total" : 0.0, + "min" : 0.0, + "max" : 0.0, + "average" : 0.0, + "std_dev" : 0.0 + }, "forecast_write_load" : { "total" : 21.0, "min" : 6.0, @@ -72,6 +86,13 @@ The API returns the following result: "average" : 1.0, "std_dev" : 0.0 }, + "undesired_shard_allocation_count" : { + "total" : 0.0, + "min" : 0.0, + "max" : 0.0, + "average" : 0.0, + "std_dev" : 0.0 + }, "forecast_write_load" : { "total" : 0.0, "min" : 0.0, @@ -100,6 +121,7 @@ The API returns the following result: "node_id": "UPYt8VwWTt-IADAEbqpLxA", "roles": ["data_content"], "shard_count": 10, + "undesired_shard_allocation_count": 0, "forecast_write_load": 8.5, "forecast_disk_usage_bytes": 498435, "actual_disk_usage_bytes": 498435 @@ -108,6 +130,7 @@ The API returns the following result: "node_id": "bgC66tboTIeFQ0VgRGI4Gg", "roles": ["data_content"], "shard_count": 15, + "undesired_shard_allocation_count": 0, "forecast_write_load": 3.25, "forecast_disk_usage_bytes": 384935, "actual_disk_usage_bytes": 384935 @@ -116,6 +139,7 @@ The API returns the following result: "node_id": "2x1VTuSOQdeguXPdN73yRw", "roles": ["data_content"], "shard_count": 12, + "undesired_shard_allocation_count": 0, "forecast_write_load": 6.0, "forecast_disk_usage_bytes": 648766, 
"actual_disk_usage_bytes": 648766 diff --git a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml index 0c814fd0f969..f8b1de515552 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml +++ b/qa/smoke-test-multinode/src/yamlRestTest/resources/rest-api-spec/test/smoke_test_multinode/30_desired_balance.yml @@ -148,3 +148,41 @@ setup: _internal.get_desired_balance: { } - is_true: 'cluster_info' + +--- +"Test undesired_shard_allocation_count": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in in 8.12.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cluster.health: + index: test + wait_for_status: green + + - do: + cluster.state: {} + - set: { nodes._arbitrary_key_ : node_id } + - set: { nodes.$node_id.name : node_name } + + - do: + _internal.get_desired_balance: { } + + - gte: { 'cluster_balance_stats.shard_count' : 0 } + - gte: { 'cluster_balance_stats.undesired_shard_allocation_count' : 0 } + - gte: { 'cluster_balance_stats.nodes.$node_name.undesired_shard_allocation_count' : 0 } + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.total' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.min' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index 4f943abf1106..8e1d3431069c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -183,3 +183,41 @@ setup: - do: _internal.delete_desired_balance: { } + +--- +"Test undesired_shard_allocation_count": + + - skip: + version: " - 8.11.99" + reason: "undesired_shard_allocation_count added in in 8.12.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + cluster.health: + index: test + wait_for_status: green + + - do: + cluster.state: {} + - set: { nodes._arbitrary_key_ : node_id } + - set: { nodes.$node_id.name : node_name } + + - do: + _internal.get_desired_balance: { } + + - gte: { 'cluster_balance_stats.shard_count' : 0 } + - gte: { 'cluster_balance_stats.undesired_shard_allocation_count' : 0 } + - gte: { 'cluster_balance_stats.nodes.$node_name.undesired_shard_allocation_count' : 0 } + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.total' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.min' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.max' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.average' + - exists: 'cluster_balance_stats.tiers.data_content.undesired_shard_allocation_count.std_dev' diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5321d7a0a608..facc95422081 100644 --- 
a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -158,6 +158,7 @@ static TransportVersion def(int id) { public static final TransportVersion DSL_ERROR_STORE_INFORMATION_ENHANCED = def(8_527_00_0); public static final TransportVersion INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED = def(8_528_00_0); public static final TransportVersion KNN_AS_QUERY_ADDED = def(8_529_00_0); + public static final TransportVersion UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED = def(8_530_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java index b585e891a590..fc1179007952 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java @@ -95,7 +95,7 @@ protected void masterOperation( listener.onResponse( new DesiredBalanceResponse( desiredBalanceShardsAllocator.getStats(), - ClusterBalanceStats.createFrom(state, clusterInfo, writeLoadForecaster), + ClusterBalanceStats.createFrom(state, latestDesiredBalance, clusterInfo, writeLoadForecaster), createRoutingTable(state, latestDesiredBalance), clusterInfo ) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java index 853a26263fe9..5df5de43cffd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStats.java @@ -31,15 +31,18 @@ import java.util.Map; import 
java.util.function.ToDoubleFunction; -public record ClusterBalanceStats(Map tiers, Map nodes) - implements - Writeable, - ToXContentObject { +public record ClusterBalanceStats( + int shards, + int undesiredShardAllocations, + Map tiers, + Map nodes +) implements Writeable, ToXContentObject { - public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(Map.of(), Map.of()); + public static ClusterBalanceStats EMPTY = new ClusterBalanceStats(0, 0, Map.of(), Map.of()); public static ClusterBalanceStats createFrom( ClusterState clusterState, + DesiredBalance desiredBalance, ClusterInfo clusterInfo, WriteLoadForecaster writeLoadForecaster ) { @@ -50,32 +53,60 @@ public static ClusterBalanceStats createFrom( if (dataRoles.isEmpty()) { continue; } - var nodeStats = NodeBalanceStats.createFrom(routingNode, clusterState.metadata(), clusterInfo, writeLoadForecaster); + var nodeStats = NodeBalanceStats.createFrom( + routingNode, + clusterState.metadata(), + desiredBalance, + clusterInfo, + writeLoadForecaster + ); nodes.put(routingNode.node().getName(), nodeStats); for (DiscoveryNodeRole role : dataRoles) { tierToNodeStats.computeIfAbsent(role.roleName(), ignored -> new ArrayList<>()).add(nodeStats); } } - return new ClusterBalanceStats(Maps.transformValues(tierToNodeStats, TierBalanceStats::createFrom), nodes); + return new ClusterBalanceStats( + nodes.values().stream().mapToInt(NodeBalanceStats::shards).sum(), + nodes.values().stream().mapToInt(NodeBalanceStats::undesiredShardAllocations).sum(), + Maps.transformValues(tierToNodeStats, TierBalanceStats::createFrom), + nodes + ); } public static ClusterBalanceStats readFrom(StreamInput in) throws IOException { - return new ClusterBalanceStats(in.readImmutableMap(TierBalanceStats::readFrom), in.readImmutableMap(NodeBalanceStats::readFrom)); + return new ClusterBalanceStats( + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? 
in.readVInt() : -1, + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? in.readVInt() : -1, + in.readImmutableMap(TierBalanceStats::readFrom), + in.readImmutableMap(NodeBalanceStats::readFrom) + ); } @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(shards); + } + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(undesiredShardAllocations); + } out.writeMap(tiers, StreamOutput::writeWriteable); out.writeMap(nodes, StreamOutput::writeWriteable); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field("tiers").map(tiers).field("nodes").map(nodes).endObject(); + return builder.startObject() + .field("shard_count", shards) + .field("undesired_shard_allocation_count", undesiredShardAllocations) + .field("tiers", tiers) + .field("nodes", nodes) + .endObject(); } public record TierBalanceStats( MetricStats shardCount, + MetricStats undesiredShardAllocations, MetricStats forecastWriteLoad, MetricStats forecastShardSize, MetricStats actualShardSize @@ -84,6 +115,7 @@ public record TierBalanceStats( private static TierBalanceStats createFrom(List nodes) { return new TierBalanceStats( MetricStats.createFrom(nodes, it -> it.shards), + MetricStats.createFrom(nodes, it -> it.undesiredShardAllocations), MetricStats.createFrom(nodes, it -> it.forecastWriteLoad), MetricStats.createFrom(nodes, it -> it.forecastShardSize), MetricStats.createFrom(nodes, it -> it.actualShardSize) @@ -93,6 +125,9 @@ private static TierBalanceStats createFrom(List nodes) { public static TierBalanceStats readFrom(StreamInput in) throws IOException { return new TierBalanceStats( MetricStats.readFrom(in), + 
in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) + ? MetricStats.readFrom(in) + : new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), MetricStats.readFrom(in), MetricStats.readFrom(in), MetricStats.readFrom(in) @@ -102,6 +137,9 @@ public static TierBalanceStats readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { shardCount.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + undesiredShardAllocations.writeTo(out); + } forecastWriteLoad.writeTo(out); forecastShardSize.writeTo(out); actualShardSize.writeTo(out); @@ -111,6 +149,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.startObject() .field("shard_count", shardCount) + .field("undesired_shard_allocation_count", undesiredShardAllocations) .field("forecast_write_load", forecastWriteLoad) .field("forecast_disk_usage", forecastShardSize) .field("actual_disk_usage", actualShardSize) @@ -172,6 +211,7 @@ public record NodeBalanceStats( String nodeId, List roles, int shards, + int undesiredShardAllocations, double forecastWriteLoad, long forecastShardSize, long actualShardSize @@ -182,9 +222,11 @@ public record NodeBalanceStats( private static NodeBalanceStats createFrom( RoutingNode routingNode, Metadata metadata, + DesiredBalance desiredBalance, ClusterInfo clusterInfo, WriteLoadForecaster writeLoadForecaster ) { + int undesired = 0; double forecastWriteLoad = 0.0; long forecastShardSize = 0L; long actualShardSize = 0L; @@ -196,23 +238,37 @@ private static NodeBalanceStats createFrom( forecastWriteLoad += writeLoadForecaster.getForecastedWriteLoad(indexMetadata).orElse(0.0); forecastShardSize += indexMetadata.getForecastedShardSizeInBytes().orElse(shardSize); actualShardSize += shardSize; + if (isDesiredShardAllocation(shardRouting, 
desiredBalance) == false) { + undesired++; + } } return new NodeBalanceStats( routingNode.nodeId(), routingNode.node().getRoles().stream().map(DiscoveryNodeRole::roleName).toList(), routingNode.size(), + undesired, forecastWriteLoad, forecastShardSize, actualShardSize ); } + private static boolean isDesiredShardAllocation(ShardRouting shardRouting, DesiredBalance desiredBalance) { + if (shardRouting.relocating()) { + // relocating out shards are temporarily accepted + return true; + } + var assignment = desiredBalance.getAssignment(shardRouting.shardId()); + return assignment != null && assignment.nodeIds().contains(shardRouting.currentNodeId()); + } + public static NodeBalanceStats readFrom(StreamInput in) throws IOException { return new NodeBalanceStats( in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) ? in.readString() : UNKNOWN_NODE_ID, in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) ? in.readStringCollectionAsList() : List.of(), in.readInt(), + in.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED) ? 
in.readVInt() : -1, in.readDouble(), in.readLong(), in.readLong() @@ -228,6 +284,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(roles); } out.writeInt(shards); + if (out.getTransportVersion().onOrAfter(TransportVersions.UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED)) { + out.writeVInt(undesiredShardAllocations); + } out.writeDouble(forecastWriteLoad); out.writeLong(forecastShardSize); out.writeLong(actualShardSize); @@ -241,6 +300,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } return builder.field("roles", roles) .field("shard_count", shards) + .field("undesired_shard_allocation_count", undesiredShardAllocations) .field("forecast_write_load", forecastWriteLoad) .humanReadableField("forecast_disk_usage_bytes", "forecast_disk_usage", ByteSizeValue.ofBytes(forecastShardSize)) .humanReadableField("actual_disk_usage_bytes", "actual_disk_usage", ByteSizeValue.ofBytes(actualShardSize)) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java index 2154381d497c..3378ff0063bb 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java @@ -65,6 +65,8 @@ private DesiredBalanceStats randomDesiredBalanceStats() { private ClusterBalanceStats randomClusterBalanceStats() { return new ClusterBalanceStats( + randomNonNegativeInt(), + randomNonNegativeInt(), randomBoolean() ? 
Map.of(DiscoveryNodeRole.DATA_CONTENT_NODE_ROLE.roleName(), randomTierBalanceStats()) : randomSubsetOf( @@ -81,21 +83,27 @@ private ClusterBalanceStats randomClusterBalanceStats() { private ClusterBalanceStats.TierBalanceStats randomTierBalanceStats() { return new ClusterBalanceStats.TierBalanceStats( - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()), - new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()) + randomMetricStats(), + randomMetricStats(), + randomMetricStats(), + randomMetricStats(), + randomMetricStats() ); } + private ClusterBalanceStats.MetricStats randomMetricStats() { + return new ClusterBalanceStats.MetricStats(randomDouble(), randomDouble(), randomDouble(), randomDouble(), randomDouble()); + } + private ClusterBalanceStats.NodeBalanceStats randomNodeBalanceStats() { return new ClusterBalanceStats.NodeBalanceStats( randomAlphaOfLength(10), List.of(randomFrom("data_content", "data_hot", "data_warm", "data_cold")), - randomIntBetween(0, Integer.MAX_VALUE), + randomNonNegativeInt(), + randomNonNegativeInt(), randomDouble(), - randomLongBetween(0, Long.MAX_VALUE), - randomLongBetween(0, Long.MAX_VALUE) + randomNonNegativeLong(), + randomNonNegativeLong() ); } @@ -203,8 +211,13 @@ public void testToXContent() throws IOException { // cluster balance stats Map clusterBalanceStats = (Map) json.get("cluster_balance_stats"); - assertThat(clusterBalanceStats.keySet(), containsInAnyOrder("tiers", "nodes")); + assertThat(clusterBalanceStats.keySet(), containsInAnyOrder("shard_count", "undesired_shard_allocation_count", "tiers", "nodes")); + assertEquals(clusterBalanceStats.get("shard_count"), 
response.getClusterBalanceStats().shards()); + assertEquals( + clusterBalanceStats.get("undesired_shard_allocation_count"), + response.getClusterBalanceStats().undesiredShardAllocations() + ); // tier balance stats Map tiers = (Map) clusterBalanceStats.get("tiers"); assertEquals(tiers.keySet(), response.getClusterBalanceStats().tiers().keySet()); @@ -212,7 +225,13 @@ public void testToXContent() throws IOException { Map tierStats = (Map) tiers.get(entry.getKey()); assertThat( tierStats.keySet(), - containsInAnyOrder("shard_count", "forecast_write_load", "forecast_disk_usage", "actual_disk_usage") + containsInAnyOrder( + "shard_count", + "undesired_shard_allocation_count", + "forecast_write_load", + "forecast_disk_usage", + "actual_disk_usage" + ) ); Map shardCountStats = (Map) tierStats.get("shard_count"); @@ -223,6 +242,16 @@ public void testToXContent() throws IOException { assertEquals(shardCountStats.get("max"), entry.getValue().shardCount().max()); assertEquals(shardCountStats.get("std_dev"), entry.getValue().shardCount().stdDev()); + Map undesiredShardAllocationCountStats = (Map) tierStats.get( + "undesired_shard_allocation_count" + ); + assertThat(undesiredShardAllocationCountStats.keySet(), containsInAnyOrder("total", "average", "min", "max", "std_dev")); + assertEquals(undesiredShardAllocationCountStats.get("total"), entry.getValue().undesiredShardAllocations().total()); + assertEquals(undesiredShardAllocationCountStats.get("average"), entry.getValue().undesiredShardAllocations().average()); + assertEquals(undesiredShardAllocationCountStats.get("min"), entry.getValue().undesiredShardAllocations().min()); + assertEquals(undesiredShardAllocationCountStats.get("max"), entry.getValue().undesiredShardAllocations().max()); + assertEquals(undesiredShardAllocationCountStats.get("std_dev"), entry.getValue().undesiredShardAllocations().stdDev()); + Map forecastWriteLoadStats = (Map) tierStats.get("forecast_write_load"); assertThat(forecastWriteLoadStats.keySet(), 
containsInAnyOrder("total", "average", "min", "max", "std_dev")); assertEquals(forecastWriteLoadStats.get("total"), entry.getValue().forecastWriteLoad().total()); @@ -258,6 +287,7 @@ public void testToXContent() throws IOException { "node_id", "roles", "shard_count", + "undesired_shard_allocation_count", "forecast_write_load", "forecast_disk_usage_bytes", "actual_disk_usage_bytes" @@ -266,6 +296,7 @@ public void testToXContent() throws IOException { assertEquals(nodesStats.get("node_id"), entry.getValue().nodeId()); assertEquals(nodesStats.get("roles"), entry.getValue().roles()); assertEquals(nodesStats.get("shard_count"), entry.getValue().shards()); + assertEquals(nodesStats.get("undesired_shard_allocation_count"), entry.getValue().undesiredShardAllocations()); assertEquals(nodesStats.get("forecast_write_load"), entry.getValue().forecastWriteLoad()); assertEquals(nodesStats.get("forecast_disk_usage_bytes"), entry.getValue().forecastShardSize()); assertEquals(nodesStats.get("actual_disk_usage_bytes"), entry.getValue().actualShardSize()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java index 6f2866095c2e..e4b6c0275150 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterBalanceStatsTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -58,25 +59,33 @@ public void testStatsForSingleTierClusterWithNoForecasts() { List.of(indexSizes("index-1", 1L, 1L), indexSizes("index-2", 2L, 2L), indexSizes("index-3", 3L, 3L)) ); - var stats = ClusterBalanceStats.createFrom(clusterState, 
clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); assertThat( stats, equalTo( new ClusterBalanceStats( + 6, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(6.0, 2.0, 2.0, 2.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 4L, 4L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 3L, 3L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0.0, 5L, 5L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 4L, 4L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 3L, 3L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 0.0, 5L, 5L)) ) ) ) @@ -102,25 +111,33 @@ public void testStatsForSingleTierClusterWithForecasts() { List.of(indexSizes("index-1", 1L, 1L), indexSizes("index-2", 2L, 2L), indexSizes("index-3", 3L, 3L)) ); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); assertThat( stats, equalTo( new ClusterBalanceStats( + 6, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(6.0, 2.0, 2.0, 2.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new 
MetricStats(12.0, 3.5, 4.5, 4.0, stdDev(3.5, 4.0, 4.5)), new MetricStats(36.0, 10.0, 14.0, 12.0, stdDev(10.0, 12.0, 14.0)), new MetricStats(12.0, 3.0, 5.0, 4.0, stdDev(3.0, 5.0, 4.0)) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 3.5, 14L, 4L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 4.0, 12L, 3L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 4.5, 10L, 5L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 3.5, 14L, 4L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 4.0, 12L, 3L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 2, 0, 4.5, 10L, 5L)) ) ) ) @@ -157,7 +174,12 @@ public void testStatsForHotWarmClusterWithForecasts() { ) ); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom( + clusterState, + createDesiredBalance(clusterState), + clusterInfo, + TEST_WRITE_LOAD_FORECASTER + ); var hotRoleNames = List.of(DATA_CONTENT_NODE_ROLE.roleName(), DATA_HOT_NODE_ROLE.roleName()); var warmRoleNames = List.of(DATA_WARM_NODE_ROLE.roleName()); @@ -165,10 +187,13 @@ public void testStatsForHotWarmClusterWithForecasts() { stats, equalTo( new ClusterBalanceStats( + 10, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(7.0, 2.0, 3.0, 7.0 / 3, stdDev(3.0, 2.0, 2.0)), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(21.0, 6.0, 8.5, 7.0, stdDev(6.0, 8.5, 6.5)), new MetricStats(36.0, 10.0, 16.0, 12.0, stdDev(10.0, 10.0, 16.0)), new MetricStats(34.0, 9.0, 15.0, 34.0 / 3, stdDev(9.0, 10.0, 15.0)) @@ -176,6 +201,7 @@ public void testStatsForHotWarmClusterWithForecasts() { 
DATA_HOT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(7.0, 2.0, 3.0, 7.0 / 3, stdDev(3.0, 2.0, 2.0)), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(21.0, 6.0, 8.5, 7.0, stdDev(6.0, 8.5, 6.5)), new MetricStats(36.0, 10.0, 16.0, 12.0, stdDev(10.0, 10.0, 16.0)), new MetricStats(34.0, 9.0, 15.0, 34.0 / 3, stdDev(9.0, 10.0, 15.0)) @@ -184,17 +210,18 @@ public void testStatsForHotWarmClusterWithForecasts() { new ClusterBalanceStats.TierBalanceStats( new MetricStats(3.0, 1.0, 1.0, 1.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(42.0, 12.0, 18.0, 14.0, stdDev(12.0, 12.0, 18.0)), new MetricStats(42.0, 12.0, 18.0, 14.0, stdDev(12.0, 12.0, 18.0)) ) ), Map.ofEntries( - Map.entry("node-hot-1", new NodeBalanceStats("node-hot-1", hotRoleNames, 3, 8.5, 16L, 15L)), - Map.entry("node-hot-2", new NodeBalanceStats("node-hot-2", hotRoleNames, 2, 6.0, 10L, 9L)), - Map.entry("node-hot-3", new NodeBalanceStats("node-hot-3", hotRoleNames, 2, 6.5, 10L, 10L)), - Map.entry("node-warm-1", new NodeBalanceStats("node-warm-1", warmRoleNames, 1, 0.0, 12L, 12L)), - Map.entry("node-warm-2", new NodeBalanceStats("node-warm-2", warmRoleNames, 1, 0.0, 12L, 12L)), - Map.entry("node-warm-3", new NodeBalanceStats("node-warm-3", warmRoleNames, 1, 0.0, 18L, 18L)) + Map.entry("node-hot-1", new NodeBalanceStats("node-hot-1", hotRoleNames, 3, 0, 8.5, 16L, 15L)), + Map.entry("node-hot-2", new NodeBalanceStats("node-hot-2", hotRoleNames, 2, 0, 6.0, 10L, 9L)), + Map.entry("node-hot-3", new NodeBalanceStats("node-hot-3", hotRoleNames, 2, 0, 6.5, 10L, 10L)), + Map.entry("node-warm-1", new NodeBalanceStats("node-warm-1", warmRoleNames, 1, 0, 0.0, 12L, 12L)), + Map.entry("node-warm-2", new NodeBalanceStats("node-warm-2", warmRoleNames, 1, 0, 0.0, 12L, 12L)), + Map.entry("node-warm-3", new NodeBalanceStats("node-warm-3", warmRoleNames, 1, 0, 0.0, 18L, 18L)) ) ) ) @@ -213,25 +240,28 @@ public void 
testStatsForNoIndicesInTier() { ); var clusterInfo = createClusterInfo(List.of()); - var stats = ClusterBalanceStats.createFrom(clusterState, clusterInfo, TEST_WRITE_LOAD_FORECASTER); + var stats = ClusterBalanceStats.createFrom(clusterState, null, clusterInfo, TEST_WRITE_LOAD_FORECASTER); assertThat( stats, equalTo( new ClusterBalanceStats( + 0, + 0, Map.of( DATA_CONTENT_NODE_ROLE.roleName(), new ClusterBalanceStats.TierBalanceStats( new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), + new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0), new MetricStats(0.0, 0.0, 0.0, 0.0, 0.0) ) ), Map.ofEntries( - Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)), - Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)), - Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0.0, 0L, 0L)) + Map.entry("node-1", new NodeBalanceStats("node-1", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)), + Map.entry("node-2", new NodeBalanceStats("node-2", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)), + Map.entry("node-3", new NodeBalanceStats("node-3", List.of(DATA_CONTENT_NODE_ROLE.roleName()), 0, 0, 0.0, 0L, 0L)) ) ) ) @@ -269,6 +299,20 @@ private static ClusterState createClusterState(List nodes, List(); + for (var indexRoutingTable : state.getRoutingTable()) { + for (int i = 0; i < indexRoutingTable.size(); i++) { + var indexShardRoutingTable = indexRoutingTable.shard(i); + assignments.put( + indexShardRoutingTable.shardId(), + new ShardAssignment(Set.of(indexShardRoutingTable.primaryShard().currentNodeId()), 1, 0, 0) + ); + } + } + return new DesiredBalance(1, assignments); + } + private static Tuple startedIndex( String indexName, @Nullable Double indexWriteLoadForecast, From 1bb1c7be047380f01051e075c02ce563c02464a9 Mon Sep 17 
00:00:00 2001 From: Simon Cooper Date: Thu, 2 Nov 2023 08:51:39 +0000 Subject: [PATCH 28/47] Create a historical feature for the get settings rest action (#101684) --- server/src/main/java/module-info.java | 3 ++- .../elasticsearch/action/ActionModule.java | 5 ++-- .../elasticsearch/node/NodeConstruction.java | 6 ++++- .../org/elasticsearch/rest/RestFeatures.java | 23 +++++++++++++++++++ .../cluster/RestClusterGetSettingsAction.java | 15 ++++++------ ...lasticsearch.features.FeatureSpecification | 1 + .../action/ActionModuleTests.java | 6 ++--- .../xpack/security/SecurityTests.java | 2 +- 8 files changed, 46 insertions(+), 15 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/rest/RestFeatures.java diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 8e636a93e4f0..65792ebcccc6 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -406,7 +406,8 @@ provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.features.FeaturesSupportedSpecification, - org.elasticsearch.health.HealthFeature; + org.elasticsearch.health.HealthFeature, + org.elasticsearch.rest.RestFeatures; uses org.elasticsearch.plugins.internal.SettingsExtension; uses RestExtension; diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6ac451d5bc93..a855b6b8ee7e 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -282,6 +282,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards; import org.elasticsearch.health.GetHealthAction; import 
org.elasticsearch.health.RestGetHealthAction; @@ -857,7 +858,7 @@ private static ActionFilters setupActionFilters(List actionPlugins return new ActionFilters(Set.copyOf(finalFilters)); } - public void initRestHandlers(Supplier nodesInCluster) { + public void initRestHandlers(Supplier nodesInCluster, Predicate clusterSupportsFeature) { List catActions = new ArrayList<>(); Predicate catActionsFilter = restExtension.getCatActionsFilter(); Predicate restFilter = restExtension.getActionsFilter(); @@ -889,7 +890,7 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestClusterStateAction(settingsFilter, threadPool)); registerHandler.accept(new RestClusterHealthAction()); registerHandler.accept(new RestClusterUpdateSettingsAction()); - registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, nodesInCluster)); + registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, clusterSupportsFeature)); registerHandler.accept(new RestClusterRerouteAction(settingsFilter)); registerHandler.accept(new RestClusterSearchShardsAction()); registerHandler.accept(new RestPendingClusterTasksAction()); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 45ad99672082..20c852993586 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.CoordinationDiagnosticsService; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.MasterHistoryService; @@ -1220,7 +1221,10 @@ record 
PluginServiceInstances( this.xContentRegistry = xContentRegistry; logger.debug("initializing HTTP handlers ..."); - actionModule.initRestHandlers(() -> clusterService.state().nodesIfRecovered()); + actionModule.initRestHandlers(() -> clusterService.state().nodesIfRecovered(), f -> { + ClusterState state = clusterService.state(); + return state.clusterRecovered() && featureService.clusterHasFeature(state, f); + }); logger.info("initialized"); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java new file mode 100644 index 000000000000..73b788d63b2a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.rest; + +import org.elasticsearch.Version; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; + +import java.util.Map; + +public class RestFeatures implements FeatureSpecification { + @Override + public Map getHistoricalFeatures() { + return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index 189bd9c2b955..7748944306e3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -8,16 +8,15 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,28 +26,30 @@ import java.io.IOException; import java.util.List; import java.util.Set; -import java.util.function.Supplier; +import 
java.util.function.Predicate; import static org.elasticsearch.rest.RestRequest.Method.GET; @ServerlessScope(Scope.INTERNAL) public class RestClusterGetSettingsAction extends BaseRestHandler { + public static final NodeFeature SUPPORTS_GET_SETTINGS_ACTION = new NodeFeature("rest.get_settings_action"); + private final Settings settings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; - private final Supplier nodesInCluster; + private final Predicate clusterSupportsFeature; public RestClusterGetSettingsAction( Settings settings, ClusterSettings clusterSettings, SettingsFilter settingsFilter, - Supplier nodesInCluster + Predicate clusterSupportsFeature ) { this.settings = settings; this.clusterSettings = clusterSettings; this.settingsFilter = settingsFilter; - this.nodesInCluster = nodesInCluster; + this.clusterSupportsFeature = clusterSupportsFeature; } @Override @@ -70,7 +71,7 @@ private static void setUpRequestParams(MasterNodeReadRequest clusterRequest, public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); - if (nodesInCluster.get().getMinNodeVersion().before(Version.V_8_3_0)) { + if (clusterSupportsFeature.test(SUPPORTS_GET_SETTINGS_ACTION) == false) { return prepareLegacyRequest(request, client, renderDefaults); } diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index cfec1fe8d532..10f465eb8f3d 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -8,3 +8,4 @@ org.elasticsearch.features.FeaturesSupportedSpecification org.elasticsearch.health.HealthFeature 
+org.elasticsearch.rest.RestFeatures diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 582f9b44af57..4f72357f8332 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -124,7 +124,7 @@ public void testSetupRestHandlerContainsKnownBuiltin() { List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( IllegalArgumentException.class, @@ -184,7 +184,7 @@ public String getName() { List.of(), RestExtension.allowAll() ); - Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null, null)); assertThat(e.getMessage(), startsWith("Cannot replace existing handler for [/_nodes] for method: GET")); } finally { threadPool.shutdown(); @@ -237,7 +237,7 @@ public List getRestHandlers( List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( IllegalArgumentException.class, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 66790c989823..74c5a1784489 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ 
-780,7 +780,7 @@ public void testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc List.of(), RestExtension.allowAll() ); - actionModule.initRestHandlers(null); + actionModule.initRestHandlers(null, null); appender.assertAllExpectationsMatched(); } finally { From 150da303d39684c47746042f8b94e6673d8d2ff4 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 2 Nov 2023 08:52:27 +0000 Subject: [PATCH 29/47] Misc cleanup in SnapshotShardsService (#101666) Reorganises the handling of new cluster states a bit, mainly to make it less awkward to add mechanisms to deal with node shutdown in a future PR. Relates #101171 --- .../snapshots/SnapshotShardsService.java | 113 ++++++++++-------- 1 file changed, 64 insertions(+), 49 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index d842fc21f7fe..c122940f1b40 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -130,12 +130,15 @@ protected void doClose() { @Override public void clusterChanged(ClusterChangedEvent event) { try { - SnapshotsInProgress currentSnapshots = SnapshotsInProgress.get(event.state()); + final var currentSnapshots = SnapshotsInProgress.get(event.state()); if (SnapshotsInProgress.get(event.previousState()).equals(currentSnapshots) == false) { + final var localNodeId = clusterService.localNode().getId(); synchronized (shardSnapshots) { cancelRemoved(currentSnapshots); - for (List snapshots : currentSnapshots.entriesByRepo()) { - startNewSnapshots(snapshots); + for (final var oneRepoSnapshotsInProgress : currentSnapshots.entriesByRepo()) { + for (final var snapshotsInProgressEntry : oneRepoSnapshotsInProgress) { + handleUpdatedSnapshotsInProgressEntry(localNodeId, snapshotsInProgressEntry); + } } } } @@ -221,54 +224,22 @@ private void 
cancelRemoved(SnapshotsInProgress snapshotsInProgress) { } } - private void startNewSnapshots(List snapshotsInProgress) { - final String localNodeId = clusterService.localNode().getId(); - for (SnapshotsInProgress.Entry entry : snapshotsInProgress) { - final State entryState = entry.state(); - if (entry.isClone()) { - // This is a snapshot clone, it will be executed on the current master - continue; - } - if (entryState == State.STARTED && entry.hasShardsInInitState()) { - Map startedShards = null; - final Snapshot snapshot = entry.snapshot(); - Map snapshotShards = shardSnapshots.getOrDefault(snapshot, emptyMap()); - for (Map.Entry shard : entry.shards().entrySet()) { - // Add all new shards to start processing on - final ShardId shardId = shard.getKey(); - final ShardSnapshotStatus shardSnapshotStatus = shard.getValue(); - if (shardSnapshotStatus.state() == ShardState.INIT - && localNodeId.equals(shardSnapshotStatus.nodeId()) - && snapshotShards.containsKey(shardId) == false) { - logger.trace("[{}] adding shard to the queue", shardId); - if (startedShards == null) { - startedShards = new HashMap<>(); - } - startedShards.put(shardId, IndexShardSnapshotStatus.newInitializing(shardSnapshotStatus.generation())); - } - } - if (startedShards != null && startedShards.isEmpty() == false) { - shardSnapshots.computeIfAbsent(snapshot, s -> new HashMap<>()).putAll(startedShards); - - final List shardSnapshotTasks = new ArrayList<>(startedShards.size()); - for (final Map.Entry shardEntry : startedShards.entrySet()) { - final ShardId shardId = shardEntry.getKey(); - final IndexShardSnapshotStatus snapshotStatus = shardEntry.getValue(); - final IndexId indexId = entry.indices().get(shardId.getIndexName()); - assert indexId != null; - assert SnapshotsService.useShardGenerations(entry.version()) - || ShardGenerations.fixShardGeneration(snapshotStatus.generation()) == null - : "Found non-null, non-numeric shard generation [" - + snapshotStatus.generation() - + "] for snapshot with 
old-format compatibility"; - shardSnapshotTasks.add( - newShardSnapshotTask(shardId, snapshot, indexId, snapshotStatus, entry.version(), entry.startTime()) - ); - } + private void handleUpdatedSnapshotsInProgressEntry(String localNodeId, SnapshotsInProgress.Entry entry) { + if (entry.isClone()) { + // This is a snapshot clone, it will be executed on the current master + return; + } - threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> shardSnapshotTasks.forEach(Runnable::run)); + switch (entry.state()) { + case STARTED -> { + if (entry.hasShardsInInitState() == false) { + // Snapshot is running but has no running shards yet, nothing to do + return; } - } else if (entryState == State.ABORTED) { + + startNewShardSnapshots(localNodeId, entry); + } + case ABORTED -> { // Abort all running shards for this snapshot final Snapshot snapshot = entry.snapshot(); Map snapshotShards = shardSnapshots.getOrDefault(snapshot, emptyMap()); @@ -286,9 +257,53 @@ private void startNewSnapshots(List snapshotsInProgre } } } + // otherwise snapshot is not running, nothing to do } } + private void startNewShardSnapshots(String localNodeId, SnapshotsInProgress.Entry entry) { + Map shardsToStart = null; + final Snapshot snapshot = entry.snapshot(); + final var runningShardsForSnapshot = shardSnapshots.getOrDefault(snapshot, emptyMap()).keySet(); + for (var scheduledShard : entry.shards().entrySet()) { + // Add all new shards to start processing on + final var shardId = scheduledShard.getKey(); + final var shardSnapshotStatus = scheduledShard.getValue(); + if (shardSnapshotStatus.state() == ShardState.INIT + && localNodeId.equals(shardSnapshotStatus.nodeId()) + && runningShardsForSnapshot.contains(shardId) == false) { + logger.trace("[{}] adding shard to the queue", shardId); + if (shardsToStart == null) { + shardsToStart = new HashMap<>(); + } + shardsToStart.put(shardId, shardSnapshotStatus.generation()); + } + } + if (shardsToStart == null) { + return; + } + assert 
shardsToStart.isEmpty() == false; + + final var newSnapshotShards = shardSnapshots.computeIfAbsent(snapshot, s -> new HashMap<>()); + + final List shardSnapshotTasks = new ArrayList<>(shardsToStart.size()); + for (final Map.Entry shardEntry : shardsToStart.entrySet()) { + final ShardId shardId = shardEntry.getKey(); + final IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(shardEntry.getValue()); + newSnapshotShards.put(shardId, snapshotStatus); + final IndexId indexId = entry.indices().get(shardId.getIndexName()); + assert indexId != null; + assert SnapshotsService.useShardGenerations(entry.version()) + || ShardGenerations.fixShardGeneration(snapshotStatus.generation()) == null + : "Found non-null, non-numeric shard generation [" + + snapshotStatus.generation() + + "] for snapshot with old-format compatibility"; + shardSnapshotTasks.add(newShardSnapshotTask(shardId, snapshot, indexId, snapshotStatus, entry.version(), entry.startTime())); + } + + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> shardSnapshotTasks.forEach(Runnable::run)); + } + private Runnable newShardSnapshotTask( final ShardId shardId, final Snapshot snapshot, From 07a68f2f6315d860aeefea2b6a18007dd8f62436 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 2 Nov 2023 09:35:59 +0000 Subject: [PATCH 30/47] [ML] Ignore failed jobs in serverless autoscaling (#101692) Failed jobs have persistent tasks but do not have corresponding native processes running, so should not count towards the memory requirements of the ML tier. This PR filters failed jobs before calculating memory requirements for serverless autoscaling. (This was already accounted for correctly in stateful autoscaling.) Also adds some missing tests. 
--- .../MlAutoscalingResourceTracker.java | 44 ++++-- .../MlAutoscalingResourceTrackerTests.java | 138 +++++++++++++++++- 2 files changed, 167 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index 5c89c29a70cd..4b925f678602 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -22,6 +22,8 @@ import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoadDetector; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ -130,22 +132,27 @@ static void getMemoryAndProcessors( autoscalingContext.modelAssignments.size() ); - // start with `minNodes = 1` if any ML job is started, further adjustments are made for trained models below - int minNodes = autoscalingContext.anomalyDetectionTasks.isEmpty() - && autoscalingContext.dataframeAnalyticsTasks.isEmpty() - && autoscalingContext.modelAssignments.isEmpty() ? 0 : 1; + // Start with `minNodes = 0`. If any ML job is started this will be increased to 1 in the loops below, + // and further adjustments are made for trained models depending on allocations. 
+ int minNodes = 0; // anomaly detection for (var task : autoscalingContext.anomalyDetectionTasks) { + MemoryTrackedTaskState state = MlTasks.getMemoryTrackedTaskState(task); + if (state != null && state.consumesMemory() == false) { + continue; + } + String jobId = ((OpenJobAction.JobParams) task.getParams()).getJobId(); Long jobMemory = mlMemoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId); - if (jobMemory == null) { logger.debug("could not find memory requirement for job [{}], returning no-scale", jobId); listener.onResponse(noScaleStats(numberMlNodes)); return; } + minNodes = 1; + if (AWAITING_LAZY_ASSIGNMENT.equals(task.getAssignment())) { logger.debug("job [{}] lacks assignment , memory required [{}]", jobId, jobMemory); @@ -165,15 +172,21 @@ static void getMemoryAndProcessors( // data frame analytics for (var task : autoscalingContext.dataframeAnalyticsTasks) { + MemoryTrackedTaskState state = MlTasks.getMemoryTrackedTaskState(task); + if (state != null && state.consumesMemory() == false) { + continue; + } + String jobId = MlTasks.dataFrameAnalyticsId(task.getId()); Long jobMemory = mlMemoryTracker.getDataFrameAnalyticsJobMemoryRequirement(jobId); - if (jobMemory == null) { logger.debug("could not find memory requirement for job [{}], returning no-scale", jobId); listener.onResponse(noScaleStats(numberMlNodes)); return; } + minNodes = 1; + if (AWAITING_LAZY_ASSIGNMENT.equals(task.getAssignment())) { logger.debug("dfa job [{}] lacks assignment , memory required [{}]", jobId, jobMemory); @@ -192,12 +205,12 @@ static void getMemoryAndProcessors( // trained models for (var modelAssignment : autoscalingContext.modelAssignments.entrySet()) { - final int numberOfAllocations = modelAssignment.getValue().getTaskParams().getNumberOfAllocations(); - final int numberOfThreadsPerAllocation = modelAssignment.getValue().getTaskParams().getThreadsPerAllocation(); - final long estimatedMemoryUsage = 
modelAssignment.getValue().getTaskParams().estimateMemoryUsageBytes(); + TrainedModelAssignment assignment = modelAssignment.getValue(); + final int numberOfAllocations = assignment.getTaskParams().getNumberOfAllocations(); + final int numberOfThreadsPerAllocation = assignment.getTaskParams().getThreadsPerAllocation(); + final long estimatedMemoryUsage = assignment.getTaskParams().estimateMemoryUsageBytes(); - if (AssignmentState.STARTING.equals(modelAssignment.getValue().getAssignmentState()) - && modelAssignment.getValue().getNodeRoutingTable().isEmpty()) { + if (AssignmentState.STARTING.equals(assignment.getAssignmentState()) && assignment.getNodeRoutingTable().isEmpty()) { logger.debug( () -> format( @@ -216,6 +229,9 @@ static void getMemoryAndProcessors( extraSingleNodeProcessors = Math.max(extraSingleNodeProcessors, numberOfThreadsPerAllocation); extraProcessors += numberOfAllocations * numberOfThreadsPerAllocation; } + } else if (assignment.getNodeRoutingTable().values().stream().allMatch(r -> r.getState().consumesMemory() == false)) { + // Ignore states that don't consume memory, for example all allocations are failed + continue; } else { logger.debug( () -> format( @@ -229,9 +245,6 @@ static void getMemoryAndProcessors( modelMemoryBytesSum += estimatedMemoryUsage; processorsSum += numberOfAllocations * numberOfThreadsPerAllocation; - // min(3, max(number of allocations over all deployed models) - minNodes = Math.min(3, Math.max(minNodes, numberOfAllocations)); - for (String node : modelAssignment.getValue().getNodeRoutingTable().keySet()) { perNodeModelMemoryInBytes.computeIfAbsent(node, k -> new ArrayList<>()) .add( @@ -244,6 +257,9 @@ static void getMemoryAndProcessors( ); } } + + // min(3, max(number of allocations over all deployed models) + minNodes = Math.min(3, Math.max(minNodes, numberOfAllocations)); } // check for downscaling diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java index 05478deac811..7ea63cf7945f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTrackerTests.java @@ -14,12 +14,16 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.autoscaling.MlAutoscalingStats; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; @@ -34,7 +38,9 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.ml.autoscaling.MlAutoscalingResourceTracker.MlJobRequirements; +import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class MlAutoscalingResourceTrackerTests extends ESTestCase { @@ -83,6 +89,137 @@ public void testGetMemoryAndProcessors() throws InterruptedException { ); } + public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignment() throws InterruptedException { + long memory = 1000000000; + Map 
nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0" + ); + String jobId = "lazy-job"; + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of( + new PersistentTasksCustomMetadata.PersistentTask<>( + MlTasks.jobTaskId(jobId), + MlTasks.JOB_TASK_NAME, + new OpenJobAction.JobParams(jobId), + 1, + AWAITING_LAZY_ASSIGNMENT + ) + ), + List.of(), + List.of(), + Map.of(), + List.of( + DiscoveryNodeUtils.builder("ml-1") + .name("ml-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build(), + DiscoveryNodeUtils.builder("ml-2") + .name("ml-2") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + when(mockTracker.getAnomalyDetectorJobMemoryRequirement(jobId)).thenReturn(memory / 4); + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory / 2, + 10, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + listener + ), + stats -> { + assertEquals(memory, stats.perNodeMemoryInBytes()); + assertEquals(2, stats.nodes()); + assertEquals(1, stats.minNodes()); + assertEquals(0, stats.extraSingleNodeProcessors()); + assertEquals(memory / 4, stats.extraSingleNodeModelMemoryInBytes()); + assertEquals(memory / 4, stats.extraModelMemoryInBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + } + ); + } + + public void testGetMemoryAndProcessorsScaleUpGivenAwaitingLazyAssignmentButFailed() throws InterruptedException { 
+ long memory = 1000000000; + Map nodeAttr = Map.of( + MachineLearning.MACHINE_MEMORY_NODE_ATTR, + Long.toString(memory), + MachineLearning.MAX_JVM_SIZE_NODE_ATTR, + "400000000", + MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, + "7.2.0" + ); + String jobId = "lazy-job"; + MlAutoscalingContext mlAutoscalingContext = new MlAutoscalingContext( + List.of( + new PersistentTasksCustomMetadata.PersistentTask<>( + new PersistentTasksCustomMetadata.PersistentTask<>( + MlTasks.jobTaskId(jobId), + MlTasks.JOB_TASK_NAME, + new OpenJobAction.JobParams(jobId), + 1, + AWAITING_LAZY_ASSIGNMENT + ), + new JobTaskState(JobState.FAILED, 1, "a nasty bug") + ) + ), + List.of(), + List.of(), + Map.of(), + List.of( + DiscoveryNodeUtils.builder("ml-1") + .name("ml-1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build(), + DiscoveryNodeUtils.builder("ml-2") + .name("ml-2") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr) + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .build() + ), + PersistentTasksCustomMetadata.builder().build() + ); + MlMemoryTracker mockTracker = mock(MlMemoryTracker.class); + when(mockTracker.getAnomalyDetectorJobMemoryRequirement(jobId)).thenReturn(memory / 4); + this.assertAsync( + listener -> MlAutoscalingResourceTracker.getMemoryAndProcessors( + mlAutoscalingContext, + mockTracker, + Map.of("ml-1", memory, "ml-2", memory), + memory / 2, + 10, + MachineLearning.DEFAULT_MAX_OPEN_JOBS_PER_NODE, + listener + ), + stats -> { + assertEquals(memory, stats.perNodeMemoryInBytes()); + assertEquals(2, stats.nodes()); + assertEquals(0, stats.minNodes()); + assertEquals(0, stats.extraSingleNodeProcessors()); + assertEquals(0, stats.extraSingleNodeModelMemoryInBytes()); + assertEquals(0, stats.extraModelMemoryInBytes()); + assertEquals(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(), stats.perNodeMemoryOverheadInBytes()); + 
} + ); + } + public void testCheckIfJobsCanBeMovedInLeastEfficientWayMemoryOnly() { assertEquals( 0L, @@ -897,7 +1034,6 @@ public void testGetMemoryAndProcessorsScaleDown() throws InterruptedException { ) ).addRoutingEntry("ml-node-3", new RoutingInfo(1, 1, RoutingState.STARTED, "")).build() ), - List.of( DiscoveryNodeUtils.builder("ml-node-1") .name("ml-node-name-1") From f8922080d40410d0823b24f8e9f4bf04138dda65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 2 Nov 2023 13:02:29 +0100 Subject: [PATCH 31/47] [DOCS] Adds technical preview note to inference APIs. (#101703) --- docs/reference/inference/delete-inference.asciidoc | 2 ++ docs/reference/inference/get-inference.asciidoc | 2 ++ docs/reference/inference/inference-apis.asciidoc | 2 ++ docs/reference/inference/post-inference.asciidoc | 2 ++ docs/reference/inference/put-inference.asciidoc | 2 ++ 5 files changed, 10 insertions(+) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 874bfa64d355..c9c3e1645861 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -2,6 +2,8 @@ [[delete-inference-api]] === Delete {infer} API +experimental[] + Deletes an {infer} model deployment. diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index bbf1d59c5621..b81f2663ec9e 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -2,6 +2,8 @@ [[get-inference-api]] === Get {infer} API +experimental[] + Retrieves {infer} model information. 
[discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index ec1f01bc4d09..0476ac57287d 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -2,6 +2,8 @@ [[inference-apis]] == {infer-cap} APIs +experimental[] + You can use the following APIs to manage {infer} models and perform {infer}: * <> diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 99dd4a059519..f26a73d09309 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -2,6 +2,8 @@ [[post-inference-api]] === Perform inference API +experimental[] + Performs an inference task on an input text by using an {infer} model. diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index f4737875971c..3b8cd19aded5 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -2,6 +2,8 @@ [[put-inference-api]] === Create {infer} API +experimental[] + Creates a model to perform an {infer} task. From 8e6e0e59ea34be29a6495f0680944d6a7f559756 Mon Sep 17 00:00:00 2001 From: William Brafford Date: Thu, 2 Nov 2023 08:34:02 -0400 Subject: [PATCH 32/47] Make some classes final to avoid suppressing "this-escape" warning (#101699) * Avoid "this-escape" by making classes final The "this-escape" compiler warning is intended to alert developers to potential bugs in object initialization due to subclassing. This class of bugs cannot occur when a class is final. Here, we take cases where a class has no implementations but generates a "this-escape" warning, and we make those classes final rather than suppressing the compiler warning. 
This makes the remaining suppressions more meaningful, since they now indicate places where we may want to look for initialization bugs. In a few cases, making a class final meant changing some of its protected fields and methods to private or default accessibility. Some classes with no implementations are mocked in testing. Since making those classes final would involve non-trivial rewrites of tests, I've left them alone. * Spotless, remove redundant modifiers, clean up "protected" usage * Revert a few more mocked classes --- .../ssl/SslConfigurationLoaderTests.java | 3 +-- .../org/elasticsearch/tdigest/Centroid.java | 5 +--- .../mustache/CustomMustacheFactory.java | 3 +-- .../RestMultiSearchTemplateActionTests.java | 3 +-- .../RestSearchTemplateActionTests.java | 3 +-- .../index/rankeval/RankEvalRequest.java | 3 +-- .../TransportRankEvalActionTests.java | 5 ++-- .../reindex/RestDeleteByQueryActionTests.java | 3 +-- .../reindex/RestUpdateByQueryActionTests.java | 3 +-- .../packaging/util/Packages.java | 3 +-- .../NoShardAvailableActionException.java | 6 +---- .../action/RoutingMissingException.java | 3 +-- .../cluster/node/info/NodesInfoRequest.java | 3 +-- .../shards/ClusterSearchShardsRequest.java | 5 ++-- .../admin/indices/analyze/AnalyzeAction.java | 3 +-- ...TransportVerifyShardBeforeCloseAction.java | 3 +-- .../TransportVerifyShardIndexBlockAction.java | 3 +-- .../admin/indices/stats/CommonStatsFlags.java | 3 +-- .../validate/query/ValidateQueryRequest.java | 3 +-- .../action/bulk/BulkShardRequest.java | 6 +++-- .../action/search/SearchResponse.java | 3 +-- ...roadcastShardOperationFailedException.java | 3 +-- .../replication/ReplicationOperation.java | 4 +--- .../TransportReplicationAction.java | 3 +-- .../termvectors/TermVectorsRequest.java | 5 ++-- .../action/shard/ShardStateAction.java | 3 +-- .../coordination/FollowersChecker.java | 3 +-- .../metadata/IndexTemplateMetadata.java | 3 +-- .../elasticsearch/common/geo/GeoPoint.java | 7 +++--- 
.../common/inject/CreationException.java | 3 +-- .../io/stream/ByteArrayStreamInput.java | 4 +--- .../stream/VersionCheckingStreamOutput.java | 3 +-- .../common/logging/ECSJsonLayout.java | 3 +-- .../common/logging/ESJsonLayout.java | 3 +-- .../common/metrics/Counters.java | 3 +-- .../settings/LocallyMountedSecrets.java | 3 +-- .../common/util/BytesRefArray.java | 4 +--- .../common/util/LongObjectPagedHashMap.java | 3 +-- .../elasticsearch/env/NodeEnvironment.java | 5 ++-- .../env/ShardLockObtainFailedException.java | 4 +--- .../gateway/GatewayMetaState.java | 9 ++++---- .../index/codec/PerFieldMapperCodec.java | 3 +-- .../index/mapper/BinaryFieldMapper.java | 3 +-- .../index/mapper/BooleanFieldMapper.java | 3 +-- .../index/mapper/DateFieldMapper.java | 13 +++++------ .../index/mapper/GeoPointFieldMapper.java | 3 +-- .../index/mapper/IpFieldMapper.java | 3 +-- .../index/mapper/KeywordFieldMapper.java | 5 ++-- .../index/mapper/NumberFieldMapper.java | 3 +-- .../index/mapper/TextFieldMapper.java | 5 ++-- .../query/CombinedFieldsQueryBuilder.java | 3 +-- .../index/query/MultiMatchQueryBuilder.java | 4 +--- .../index/query/QueryShardException.java | 3 +-- .../index/query/QueryStringQueryBuilder.java | 3 +-- .../index/query/SimpleQueryStringBuilder.java | 3 +-- .../shard/IndexShardRecoveryException.java | 3 +-- .../index/shard/ShardNotFoundException.java | 3 +-- .../BlobStoreIndexShardSnapshot.java | 3 +-- .../index/translog/TranslogException.java | 3 +-- .../indices/AliasFilterParsingException.java | 3 +-- .../indices/IndexClosedException.java | 3 +-- .../indices/IndexCreationException.java | 3 +-- ...ndexPrimaryShardNotAllocatedException.java | 3 +-- .../indices/InvalidAliasNameException.java | 3 +-- .../indices/InvalidIndexNameException.java | 4 +--- .../indices/TypeMissingException.java | 5 +--- .../indices/analysis/HunspellService.java | 3 +-- .../cache/IndicesFieldDataCache.java | 3 +-- .../RecoverFilesRecoveryException.java | 3 +-- 
.../RecoveryCommitTooNewException.java | 3 +-- .../indices/store/IndicesStore.java | 3 +-- .../DeDuplicatingTokenFilter.java | 3 +-- .../uhighlight/CustomUnifiedHighlighter.java | 3 +-- .../monitor/fs/FsHealthService.java | 3 +-- .../PersistentTasksClusterService.java | 3 +-- .../decider/EnableAssignmentDecider.java | 3 +-- .../repositories/SnapshotIndexCommit.java | 3 +-- .../org/elasticsearch/rest/RestResponse.java | 3 +-- .../indices/AliasesNotFoundException.java | 3 +-- .../script/field/WriteField.java | 23 +++++++++---------- .../aggregations/AggregatorFactories.java | 3 +-- .../GeoHashGridAggregationBuilder.java | 3 +-- .../GeoTileGridAggregationBuilder.java | 3 +-- .../bucket/global/GlobalAggregator.java | 3 +-- .../terms/MapStringTermsAggregator.java | 3 +-- .../bucket/terms/NumericTermsAggregator.java | 3 +-- .../metrics/PercentilesConfig.java | 6 ++--- .../metrics/ValueCountAggregator.java | 3 +-- .../search/dfs/DfsSearchResult.java | 4 +--- .../search/fetch/FetchPhase.java | 3 +-- .../subphase/highlight/HighlightBuilder.java | 6 ++--- .../search/internal/LegacyReaderContext.java | 3 +-- .../search/sort/FieldSortBuilder.java | 3 +-- .../search/sort/ScoreSortBuilder.java | 4 +--- .../elasticsearch/search/suggest/Suggest.java | 3 +-- .../InternalSnapshotsInfoService.java | 3 +-- .../snapshots/RestoreService.java | 3 +-- .../snapshots/SnapshotShardsService.java | 3 +-- .../snapshots/SnapshotsService.java | 3 +-- .../index/mapper/ObjectMapperMergeTests.java | 3 +-- .../SkipStartingWithDigitTokenFilter.java | 3 +-- .../indices/RestGetIndicesActionTests.java | 3 +-- .../RestPutIndexTemplateActionTests.java | 3 +-- .../document/RestDeleteActionTests.java | 3 +-- .../action/document/RestGetActionTests.java | 3 +-- .../document/RestGetSourceActionTests.java | 3 +-- .../action/document/RestIndexActionTests.java | 3 +-- .../document/RestMultiGetActionTests.java | 4 +--- .../RestMultiTermVectorsActionTests.java | 3 +-- .../document/RestTermVectorsActionTests.java | 3 
+-- .../document/RestUpdateActionTests.java | 3 +-- .../action/search/RestCountActionTests.java | 3 +-- .../action/search/RestExplainActionTests.java | 3 +-- .../search/RestMultiSearchActionTests.java | 3 +-- .../action/search/RestSearchActionTests.java | 3 +-- .../AbstractCoordinatorTestCase.java | 3 +-- .../CoordinationStateTestCluster.java | 3 +-- .../elasticsearch/test/BackgroundIndexer.java | 3 +-- .../test/rest/RestActionTestCase.java | 3 +-- .../local/DefaultLocalClusterSpecBuilder.java | 3 +-- .../HistoBackedHistogramAggregator.java | 3 +-- .../metrics/HistoBackedAvgAggregator.java | 3 +-- .../metrics/HistoBackedMaxAggregator.java | 3 +-- .../metrics/HistoBackedMinAggregator.java | 3 +-- .../metrics/HistoBackedSumAggregator.java | 3 +-- .../HistoBackedValueCountAggregator.java | 3 +-- .../rate/TimeSeriesRateAggregator.java | 17 +++++++------- .../ccr/action/ShardFollowTaskCleaner.java | 3 +-- .../ccr/action/ShardFollowTasksExecutor.java | 3 +-- .../core/ccr/action/PutFollowAction.java | 3 +-- .../DeleteDataFrameAnalyticsAction.java | 3 +-- .../action/GetDataFrameAnalyticsAction.java | 4 +--- .../core/ml/action/GetDatafeedsAction.java | 3 +-- .../core/ml/action/GetFiltersAction.java | 4 +--- .../xpack/core/ml/action/GetJobsAction.java | 3 +-- .../ml/action/GetTrainedModelsAction.java | 3 +-- .../action/GetTrainedModelsStatsAction.java | 4 +--- .../action/StopDataFrameAnalyticsAction.java | 5 ++-- .../classification/Classification.java | 3 +-- .../outlierdetection/OutlierDetection.java | 3 +-- .../evaluation/regression/Regression.java | 3 +-- .../assignment/TrainedModelAssignment.java | 3 +-- .../ml/inference/trainedmodel/tree/Tree.java | 3 +-- .../core/ml/job/config/AnalysisConfig.java | 3 +-- .../process/autodetect/state/DataCounts.java | 3 +-- .../ml/job/results/ForecastRequestStats.java | 3 +-- .../privilege/ActionClusterPrivilege.java | 3 +-- .../xpack/core/ssl/SslSettingsLoader.java | 3 +-- .../termsenum/action/TermsEnumRequest.java | 4 +--- 
.../structurefinder/TextStructure.java | 3 +-- .../action/GetTransformStatsAction.java | 7 +++--- .../action/ScheduleNowTransformAction.java | 3 +-- .../transform/action/StopTransformAction.java | 3 +-- .../action/UpdateTransformAction.java | 3 +-- .../transform/transforms/TransformConfig.java | 4 +--- .../transforms/TransformConfigUpdate.java | 3 +-- .../core/watcher/common/stats/Counters.java | 4 +--- .../watcher/transform/TransformRegistry.java | 3 +-- .../transform/chain/ChainTransform.java | 3 +-- .../AggregateMetricFieldValueFetcher.java | 5 ++-- .../function/EqlFunctionRegistry.java | 3 +-- .../function/scalar/math/ToNumber.java | 3 +-- .../function/scalar/string/Between.java | 5 ++-- .../function/scalar/string/IndexOf.java | 5 ++-- .../function/scalar/string/Match.java | 4 +--- .../function/scalar/string/Substring.java | 5 ++-- .../eql/plugin/TransportEqlSearchAction.java | 3 +-- .../elasticsearch/compute/data/DocVector.java | 3 +-- .../function/EsqlFunctionRegistry.java | 3 +-- .../function/UnsupportedAttribute.java | 3 +-- .../function/scalar/conditional/Case.java | 3 +-- .../rest/action/RestGraphActionTests.java | 3 +-- .../AggregateDoubleMetricFieldMapper.java | 3 +-- .../unsignedlong/UnsignedLongFieldMapper.java | 9 ++++---- .../xpack/ml/MlInitializationService.java | 3 +-- .../MlAutoscalingDeciderService.java | 4 +--- .../NodeFakeAvailabilityZoneMapper.java | 1 - .../NodeRealAvailabilityZoneMapper.java | 1 - .../monitoring/cleaner/CleanerService.java | 1 - .../exporter/local/LocalExporter.java | 3 +-- .../exporter/http/HttpResourceTests.java | 3 +-- .../http/NodeFailureListenerTests.java | 3 +-- .../http/WatcherExistsHttpResourceTests.java | 3 +-- ...ShardsOnInvalidLicenseClusterListener.java | 3 +-- .../Lucene60MetadataOnlyPointsReader.java | 3 +-- .../xpack/ql/expression/AttributeMap.java | 5 ++-- .../xpack/ql/expression/ExpressionSet.java | 3 +-- .../fulltext/StringQueryPredicate.java | 3 +-- .../xpack/ql/index/RemoteClusterResolver.java | 3 +-- 
.../security/UnstableLocalStateSecurity.java | 3 +-- .../support/DummyUsernamePasswordRealm.java | 3 +-- .../xpack/shutdown/NodeSeenService.java | 3 +-- .../xpack/slm/SlmHealthIndicatorService.java | 3 +-- .../GeoShapeWithDocValuesFieldMapper.java | 5 ++-- .../geogrid/GeoHexGridAggregationBuilder.java | 3 +-- .../xpack/sql/proto/RequestInfo.java | 4 +--- .../xpack/sql/analysis/analyzer/Analyzer.java | 3 +-- .../function/SqlFunctionRegistry.java | 3 +-- .../xpack/sql/plan/logical/Pivot.java | 3 +-- .../sql/plugin/TransportSqlQueryAction.java | 3 +-- .../attachment/ReportingAttachmentParser.java | 3 +-- .../trigger/schedule/support/DayTimes.java | 4 +--- .../trigger/schedule/support/MonthTimes.java | 3 +-- .../trigger/schedule/support/YearTimes.java | 3 +-- .../WatchExecutionContextMockBuilder.java | 3 +-- .../wildcard/mapper/WildcardFieldMapper.java | 7 +++--- .../example/realm/CustomRoleMappingRealm.java | 3 +-- 207 files changed, 257 insertions(+), 487 deletions(-) diff --git a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java index 61d42e5db708..5ec0d129b8f9 100644 --- a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java +++ b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java @@ -29,9 +29,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SslConfigurationLoaderTests extends ESTestCase { +public final class SslConfigurationLoaderTests extends ESTestCase { - @SuppressWarnings("this-escape") private final Path certRoot = getDataPath("/certs/ca1/ca.crt").getParent().getParent(); private Settings settings; diff --git a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java index fe9b1f673f71..37bdf37ce51a 100644 --- 
a/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java +++ b/libs/tdigest/src/main/java/org/elasticsearch/tdigest/Centroid.java @@ -26,7 +26,7 @@ /** * A single centroid which represents a number of data points. */ -public class Centroid implements Comparable { +public final class Centroid implements Comparable { private static final AtomicInteger uniqueCount = new AtomicInteger(1); private double centroid = 0; @@ -40,19 +40,16 @@ private Centroid() { id = uniqueCount.getAndIncrement(); } - @SuppressWarnings("this-escape") public Centroid(double x) { this(); start(x, 1, uniqueCount.getAndIncrement()); } - @SuppressWarnings("this-escape") public Centroid(double x, long w) { this(); start(x, w, uniqueCount.getAndIncrement()); } - @SuppressWarnings("this-escape") public Centroid(double x, long w, int id) { this(); start(x, w, id); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 6a3ee1feb73f..73669ccacdbc 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -39,7 +39,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -public class CustomMustacheFactory extends DefaultMustacheFactory { +public final class CustomMustacheFactory extends DefaultMustacheFactory { static final String V7_JSON_MEDIA_TYPE_WITH_CHARSET = "application/json; charset=UTF-8"; static final String JSON_MEDIA_TYPE_WITH_CHARSET = "application/json;charset=utf-8"; static final String JSON_MEDIA_TYPE = "application/json"; @@ -63,7 +63,6 @@ public class CustomMustacheFactory extends DefaultMustacheFactory { private final Encoder encoder; - @SuppressWarnings("this-escape") public CustomMustacheFactory(String mediaType) { super(); setObjectHandler(new 
CustomReflectionObjectHandler()); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java index c234ea58c7ea..fffa5295522a 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java @@ -23,8 +23,7 @@ import java.util.List; import java.util.Map; -public class RestMultiSearchTemplateActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiSearchTemplateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index bc0a5f87e25d..4e30d87b6a17 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -21,8 +21,7 @@ import java.util.List; import java.util.Map; -public class RestSearchTemplateActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestSearchTemplateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java index 
15f9798abe88..ce63bcba0345 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java @@ -26,7 +26,7 @@ /** * Request to perform a search ranking evaluation. */ -public class RankEvalRequest extends ActionRequest implements IndicesRequest.Replaceable { +public final class RankEvalRequest extends ActionRequest implements IndicesRequest.Replaceable { private RankEvalSpec rankingEvaluationSpec; @@ -35,7 +35,6 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep private SearchType searchType = SearchType.DEFAULT; - @SuppressWarnings("this-escape") public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) { this.rankingEvaluationSpec = Objects.requireNonNull(rankingEvaluationSpec, "ranking evaluation specification must not be null"); indices(indices); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java index f99a22cbac6e..982d1afcf6dd 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TransportRankEvalActionTests.java @@ -30,10 +30,9 @@ import static org.mockito.Mockito.mock; -public class TransportRankEvalActionTests extends ESTestCase { +public final class TransportRankEvalActionTests extends ESTestCase { - @SuppressWarnings("this-escape") - private Settings settings = Settings.builder() + private final Settings settings = Settings.builder() .put("path.home", createTempDir().toString()) .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java 
b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index 8e1cfb309a67..fdd98992503d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -23,9 +23,8 @@ import java.util.List; import java.util.Map; -public class RestDeleteByQueryActionTests extends RestActionTestCase { +public final class RestDeleteByQueryActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index 7222b5efe9c8..889c8d0091c8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -23,9 +23,8 @@ import java.util.List; import java.util.Map; -public class RestUpdateByQueryActionTests extends RestActionTestCase { +public final class RestUpdateByQueryActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java index d1fefd425ae7..54f82b2366d1 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java @@ -310,7 +310,7 @@ public static void restartElasticsearch(Shell sh, Installation installation) thr * when instantiated, and 
advancing that cursor when the {@code clear()} * method is called. */ - public static class JournaldWrapper { + public static final class JournaldWrapper { private Shell sh; private String cursor; @@ -318,7 +318,6 @@ public static class JournaldWrapper { * Create a new wrapper for Elasticsearch JournalD logs. * @param sh A shell with appropriate permissions. */ - @SuppressWarnings("this-escape") public JournaldWrapper(Shell sh) { this.sh = sh; clear(); diff --git a/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java b/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java index bb4eb6c202b7..e018cf48fcef 100644 --- a/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java +++ b/server/src/main/java/org/elasticsearch/action/NoShardAvailableActionException.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.io.PrintWriter; -public class NoShardAvailableActionException extends ElasticsearchException { +public final class NoShardAvailableActionException extends ElasticsearchException { private static final StackTraceElement[] EMPTY_STACK_TRACE = new StackTraceElement[0]; @@ -28,22 +28,18 @@ public static NoShardAvailableActionException forOnShardFailureWrapper(String ms return new NoShardAvailableActionException(null, msg, null, true); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId) { this(shardId, null, null, false); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId, String msg) { this(shardId, msg, null, false); } - @SuppressWarnings("this-escape") public NoShardAvailableActionException(ShardId shardId, String msg, Throwable cause) { this(shardId, msg, cause, false); } - @SuppressWarnings("this-escape") private NoShardAvailableActionException(ShardId shardId, String msg, Throwable cause, boolean onShardFailureWrapper) { super(msg, cause); setShard(shardId); diff --git 
a/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java b/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java index a90bc14f9ac8..0999e7154b05 100644 --- a/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java +++ b/server/src/main/java/org/elasticsearch/action/RoutingMissingException.java @@ -18,11 +18,10 @@ import java.io.IOException; import java.util.Objects; -public class RoutingMissingException extends ElasticsearchException { +public final class RoutingMissingException extends ElasticsearchException { private final String id; - @SuppressWarnings("this-escape") public RoutingMissingException(String index, String id) { super("routing is required for [" + index + "]/[" + id + "]"); Objects.requireNonNull(index, "index must not be null"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 0cf0baa75a8d..ebf01feaaa89 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -20,7 +20,7 @@ /** * A request to get node (cluster) level information. */ -public class NodesInfoRequest extends BaseNodesRequest { +public final class NodesInfoRequest extends BaseNodesRequest { private final NodesInfoMetrics nodesInfoMetrics; @@ -39,7 +39,6 @@ public NodesInfoRequest(StreamInput in) throws IOException { * Get information from nodes based on the nodes ids specified. If none are passed, information * for all nodes will be returned. */ - @SuppressWarnings("this-escape") public NodesInfoRequest(String... 
nodesIds) { super(nodesIds); nodesInfoMetrics = new NodesInfoMetrics(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 39205715dca8..6f6253491c58 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -20,7 +20,9 @@ import java.io.IOException; import java.util.Objects; -public class ClusterSearchShardsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { +public final class ClusterSearchShardsRequest extends MasterNodeReadRequest + implements + IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; @Nullable @@ -31,7 +33,6 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest { + public static final class Request extends SingleShardRequest { private String[] text; private String analyzer; @@ -91,7 +91,6 @@ public Request() {} * * @param index The text to analyze */ - @SuppressWarnings("this-escape") public Request(String index) { this.index(index); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java index d2df8e20f99e..444246de7a1b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java @@ -163,7 +163,7 @@ public void markShardCopyAsStaleIfNeeded( } } - public static class ShardRequest extends ReplicationRequest { + public static final class ShardRequest extends ReplicationRequest { private final ClusterBlock 
clusterBlock; @@ -175,7 +175,6 @@ public static class ShardRequest extends ReplicationRequest { phase1 = in.readBoolean(); } - @SuppressWarnings("this-escape") public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final boolean phase1, final TaskId parentTaskId) { super(shardId); this.clusterBlock = Objects.requireNonNull(clusterBlock); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java index aec5718b31a8..eca4f6a1463b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java @@ -157,7 +157,7 @@ public void markShardCopyAsStaleIfNeeded( } } - public static class ShardRequest extends ReplicationRequest { + public static final class ShardRequest extends ReplicationRequest { private final ClusterBlock clusterBlock; @@ -166,7 +166,6 @@ public static class ShardRequest extends ReplicationRequest { clusterBlock = new ClusterBlock(in); } - @SuppressWarnings("this-escape") public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final TaskId parentTaskId) { super(shardId); this.clusterBlock = Objects.requireNonNull(clusterBlock); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java index bccc7a8f7e24..391ac532a0c3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java @@ -24,7 +24,7 @@ * The SHARD_LEVEL flags are for stat fields that can be calculated at the shard level and then may be later 
aggregated at the index level * along with index-level flag stat fields (e.g., Mappings). */ -public class CommonStatsFlags implements Writeable, Cloneable { +public final class CommonStatsFlags implements Writeable, Cloneable { public static final CommonStatsFlags ALL = new CommonStatsFlags().all(); public static final CommonStatsFlags SHARD_LEVEL = new CommonStatsFlags().all().set(Flag.Mappings, false); @@ -40,7 +40,6 @@ public class CommonStatsFlags implements Writeable, Cloneable { /** * @param flags flags to set. If no flags are supplied, default flags will be set. */ - @SuppressWarnings("this-escape") public CommonStatsFlags(Flag... flags) { if (flags.length > 0) { clear(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 64505d76e26b..0505f41b2759 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -29,7 +29,7 @@ *

* The request requires the query to be set using {@link #query(QueryBuilder)} */ -public class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { +public final class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.fromOptions(false, false, true, false); @@ -65,7 +65,6 @@ public ValidateQueryRequest(StreamInput in) throws IOException { * Constructs a new validate request against the provided indices. No indices provided means it will * run against all indices. */ - @SuppressWarnings("this-escape") public ValidateQueryRequest(String... indices) { super(indices); indicesOptions(DEFAULT_INDICES_OPTIONS); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index f3473f274bf3..bd929b9a2204 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -24,7 +24,10 @@ import java.io.IOException; import java.util.Set; -public class BulkShardRequest extends ReplicatedWriteRequest implements Accountable, RawIndexingDataTransportRequest { +public final class BulkShardRequest extends ReplicatedWriteRequest + implements + Accountable, + RawIndexingDataTransportRequest { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(BulkShardRequest.class); @@ -35,7 +38,6 @@ public BulkShardRequest(StreamInput in) throws IOException { items = in.readArray(i -> i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); } - @SuppressWarnings("this-escape") public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRequest[] items) { super(shardId); this.items = items; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 487a5c565399..b6a9179b1e95 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -455,7 +455,7 @@ public String toString() { * and how many of them were skipped and further details in a Map of Cluster objects * (when doing a cross-cluster search). */ - public static class Clusters implements ToXContentFragment, Writeable { + public static final class Clusters implements ToXContentFragment, Writeable { public static final Clusters EMPTY = new Clusters(0, 0, 0); @@ -538,7 +538,6 @@ public Clusters(int total, int successful, int skipped) { this.clusterInfo = Collections.emptyMap(); // will never be used if created from this constructor } - @SuppressWarnings("this-escape") public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); int successfulTemp = in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java index 9cfe0a1f1b99..b1594bf5ba93 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationFailedException.java @@ -20,7 +20,7 @@ * * */ -public class BroadcastShardOperationFailedException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class BroadcastShardOperationFailedException extends ElasticsearchException implements ElasticsearchWrapperException { public BroadcastShardOperationFailedException(ShardId shardId, String msg) { this(shardId, msg, null); @@ -30,7 +30,6 @@ public BroadcastShardOperationFailedException(ShardId shardId, Throwable cause) this(shardId, "", 
cause); } - @SuppressWarnings("this-escape") public BroadcastShardOperationFailedException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java index 1f347ec2b8ca..1604ff81603a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java @@ -661,13 +661,11 @@ public interface ReplicaResponse { } - public static class RetryOnPrimaryException extends ElasticsearchException { - @SuppressWarnings("this-escape") + public static final class RetryOnPrimaryException extends ElasticsearchException { public RetryOnPrimaryException(ShardId shardId, String msg) { this(shardId, msg, null); } - @SuppressWarnings("this-escape") RetryOnPrimaryException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 411f23a0fc0a..0abe7ad678dc 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -605,9 +605,8 @@ protected Releasable checkReplicaLimits(final ReplicaRequest request) { return () -> {}; } - public static class RetryOnReplicaException extends ElasticsearchException { + public static final class RetryOnReplicaException extends ElasticsearchException { - @SuppressWarnings("this-escape") public RetryOnReplicaException(ShardId shardId, String msg) { super(msg); setShard(shardId); diff --git 
a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 56edc5117a28..650b9db7f3d6 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -50,7 +50,7 @@ */ // It's not possible to suppress teh warning at #realtime(boolean) at a method-level. @SuppressWarnings("unchecked") -public class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { +public final class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TermVectorsRequest.class); private static final ParseField INDEX = new ParseField("_index"); @@ -79,7 +79,7 @@ public class TermVectorsRequest extends SingleShardRequest i private long version = Versions.MATCH_ANY; - protected String preference; + private String preference; private static final AtomicInteger randomInt = new AtomicInteger(0); @@ -204,7 +204,6 @@ public TermVectorsRequest(TermVectorsRequest other) { this.filterSettings = other.filterSettings(); } - @SuppressWarnings("this-escape") public TermVectorsRequest(MultiGetRequest.Item item) { super(item.index()); this.id = item.id(); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 8bcb6a28fb50..0f84ecab5f8b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -841,9 +841,8 @@ public String toString() { } } - public static class NoLongerPrimaryShardException extends ElasticsearchException { + public static final class NoLongerPrimaryShardException extends 
ElasticsearchException { - @SuppressWarnings("this-escape") public NoLongerPrimaryShardException(ShardId shardId, String msg) { super(msg); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java index 86e5d6739fcb..feb0543aad62 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java @@ -61,7 +61,7 @@ * considering a follower to be faulty, to allow for a brief network partition or a long GC cycle to occur without triggering the removal of * a node and the consequent shard reallocation. */ -public class FollowersChecker { +public final class FollowersChecker { private static final Logger logger = LogManager.getLogger(FollowersChecker.class); @@ -105,7 +105,6 @@ public class FollowersChecker { private final NodeHealthService nodeHealthService; private volatile FastResponseState fastResponseState; - @SuppressWarnings("this-escape") public FollowersChecker( Settings settings, TransportService transportService, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java index 7a40d7fd774d..35b7d957bf07 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java @@ -224,7 +224,7 @@ public String toString() { } } - public static class Builder { + public static final class Builder { private static final Set VALID_FIELDS = Set.of("order", "mappings", "settings", "index_patterns", "aliases", "version"); @@ -248,7 +248,6 @@ public Builder(String name) { aliases = new HashMap<>(); } - @SuppressWarnings("this-escape") public Builder(IndexTemplateMetadata 
indexTemplateMetadata) { this.name = indexTemplateMetadata.name(); order(indexTemplateMetadata.order()); diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index e6ed24dc7220..74de0b2e03e6 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -25,10 +25,10 @@ import java.io.IOException; import java.util.Locale; -public class GeoPoint implements SpatialPoint, ToXContentFragment { +public final class GeoPoint implements SpatialPoint, ToXContentFragment { - protected double lat; - protected double lon; + private double lat; + private double lon; public GeoPoint() {} @@ -38,7 +38,6 @@ public GeoPoint() {} * * @param value String to create the point from */ - @SuppressWarnings("this-escape") public GeoPoint(String value) { this.resetFromString(value); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/CreationException.java b/server/src/main/java/org/elasticsearch/common/inject/CreationException.java index f09248de947e..78f89e95e5ff 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/CreationException.java +++ b/server/src/main/java/org/elasticsearch/common/inject/CreationException.java @@ -27,13 +27,12 @@ * * @author crazybob@google.com (Bob Lee) */ -public class CreationException extends RuntimeException { +public final class CreationException extends RuntimeException { private final Collection messages; /** * Creates a CreationException containing {@code messages}. 
*/ - @SuppressWarnings("this-escape") public CreationException(Collection messages) { this.messages = messages; if (this.messages.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java index c7e9a4abf2c5..478ae231e16f 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/ByteArrayStreamInput.java @@ -17,18 +17,16 @@ * Resettable {@link StreamInput} that wraps a byte array. It is heavily inspired in Lucene's * {@link org.apache.lucene.store.ByteArrayDataInput}. */ -public class ByteArrayStreamInput extends StreamInput { +public final class ByteArrayStreamInput extends StreamInput { private byte[] bytes; private int pos; private int limit; - @SuppressWarnings("this-escape") public ByteArrayStreamInput() { reset(BytesRef.EMPTY_BYTES); } - @SuppressWarnings("this-escape") public ByteArrayStreamInput(byte[] bytes) { reset(bytes); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java index 6a02bedcdf08..42fb7f4a6afe 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/VersionCheckingStreamOutput.java @@ -17,9 +17,8 @@ * This {@link StreamOutput} writes nowhere. It can be used to check if serialization would * be successful writing to a specific version. 
*/ -public class VersionCheckingStreamOutput extends StreamOutput { +public final class VersionCheckingStreamOutput extends StreamOutput { - @SuppressWarnings("this-escape") public VersionCheckingStreamOutput(TransportVersion version) { setTransportVersion(version); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java b/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java index 54b5749b797f..93ca7a9615be 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java @@ -32,14 +32,13 @@ public static ECSJsonLayout.Builder newBuilder() { return new ECSJsonLayout.Builder().asBuilder(); } - public static class Builder extends AbstractStringLayout.Builder + public static final class Builder extends AbstractStringLayout.Builder implements org.apache.logging.log4j.core.util.Builder { @PluginAttribute("dataset") String dataset; - @SuppressWarnings("this-escape") public Builder() { setCharset(StandardCharsets.UTF_8); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java b/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java index fb7475e3cba5..a5272b8074d7 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESJsonLayout.java @@ -147,7 +147,7 @@ PatternLayout getPatternLayout() { return patternLayout; } - public static class Builder> extends AbstractStringLayout.Builder + public static final class Builder> extends AbstractStringLayout.Builder implements org.apache.logging.log4j.core.util.Builder { @@ -163,7 +163,6 @@ public static class Builder> extends AbstractS @PluginConfiguration private Configuration config; - @SuppressWarnings("this-escape") public Builder() { setCharset(StandardCharsets.UTF_8); } diff --git 
a/server/src/main/java/org/elasticsearch/common/metrics/Counters.java b/server/src/main/java/org/elasticsearch/common/metrics/Counters.java index 665ed371955c..9606fc768759 100644 --- a/server/src/main/java/org/elasticsearch/common/metrics/Counters.java +++ b/server/src/main/java/org/elasticsearch/common/metrics/Counters.java @@ -28,11 +28,10 @@ * that will not have conflicts, which means that there no counter will have a label which is a substring of the label of another counter. * For example, the counters `foo: 1` and `foo.bar: 3` cannot co-exist in a nested map. */ -public class Counters implements Writeable { +public final class Counters implements Writeable { private final ConcurrentMap counters = new ConcurrentHashMap<>(); - @SuppressWarnings("this-escape") public Counters(StreamInput in) throws IOException { int numCounters = in.readVInt(); for (int i = 0; i < numCounters; i++) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java index 1ac3db3827eb..b3639079cc92 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java +++ b/server/src/main/java/org/elasticsearch/common/settings/LocallyMountedSecrets.java @@ -65,7 +65,7 @@ * } * } */ -public class LocallyMountedSecrets implements SecureSettings { +public final class LocallyMountedSecrets implements SecureSettings { public static final String SECRETS_FILE_NAME = "secrets.json"; public static final String SECRETS_DIRECTORY = "secrets"; @@ -116,7 +116,6 @@ public class LocallyMountedSecrets implements SecureSettings { /** * Direct constructor to be used by the CLI */ - @SuppressWarnings("this-escape") public LocallyMountedSecrets(Environment environment) { var secretsDirPath = resolveSecretsDir(environment); var secretsFilePath = resolveSecretsFile(environment); diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java 
b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java index 91dbfc30123f..c78db448380b 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java @@ -22,7 +22,7 @@ /** * Compact serializable container for ByteRefs */ -public class BytesRefArray implements Accountable, Releasable, Writeable { +public final class BytesRefArray implements Accountable, Releasable, Writeable { // base size of the bytes ref array private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArray.class); @@ -32,7 +32,6 @@ public class BytesRefArray implements Accountable, Releasable, Writeable { private ByteArray bytes; private long size; - @SuppressWarnings("this-escape") public BytesRefArray(long capacity, BigArrays bigArrays) { this.bigArrays = bigArrays; boolean success = false; @@ -49,7 +48,6 @@ public BytesRefArray(long capacity, BigArrays bigArrays) { size = 0; } - @SuppressWarnings("this-escape") public BytesRefArray(StreamInput in, BigArrays bigArrays) throws IOException { this.bigArrays = bigArrays; // we allocate big arrays so we have to `close` if we fail here or we'll leak them. diff --git a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java index f54500a806cc..860060ca5a34 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongObjectPagedHashMap.java @@ -17,7 +17,7 @@ * A hash table from native longs to objects. This implementation resolves collisions * using open-addressing and does not support null values. This class is not thread-safe. 
*/ -public class LongObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { +public final class LongObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { private LongArray keys; private ObjectArray values; @@ -26,7 +26,6 @@ public LongObjectPagedHashMap(long capacity, BigArrays bigArrays) { this(capacity, DEFAULT_MAX_LOAD_FACTOR, bigArrays); } - @SuppressWarnings("this-escape") public LongObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) { super(capacity, maxLoadFactor, bigArrays); boolean success = false; diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index cc685b26ce23..0380bb80e001 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -199,7 +199,7 @@ public String toString() { */ static final String SEARCHABLE_SHARED_CACHE_FILE = "shared_snapshot_cache"; - public static class NodeLock implements Releasable { + public static final class NodeLock implements Releasable { private final Lock[] locks; private final DataPath[] dataPaths; @@ -213,7 +213,6 @@ public NodeLock(final Logger logger, final Environment environment, final Checke * Tries to acquire a node lock for a node id, throws {@code IOException} if it is unable to acquire it * @param pathFunction function to check node path before attempt of acquiring a node lock */ - @SuppressWarnings("this-escape") public NodeLock( final Logger logger, final Environment environment, @@ -990,7 +989,7 @@ private final class InternalShardLock { lockDetails = Tuple.tuple(System.nanoTime(), details); } - protected void release() { + private void release() { mutex.release(); decWaitCount(); } diff --git a/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java b/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java index 
001b35ab11cb..4aab8b91a073 100644 --- a/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java +++ b/server/src/main/java/org/elasticsearch/env/ShardLockObtainFailedException.java @@ -17,15 +17,13 @@ /** * Exception used when the in-memory lock for a shard cannot be obtained */ -public class ShardLockObtainFailedException extends ElasticsearchException { +public final class ShardLockObtainFailedException extends ElasticsearchException { - @SuppressWarnings("this-escape") public ShardLockObtainFailedException(ShardId shardId, String message) { super(buildMessage(shardId, message)); this.setShard(shardId); } - @SuppressWarnings("this-escape") public ShardLockObtainFailedException(ShardId shardId, String message, Throwable cause) { super(buildMessage(shardId, message), cause); this.setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index a7cf7299a850..1a81f3053012 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -495,7 +495,7 @@ boolean allPendingAsyncStatesWritten() { /** * Encapsulates the incremental writing of metadata to a {@link PersistedClusterStateService.Writer}. 
*/ - public static class LucenePersistedState implements PersistedState { + public static final class LucenePersistedState implements PersistedState { private long currentTerm; private ClusterState lastAcceptedState; @@ -505,7 +505,6 @@ public static class LucenePersistedState implements PersistedState { private final AtomicReference persistenceWriter = new AtomicReference<>(); private boolean writeNextStateFully; - @SuppressWarnings("this-escape") public LucenePersistedState( PersistedClusterStateService persistedClusterStateService, long currentTerm, @@ -526,7 +525,7 @@ public LucenePersistedState( persistenceWriter.set(writer); } - protected void maybeWriteInitialState(long currentTerm, ClusterState lastAcceptedState, PersistedClusterStateService.Writer writer) + private void maybeWriteInitialState(long currentTerm, ClusterState lastAcceptedState, PersistedClusterStateService.Writer writer) throws IOException { try { writer.writeFullStateAndCommit(currentTerm, lastAcceptedState); @@ -556,7 +555,7 @@ public void setCurrentTerm(long currentTerm) { this.currentTerm = currentTerm; } - protected void writeCurrentTermToDisk(long currentTerm) { + private void writeCurrentTermToDisk(long currentTerm) { try { if (writeNextStateFully) { getWriterSafe().writeFullStateAndCommit(currentTerm, lastAcceptedState); @@ -584,7 +583,7 @@ public void setLastAcceptedState(ClusterState clusterState) { lastAcceptedState = clusterState; } - protected void writeClusterStateToDisk(ClusterState clusterState) { + private void writeClusterStateToDisk(ClusterState clusterState) { try { if (writeNextStateFully) { getWriterSafe().writeFullStateAndCommit(currentTerm, clusterState); diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index df1aca3dc7b5..ee2cb06cb955 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -37,7 +37,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public class PerFieldMapperCodec extends Lucene95Codec { +public final class PerFieldMapperCodec extends Lucene95Codec { private final MapperService mapperService; private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); @@ -49,7 +49,6 @@ public class PerFieldMapperCodec extends Lucene95Codec { : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; } - @SuppressWarnings("this-escape") public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.mapperService = mapperService; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 0457a23d8510..403156c95540 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -191,11 +191,10 @@ protected String contentType() { return CONTENT_TYPE; } - public static class CustomBinaryDocValuesField extends CustomDocValuesField { + public static final class CustomBinaryDocValuesField extends CustomDocValuesField { private final List bytesList; - @SuppressWarnings("this-escape") public CustomBinaryDocValuesField(String name, byte[] bytes) { super(name); bytesList = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 1fb3f706c56a..a5793df3b82e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -68,7 +68,7 @@ private static BooleanFieldMapper toType(FieldMapper in) { return (BooleanFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter docValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); @@ -93,7 +93,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; - @SuppressWarnings("this-escape") public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) { super(name); this.scriptCompiler = Objects.requireNonNull(scriptCompiler); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 21b9ec04c56c..9d12fc6910d6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -223,7 +223,7 @@ private static DateFieldMapper toType(FieldMapper in) { return (DateFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter index = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter docValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -253,7 +253,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final ScriptCompiler scriptCompiler; - @SuppressWarnings("this-escape") public Builder( String name, Resolution resolution, @@ -396,11 +395,11 @@ public DateFieldMapper build(MapperBuilderContext context) { }, 
MINIMUM_COMPATIBILITY_VERSION); public static final class DateFieldType extends MappedFieldType { - protected final DateFormatter dateTimeFormatter; - protected final DateMathParser dateMathParser; - protected final Resolution resolution; - protected final String nullValue; - protected final FieldValues scriptValues; + final DateFormatter dateTimeFormatter; + final DateMathParser dateMathParser; + private final Resolution resolution; + private final String nullValue; + private final FieldValues scriptValues; private final boolean pointsMetadataAvailable; public DateFieldType( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index f97817570838..10e24fbeebb8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -76,7 +76,7 @@ private static GeoPointFieldMapper toType(FieldMapper in) { return (GeoPointFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { final Parameter> ignoreMalformed; final Parameter> ignoreZValue = ignoreZValueParam(m -> builder(m).ignoreZValue.get()); @@ -94,7 +94,6 @@ public static class Builder extends FieldMapper.Builder { private final Parameter dimension; // can only support time_series_dimension: false private final IndexMode indexMode; // either STANDARD or TIME_SERIES - @SuppressWarnings("this-escape") public Builder( String name, ScriptCompiler scriptCompiler, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 7d6b7711360f..80fd384f15fb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ 
-69,7 +69,7 @@ private static IpFieldMapper toType(FieldMapper in) { return (IpFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -89,7 +89,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; private final ScriptCompiler scriptCompiler; - @SuppressWarnings("this-escape") public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) { super(name); this.scriptCompiler = Objects.requireNonNull(scriptCompiler); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 9bc3db22365d..f15bb0069570 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -137,7 +137,7 @@ private static KeywordFieldMapper toType(FieldMapper in) { return (KeywordFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -184,7 +184,6 @@ public static class Builder extends FieldMapper.Builder { private final ScriptCompiler scriptCompiler; private final IndexVersion indexCreatedVersion; - @SuppressWarnings("this-escape") public Builder(String name, IndexAnalyzers indexAnalyzers, ScriptCompiler scriptCompiler, IndexVersion indexCreatedVersion) { super(name); this.indexAnalyzers = indexAnalyzers; @@ 
-1008,7 +1007,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { return syntheticFieldLoader(simpleName()); } - protected SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String simpleName) { + SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String simpleName) { if (hasScript()) { return SourceLoader.SyntheticFieldLoader.NOTHING; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 7dfc5a98037d..84e9e84fb8ce 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -89,7 +89,7 @@ private static NumberFieldMapper toType(FieldMapper in) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -143,7 +143,6 @@ public static Builder docValuesOnly(String name, NumberType type, IndexVersion i return builder; } - @SuppressWarnings("this-escape") public Builder( String name, NumberType type, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 91616041f65f..1949249b9be2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -87,7 +87,7 @@ import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; /** A {@link FieldMapper} for full-text fields. 
*/ -public class TextFieldMapper extends FieldMapper { +public final class TextFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "text"; private static final String FAST_PHRASE_SUFFIX = "._index_phrase"; @@ -1155,8 +1155,7 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; - @SuppressWarnings("this-escape") - protected TextFieldMapper( + private TextFieldMapper( String simpleName, FieldType fieldType, TextFieldType mappedFieldType, diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 2533b5b61106..b869096c12fb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -50,7 +50,7 @@ * A query that matches on multiple text fields, as if the field contents had been indexed * into a single combined field. */ -public class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { +public final class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "combined_fields"; private static final ParseField QUERY_FIELD = new ParseField("query"); @@ -109,7 +109,6 @@ public class CombinedFieldsQueryBuilder extends AbstractQueryBuilder { +public final class MultiMatchQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "multi_match"; private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "cutoff_freqency is not supported." @@ -185,7 +185,6 @@ public MultiMatchQueryBuilder.Type getType() { /** * Constructs a new text query. */ - @SuppressWarnings("this-escape") public MultiMatchQueryBuilder(Object value, String... 
fields) { if (value == null) { throw new IllegalArgumentException("[" + NAME + "] requires query value"); @@ -203,7 +202,6 @@ public MultiMatchQueryBuilder(Object value, String... fields) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public MultiMatchQueryBuilder(StreamInput in) throws IOException { super(in); value = in.readGenericValue(); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java index 0b0b35b61953..fd290e56f7e4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java @@ -18,7 +18,7 @@ /** * Exception that is thrown when creating lucene queries on the shard */ -public class QueryShardException extends ElasticsearchException { +public final class QueryShardException extends ElasticsearchException { public QueryShardException(QueryRewriteContext context, String msg, Object... args) { this(context, msg, null, args); @@ -32,7 +32,6 @@ public QueryShardException(QueryRewriteContext context, String msg, Throwable ca * This constructor is provided for use in unit tests where a * {@link SearchExecutionContext} may not be available */ - @SuppressWarnings("this-escape") public QueryShardException(Index index, String msg, Throwable cause, Object... args) { super(msg, cause, args); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 195e1d51c806..38ca88e8a937 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -45,7 +45,7 @@ * (using {@link #field(String)}), will run the parsed query against the provided fields, and combine * them using Dismax. 
*/ -public class QueryStringQueryBuilder extends AbstractQueryBuilder { +public final class QueryStringQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "query_string"; @@ -153,7 +153,6 @@ public QueryStringQueryBuilder(String queryString) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public QueryStringQueryBuilder(StreamInput in) throws IOException { super(in); queryString = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 4f6ba803eb7a..b2067549fab6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -69,7 +69,7 @@ * "https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html" * > online documentation. */ -public class SimpleQueryStringBuilder extends AbstractQueryBuilder { +public final class SimpleQueryStringBuilder extends AbstractQueryBuilder { /** Default for using lenient query parsing.*/ public static final boolean DEFAULT_LENIENT = false; @@ -142,7 +142,6 @@ public SimpleQueryStringBuilder(String queryText) { /** * Read from a stream. 
*/ - @SuppressWarnings("this-escape") public SimpleQueryStringBuilder(StreamInput in) throws IOException { super(in); queryText = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java index d03c1c2db06e..38ad67da2417 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardRecoveryException.java @@ -13,8 +13,7 @@ import java.io.IOException; -public class IndexShardRecoveryException extends ElasticsearchException { - @SuppressWarnings("this-escape") +public final class IndexShardRecoveryException extends ElasticsearchException { public IndexShardRecoveryException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java b/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java index fa66d8fe86ad..8c9ab0e30b28 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java @@ -13,7 +13,7 @@ import java.io.IOException; -public class ShardNotFoundException extends ResourceNotFoundException { +public final class ShardNotFoundException extends ResourceNotFoundException { public ShardNotFoundException(ShardId shardId) { this(shardId, null); } @@ -26,7 +26,6 @@ public ShardNotFoundException(ShardId shardId, String msg, Object... args) { this(shardId, msg, null, args); } - @SuppressWarnings("this-escape") public ShardNotFoundException(ShardId shardId, String msg, Throwable ex, Object... 
args) { super(msg, ex, args); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 5b67f58f06a3..2a8fe96151c1 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -38,7 +38,7 @@ public class BlobStoreIndexShardSnapshot implements ToXContentFragment { /** * Information about snapshotted file */ - public static class FileInfo implements Writeable { + public static final class FileInfo implements Writeable { public static final String SERIALIZE_WRITER_UUID = "serialize_writer_uuid"; private final String name; @@ -55,7 +55,6 @@ public static class FileInfo implements Writeable { * @param metadata the files meta data * @param partSize size of the single chunk */ - @SuppressWarnings("this-escape") public FileInfo(String name, StoreFileMetadata metadata, @Nullable ByteSizeValue partSize) { this.name = Objects.requireNonNull(name); this.metadata = metadata; diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java index 000b36b299fd..78b9b6424ece 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogException.java @@ -14,13 +14,12 @@ import java.io.IOException; -public class TranslogException extends ElasticsearchException { +public final class TranslogException extends ElasticsearchException { public TranslogException(ShardId shardId, String msg) { this(shardId, msg, null); } - @SuppressWarnings("this-escape") public TranslogException(ShardId shardId, String msg, Throwable cause) { super(msg, cause); setShard(shardId); 
diff --git a/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java b/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java index 371444b6a72d..744f8c2ed303 100644 --- a/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java +++ b/server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class AliasFilterParsingException extends ElasticsearchException { +public final class AliasFilterParsingException extends ElasticsearchException { - @SuppressWarnings("this-escape") public AliasFilterParsingException(Index index, String name, String desc, Throwable ex) { super("[" + name + "], " + desc, ex); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java b/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java index 4c4035dbc635..f64e6758130b 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexClosedException.java @@ -18,9 +18,8 @@ /** * Exception indicating that one or more requested indices are closed. 
*/ -public class IndexClosedException extends ElasticsearchException { +public final class IndexClosedException extends ElasticsearchException { - @SuppressWarnings("this-escape") public IndexClosedException(Index index) { super("closed"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java b/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java index a38bcc8ae02d..4558c5a7bc2b 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexCreationException.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException { - @SuppressWarnings("this-escape") public IndexCreationException(String index, Throwable cause) { super("failed to create index [{}]", cause, index); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java b/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java index 819378bae0fe..1591ae4428fe 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexPrimaryShardNotAllocatedException.java @@ -19,12 +19,11 @@ * Thrown when some action cannot be performed because the primary shard of * some shard group in an index has not been allocated post api action. 
*/ -public class IndexPrimaryShardNotAllocatedException extends ElasticsearchException { +public final class IndexPrimaryShardNotAllocatedException extends ElasticsearchException { public IndexPrimaryShardNotAllocatedException(StreamInput in) throws IOException { super(in); } - @SuppressWarnings("this-escape") public IndexPrimaryShardNotAllocatedException(Index index) { super("primary not allocated post api"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java b/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java index 4b06757652f7..b9d25b83d24e 100644 --- a/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java +++ b/server/src/main/java/org/elasticsearch/indices/InvalidAliasNameException.java @@ -15,9 +15,8 @@ import java.io.IOException; -public class InvalidAliasNameException extends ElasticsearchException { +public final class InvalidAliasNameException extends ElasticsearchException { - @SuppressWarnings("this-escape") public InvalidAliasNameException(Index index, String name, String desc) { super("Invalid alias name [{}], {}", name, desc); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java b/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java index fec791364cf9..7a1e1c8cede4 100644 --- a/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java +++ b/server/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java @@ -15,15 +15,13 @@ import java.io.IOException; -public class InvalidIndexNameException extends ElasticsearchException { +public final class InvalidIndexNameException extends ElasticsearchException { - @SuppressWarnings("this-escape") public InvalidIndexNameException(String name, String desc) { super("Invalid index name [" + name + "], " + desc); setIndex(name); } - @SuppressWarnings("this-escape") public 
InvalidIndexNameException(Index index, String name, String desc) { super("Invalid index name [" + name + "], " + desc); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java b/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java index c53b72cbe380..ab4c43397b12 100644 --- a/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java +++ b/server/src/main/java/org/elasticsearch/indices/TypeMissingException.java @@ -16,21 +16,18 @@ import java.io.IOException; import java.util.Arrays; -public class TypeMissingException extends ElasticsearchException { +public final class TypeMissingException extends ElasticsearchException { - @SuppressWarnings("this-escape") public TypeMissingException(Index index, String... types) { super("type" + Arrays.toString(types) + " missing"); setIndex(index); } - @SuppressWarnings("this-escape") public TypeMissingException(Index index, Throwable cause, String... types) { super("type" + Arrays.toString(types) + " missing", cause); setIndex(index); } - @SuppressWarnings("this-escape") public TypeMissingException(String index, String... 
types) { super("type[" + Arrays.toString(types) + "] missing"); setIndex(index); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 36a89f4c0d40..d692c331927d 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -65,7 +65,7 @@ * * @see org.elasticsearch.index.analysis.HunspellTokenFilterFactory */ -public class HunspellService { +public final class HunspellService { private static final Logger logger = LogManager.getLogger(HunspellService.class); @@ -89,7 +89,6 @@ public class HunspellService { private final Path hunspellDir; private final Function loadingFunction; - @SuppressWarnings("this-escape") public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); diff --git a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 76de0d43b7f2..c77cf15a2b2e 100644 --- a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -39,7 +39,7 @@ import java.util.List; import java.util.function.ToLongBiFunction; -public class IndicesFieldDataCache implements RemovalListener, Releasable { +public final class IndicesFieldDataCache implements RemovalListener, Releasable { private static final Logger logger = LogManager.getLogger(IndicesFieldDataCache.class); @@ -51,7 +51,6 @@ public class IndicesFieldDataCache implements RemovalListener cache; - @SuppressWarnings("this-escape") public IndicesFieldDataCache(Settings settings, 
IndexFieldDataCache.Listener indicesFieldDataCacheListener) { this.indicesFieldDataCacheListener = indicesFieldDataCacheListener; final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).getBytes(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java index 1a40f7526240..87f491a598d5 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java @@ -18,13 +18,12 @@ import java.io.IOException; import java.util.Objects; -public class RecoverFilesRecoveryException extends ElasticsearchException implements ElasticsearchWrapperException { +public final class RecoverFilesRecoveryException extends ElasticsearchException implements ElasticsearchWrapperException { private final int numberOfFiles; private final ByteSizeValue totalFilesSize; - @SuppressWarnings("this-escape") public RecoverFilesRecoveryException(ShardId shardId, int numberOfFiles, ByteSizeValue totalFilesSize, Throwable cause) { super("Failed to transfer [{}] files with total size of [{}]", cause, numberOfFiles, totalFilesSize); Objects.requireNonNull(totalFilesSize, "totalFilesSize must not be null"); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java index d89a429dc853..c42f88c9b843 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCommitTooNewException.java @@ -14,8 +14,7 @@ import java.io.IOException; -public class RecoveryCommitTooNewException extends ElasticsearchException { - @SuppressWarnings("this-escape") +public final class 
RecoveryCommitTooNewException extends ElasticsearchException { public RecoveryCommitTooNewException(ShardId shardId, String message) { super(message); setShard(shardId); diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 6c32ebd491ed..d631c7a11d10 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -64,7 +64,7 @@ import static org.elasticsearch.core.Strings.format; -public class IndicesStore implements ClusterStateListener, Closeable { +public final class IndicesStore implements ClusterStateListener, Closeable { private static final Logger logger = LogManager.getLogger(IndicesStore.class); @@ -88,7 +88,6 @@ public class IndicesStore implements ClusterStateListener, Closeable { private final TimeValue deleteShardTimeout; - @SuppressWarnings("this-escape") @Inject public IndicesStore( Settings settings, diff --git a/server/src/main/java/org/elasticsearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilter.java b/server/src/main/java/org/elasticsearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilter.java index e51470edf523..8b6b87ea2e73 100644 --- a/server/src/main/java/org/elasticsearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilter.java +++ b/server/src/main/java/org/elasticsearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilter.java @@ -36,8 +36,7 @@ * results or are output (the {@link DuplicateSequenceAttribute} attribute can * be used to inspect the number of prior sightings when emitDuplicates is true) */ -public class DeDuplicatingTokenFilter extends FilteringTokenFilter { - @SuppressWarnings("this-escape") +public final class DeDuplicatingTokenFilter extends FilteringTokenFilter { private final DuplicateSequenceAttribute seqAtt = addAttribute(DuplicateSequenceAttribute.class); private final boolean emitDuplicates; 
static final MurmurHash3.Hash128 seed = new MurmurHash3.Hash128(); diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 838dff420777..d5c3bdbbc65c 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -50,7 +50,7 @@ * value as a discrete passage for highlighting (unless the whole content needs to be highlighted). * Supports both returning empty snippets and non highlighted snippets when no highlighting can be performed. */ -public class CustomUnifiedHighlighter extends UnifiedHighlighter { +public final class CustomUnifiedHighlighter extends UnifiedHighlighter { public static final char MULTIVAL_SEP_CHAR = (char) 0; private static final Snippet[] EMPTY_SNIPPET = new Snippet[0]; @@ -79,7 +79,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { * offset source for it because it'd be super slow * @param weightMatchesEnabled whether the {@link HighlightFlag#WEIGHT_MATCHES} should be enabled */ - @SuppressWarnings("this-escape") public CustomUnifiedHighlighter( Builder builder, OffsetSource offsetSource, diff --git a/server/src/main/java/org/elasticsearch/monitor/fs/FsHealthService.java b/server/src/main/java/org/elasticsearch/monitor/fs/FsHealthService.java index 16d44b572cf8..de26d23a149e 100644 --- a/server/src/main/java/org/elasticsearch/monitor/fs/FsHealthService.java +++ b/server/src/main/java/org/elasticsearch/monitor/fs/FsHealthService.java @@ -42,7 +42,7 @@ /** * Runs periodically and attempts to create a temp file to see if the filesystem is writable. If not then it marks the path as unhealthy. 
*/ -public class FsHealthService extends AbstractLifecycleComponent implements NodeHealthService { +public final class FsHealthService extends AbstractLifecycleComponent implements NodeHealthService { private static final Logger logger = LogManager.getLogger(FsHealthService.class); @@ -82,7 +82,6 @@ public class FsHealthService extends AbstractLifecycleComponent implements NodeH Setting.Property.Dynamic ); - @SuppressWarnings("this-escape") public FsHealthService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, NodeEnvironment nodeEnv) { this.threadPool = threadPool; this.enabled = ENABLED_SETTING.get(settings); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 38d67efa734b..ba7b4bb51d9c 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -46,7 +46,7 @@ /** * Component that runs only on the master node and is responsible for assigning running tasks to nodes */ -public class PersistentTasksClusterService implements ClusterStateListener, Closeable { +public final class PersistentTasksClusterService implements ClusterStateListener, Closeable { public static final Setting CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING = Setting.timeSetting( "cluster.persistent_tasks.allocation.recheck_interval", @@ -65,7 +65,6 @@ public class PersistentTasksClusterService implements ClusterStateListener, Clos private final PeriodicRechecker periodicRechecker; private final AtomicBoolean reassigningTasks = new AtomicBoolean(false); - @SuppressWarnings("this-escape") public PersistentTasksClusterService( Settings settings, PersistentTasksExecutorRegistry registry, diff --git a/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java 
b/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java index ebabec42ef11..ae600dfda39a 100644 --- a/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java +++ b/server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java @@ -28,7 +28,7 @@ * * @see Allocation */ -public class EnableAssignmentDecider { +public final class EnableAssignmentDecider { public static final Setting CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING = new Setting<>( "cluster.persistent_tasks.allocation.enable", @@ -41,7 +41,6 @@ public class EnableAssignmentDecider { private volatile Allocation enableAssignment; - @SuppressWarnings("this-escape") public EnableAssignmentDecider(final Settings settings, final ClusterSettings clusterSettings) { this.enableAssignment = CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING, this::setEnableAssignment); diff --git a/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java b/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java index 43594aa6047e..b041f51afa6d 100644 --- a/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java +++ b/server/src/main/java/org/elasticsearch/repositories/SnapshotIndexCommit.java @@ -19,13 +19,12 @@ * A (closeable) {@link IndexCommit} plus ref-counting to keep track of active users, and with the facility to drop the "main" initial ref * early if the shard snapshot is aborted. 
*/ -public class SnapshotIndexCommit extends AbstractRefCounted { +public final class SnapshotIndexCommit extends AbstractRefCounted { private final Engine.IndexCommitRef commitRef; private final Runnable releaseInitialRef; private final SubscribableListener completionListeners = new SubscribableListener<>(); - @SuppressWarnings("this-escape") public SnapshotIndexCommit(Engine.IndexCommitRef commitRef) { this.commitRef = commitRef; this.releaseInitialRef = new RunOnce(this::decRef); diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index 73b24b21e546..55adc67bf18e 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -36,7 +36,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER; -public class RestResponse { +public final class RestResponse { public static final String TEXT_CONTENT_TYPE = "text/plain; charset=UTF-8"; @@ -111,7 +111,6 @@ public RestResponse(RestChannel channel, Exception e) throws IOException { this(channel, ExceptionsHelper.status(e), e); } - @SuppressWarnings("this-escape") public RestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { this.status = status; ToXContent.Params params = channel.request(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java index 4ebe5350e055..e46468205da6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/AliasesNotFoundException.java @@ -13,9 +13,8 @@ import java.io.IOException; import java.util.Arrays; -public 
class AliasesNotFoundException extends ResourceNotFoundException { +public final class AliasesNotFoundException extends ResourceNotFoundException { - @SuppressWarnings("this-escape") public AliasesNotFoundException(String... names) { super("aliases " + Arrays.toString(names) + " missing"); this.setResources("aliases", names); diff --git a/server/src/main/java/org/elasticsearch/script/field/WriteField.java b/server/src/main/java/org/elasticsearch/script/field/WriteField.java index 6a50434b4004..a420c8c7c085 100644 --- a/server/src/main/java/org/elasticsearch/script/field/WriteField.java +++ b/server/src/main/java/org/elasticsearch/script/field/WriteField.java @@ -23,16 +23,15 @@ import java.util.function.Predicate; import java.util.function.Supplier; -public class WriteField implements Field { - protected String path; - protected Supplier> rootSupplier; +public final class WriteField implements Field { + private String path; + private Supplier> rootSupplier; - protected Map container; - protected String leaf; + private Map container; + private String leaf; private static final Object MISSING = new Object(); - @SuppressWarnings("this-escape") public WriteField(String path, Supplier> rootSupplier) { this.path = path; this.rootSupplier = rootSupplier; @@ -501,7 +500,7 @@ public NestedDocument doc(int index) { * If there is a value that is not a List or a Map, {@throws IllegalStateException}. */ @SuppressWarnings("unchecked") - protected List> getDocsAsList() { + private List> getDocsAsList() { Object value = get(MISSING); if (value == MISSING) { return null; @@ -604,7 +603,7 @@ public void remove() { * Change the path and clear the existing resolution by setting {@link #leaf} and {@link #container} to null. * Caller needs to re-resolve after this call. 
*/ - protected void setPath(String path) { + private void setPath(String path) { this.path = path; this.leaf = null; this.container = null; @@ -613,7 +612,7 @@ protected void setPath(String path) { /** * Get the path to a leaf or create it if one does not exist. */ - protected void setLeaf() { + private void setLeaf() { if (leaf == null) { resolveDepthFlat(); } @@ -636,7 +635,7 @@ protected void setLeaf() { * {@link #container} and {@link #leaf} and non-null if resolved. */ @SuppressWarnings("unchecked") - protected void resolveDepthFlat() { + private void resolveDepthFlat() { container = rootSupplier.get(); int index = path.indexOf('.'); @@ -670,7 +669,7 @@ protected void resolveDepthFlat() { * @throws IllegalArgumentException if a non-leaf segment maps to a non-Map Object. */ @SuppressWarnings("unchecked") - protected void createDepth() { + private void createDepth() { container = rootSupplier.get(); String[] segments = path.split("\\."); @@ -692,7 +691,7 @@ protected void createDepth() { leaf = segments[segments.length - 1]; } - protected String typeName(Object value) { + private String typeName(Object value) { return value != null ? value.getClass().getName() : "null"; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 1d12b3339065..98c131213b3f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -274,7 +274,7 @@ public Aggregator[] createSubAggregators(Aggregator parent, CardinalityUpperBoun * A mutable collection of {@link AggregationBuilder}s and * {@link PipelineAggregationBuilder}s. 
*/ - public static class Builder implements Writeable, ToXContentObject { + public static final class Builder implements Writeable, ToXContentObject { private final Set names = new HashSet<>(); // Using LinkedHashSets to preserve the order of insertion, that makes the results @@ -290,7 +290,6 @@ public Builder() {} /** * Read from a stream. */ - @SuppressWarnings("this-escape") public Builder(StreamInput in) throws IOException { int factoriesSize = in.readVInt(); for (int i = 0; i < factoriesSize; i++) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java index d51f14a516bc..e0edebd7e520 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java @@ -26,7 +26,7 @@ import java.io.IOException; import java.util.Map; -public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geohash_grid"; public static final int DEFAULT_PRECISION = 5; public static final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -41,7 +41,6 @@ public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { GeoHashGridAggregationBuilder::new ); - @SuppressWarnings("this-escape") public GeoHashGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java index 76286fc1605a..b7532bdcb4e5 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Map; -public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geotile_grid"; public static final int DEFAULT_PRECISION = 7; private static final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -40,7 +40,6 @@ public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { GeoTileGridAggregationBuilder::new ); - @SuppressWarnings("this-escape") public GeoTileGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 2062d7dd7394..3beec89853b7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -24,10 +24,9 @@ import java.io.IOException; import java.util.Map; -public class GlobalAggregator extends BucketsAggregator implements SingleBucketAggregator { +public final class GlobalAggregator extends BucketsAggregator implements SingleBucketAggregator { private final Weight weight; - @SuppressWarnings("this-escape") public GlobalAggregator(String name, AggregatorFactories subFactories, AggregationContext context, Map metadata) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index a26507413128..e4650ad9fddd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -47,13 +47,12 @@ * An aggregator of string values that hashes the strings on the fly rather * than up front like the {@link GlobalOrdinalsStringTermsAggregator}. */ -public class MapStringTermsAggregator extends AbstractStringTermsAggregator { +public final class MapStringTermsAggregator extends AbstractStringTermsAggregator { private final CollectorSource collectorSource; private final ResultStrategy resultStrategy; private final BytesKeyedBucketOrds bucketOrds; private final IncludeExclude.StringFilter includeExclude; - @SuppressWarnings("this-escape") public MapStringTermsAggregator( String name, AggregatorFactories factories, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index b0d60962300b..96d81aad86c4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -45,13 +45,12 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; -public class NumericTermsAggregator extends TermsAggregator { +public final class NumericTermsAggregator extends TermsAggregator { private final ResultStrategy resultStrategy; private final ValuesSource.Numeric valuesSource; private final LongKeyedBucketOrds bucketOrds; private final LongFilter longFilter; - @SuppressWarnings("this-escape") public NumericTermsAggregator( String 
name, AggregatorFactories factories, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java index 92ff5cfb09c0..b8402208673d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java @@ -120,7 +120,7 @@ public int hashCode() { return Objects.hash(method); } - public static class TDigest extends PercentilesConfig { + public static final class TDigest extends PercentilesConfig { static final double DEFAULT_COMPRESSION = 100.0; private double compression; @@ -134,7 +134,6 @@ public TDigest(double compression) { this(compression, null); } - @SuppressWarnings("this-escape") public TDigest(double compression, TDigestExecutionHint executionHint) { super(PercentilesMethod.TDIGEST); this.executionHint = executionHint; @@ -281,7 +280,7 @@ public int hashCode() { } } - public static class Hdr extends PercentilesConfig { + public static final class Hdr extends PercentilesConfig { static final int DEFAULT_NUMBER_SIG_FIGS = 3; private int numberOfSignificantValueDigits; @@ -289,7 +288,6 @@ public Hdr() { this(DEFAULT_NUMBER_SIG_FIGS); } - @SuppressWarnings("this-escape") public Hdr(int numberOfSignificantValueDigits) { super(PercentilesMethod.HDR); setNumberOfSignificantValueDigits(numberOfSignificantValueDigits); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java index e0c11530541e..ae14b4601b55 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java @@ -31,14 +31,13 @@ * This aggregator works in a 
multi-bucket mode, that is, when serves as a sub-aggregator, a single aggregator instance aggregates the * counts for all buckets owned by the parent aggregator) */ -public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { +public final class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { final ValuesSource valuesSource; // a count per bucket LongArray counts; - @SuppressWarnings("this-escape") public ValueCountAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index e72a0361e9db..3269de91da60 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Map; -public class DfsSearchResult extends SearchPhaseResult { +public final class DfsSearchResult extends SearchPhaseResult { private static final Term[] EMPTY_TERMS = new Term[0]; private static final TermStatistics[] EMPTY_TERM_STATS = new TermStatistics[0]; @@ -37,7 +37,6 @@ public class DfsSearchResult extends SearchPhaseResult { private int maxDoc; private SearchProfileDfsPhaseResult searchProfileDfsPhaseResult; - @SuppressWarnings("this-escape") public DfsSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); @@ -70,7 +69,6 @@ public DfsSearchResult(StreamInput in) throws IOException { } } - @SuppressWarnings("this-escape") public DfsSearchResult(ShardSearchContextId contextId, SearchShardTarget shardTarget, ShardSearchRequest shardSearchRequest) { this.setSearchShardTarget(shardTarget); this.contextId = contextId; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 2aeb36d75de6..5a04404c2e38 
100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -46,12 +46,11 @@ * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified * after reducing all of the matches returned by the query phase */ -public class FetchPhase { +public final class FetchPhase { private static final Logger LOGGER = LogManager.getLogger(FetchPhase.class); private final FetchSubPhase[] fetchSubPhases; - @SuppressWarnings("this-escape") public FetchPhase(List fetchSubPhases) { this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]); this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index cace74f4189f..7d371ac37277 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -44,7 +44,7 @@ * * @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight() */ -public class HighlightBuilder extends AbstractHighlighterBuilder { +public final class HighlightBuilder extends AbstractHighlighterBuilder { /** default for whether to highlight fields based on the source even if stored separately */ public static final boolean DEFAULT_FORCE_SOURCE = false; /** default for whether a field should be highlighted only if a query matches that field */ @@ -124,7 +124,6 @@ public HighlightBuilder(HighlightBuilder template, QueryBuilder highlightQuery, /** * Read from a stream. 
*/ - @SuppressWarnings("this-escape") public HighlightBuilder(StreamInput in) throws IOException { super(in); encoder(in.readOptionalString()); @@ -445,7 +444,7 @@ public HighlightBuilder rewrite(QueryRewriteContext ctx) throws IOException { } - public static class Field extends AbstractHighlighterBuilder { + public static final class Field extends AbstractHighlighterBuilder { static final NamedObjectParser PARSER; static { ObjectParser parser = new ObjectParser<>("highlight_field"); @@ -475,7 +474,6 @@ private Field(Field template, QueryBuilder builder) { /** * Read from a stream. */ - @SuppressWarnings("this-escape") public Field(StreamInput in) throws IOException { super(in); name = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java index bbd626e05d1c..adf7b797bc5b 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java @@ -16,7 +16,7 @@ import java.util.Objects; -public class LegacyReaderContext extends ReaderContext { +public final class LegacyReaderContext extends ReaderContext { private final ShardSearchRequest shardSearchRequest; private final ScrollContext scrollContext; private final Engine.Searcher searcher; @@ -24,7 +24,6 @@ public class LegacyReaderContext extends ReaderContext { private AggregatedDfs aggregatedDfs; private RescoreDocIds rescoreDocIds; - @SuppressWarnings("this-escape") public LegacyReaderContext( ShardSearchContextId id, IndexService indexService, diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 618de8c6f06f..b8dc104c0731 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -61,7 +61,7 @@ /** * A sort builder to sort based on a document field. */ -public class FieldSortBuilder extends SortBuilder { +public final class FieldSortBuilder extends SortBuilder { public static final String NAME = "field_sort"; public static final ParseField MISSING = new ParseField("missing"); @@ -101,7 +101,6 @@ public class FieldSortBuilder extends SortBuilder { private String format; /** Copy constructor. */ - @SuppressWarnings("this-escape") public FieldSortBuilder(FieldSortBuilder template) { this(template.fieldName); this.order(template.order()); diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 88eaadcec513..5d11563b5d8e 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -29,7 +29,7 @@ /** * A sort builder allowing to sort by score. */ -public class ScoreSortBuilder extends SortBuilder { +public final class ScoreSortBuilder extends SortBuilder { public static final String NAME = "_score"; private static final SortFieldAndFormat SORT_SCORE = new SortFieldAndFormat( @@ -44,7 +44,6 @@ public class ScoreSortBuilder extends SortBuilder { /** * Build a ScoreSortBuilder default to descending sort order. */ - @SuppressWarnings("this-escape") public ScoreSortBuilder() { // order defaults to desc when sorting on the _score order(SortOrder.DESC); @@ -53,7 +52,6 @@ public ScoreSortBuilder() { /** * Read from a stream. 
*/ - @SuppressWarnings("this-escape") public ScoreSortBuilder(StreamInput in) throws IOException { order(SortOrder.readFromStream(in)); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index 657fc5a898b9..f126091c785d 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -44,7 +44,7 @@ /** * Top level suggest result, containing the result for each suggestion. */ -public class Suggest implements Iterable>>, Writeable, ToXContentFragment { +public final class Suggest implements Iterable>>, Writeable, ToXContentFragment { public static final String NAME = "suggest"; @@ -61,7 +61,6 @@ public class Suggest implements Iterable>> suggestMap; - @SuppressWarnings("this-escape") public Suggest(List>> suggestions) { // we sort suggestions by their names to ensure iteration over suggestions are consistent // this is needed as we need to fill in suggestion docs in SearchPhaseController#sortDocs diff --git a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java index 29b69b545e5d..da0b0d134b0f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/InternalSnapshotsInfoService.java @@ -41,7 +41,7 @@ import static org.elasticsearch.core.Strings.format; -public class InternalSnapshotsInfoService implements ClusterStateListener, SnapshotsInfoService { +public final class InternalSnapshotsInfoService implements ClusterStateListener, SnapshotsInfoService { public static final Setting INTERNAL_SNAPSHOT_INFO_MAX_CONCURRENT_FETCHES_SETTING = Setting.intSetting( "cluster.snapshot.info.max_concurrent_fetches", @@ -84,7 +84,6 @@ public class InternalSnapshotsInfoService implements 
ClusterStateListener, Snaps private final Object mutex; - @SuppressWarnings("this-escape") public InternalSnapshotsInfoService( final Settings settings, final ClusterService clusterService, diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index a0d4735a91cf..a2a4c1bd444a 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -136,7 +136,7 @@ * which removes {@link RestoreInProgress} when all shards are completed. In case of * restore failure a normal recovery fail-over process kicks in. */ -public class RestoreService implements ClusterStateApplier { +public final class RestoreService implements ClusterStateApplier { private static final Logger logger = LogManager.getLogger(RestoreService.class); @@ -190,7 +190,6 @@ public class RestoreService implements ClusterStateApplier { private volatile boolean refreshRepositoryUuidOnRestore; - @SuppressWarnings("this-escape") public RestoreService( ClusterService clusterService, RepositoriesService repositoriesService, diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index c122940f1b40..411205c5261e 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -68,7 +68,7 @@ * starting and stopping shard level snapshots. * See package level documentation of {@link org.elasticsearch.snapshots} for details. 
*/ -public class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener { +public final class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(SnapshotShardsService.class); private final ClusterService clusterService; @@ -89,7 +89,6 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements // Runs the tasks that promptly notify shards of aborted snapshots so that resources can be released ASAP private final ThrottledTaskRunner notifyOnAbortTaskRunner; - @SuppressWarnings("this-escape") public SnapshotShardsService( Settings settings, ClusterService clusterService, diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 8a15572c3e7f..e6b140a3e70b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -130,7 +130,7 @@ * deletion. * See package level documentation of {@link org.elasticsearch.snapshots} for details. 
*/ -public class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { +public final class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { public static final IndexVersion SHARD_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_6_0; @@ -200,7 +200,6 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus private volatile int maxConcurrentOperations; - @SuppressWarnings("this-escape") public SnapshotsService( Settings settings, ClusterService clusterService, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 3de2b0a5d19a..559eb4712d7c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -13,9 +13,8 @@ import java.util.Collections; -public class ObjectMapperMergeTests extends ESTestCase { +public final class ObjectMapperMergeTests extends ESTestCase { - @SuppressWarnings("this-escape") private final RootObjectMapper rootObjectMapper = createMapping(false, true, true, false); private RootObjectMapper createMapping( diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java b/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java index 5f6ebc09f4c0..93c221a924e0 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/lucene/SkipStartingWithDigitTokenFilter.java @@ -14,9 +14,8 @@ import java.io.IOException; -public class SkipStartingWithDigitTokenFilter extends FilteringTokenFilter { +public final class SkipStartingWithDigitTokenFilter extends FilteringTokenFilter { - @SuppressWarnings("this-escape") private 
final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); private final long asciiDigitsToSkip; diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java index 81684d749d57..85088a6031b3 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java @@ -23,8 +23,7 @@ import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.mockito.Mockito.mock; -public class RestGetIndicesActionTests extends ESTestCase { - @SuppressWarnings("this-escape") +public final class RestGetIndicesActionTests extends ESTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); /** diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java index e3a68b971ba4..4830935cafbd 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java @@ -28,8 +28,7 @@ import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.mockito.Mockito.mock; -public class RestPutIndexTemplateActionTests extends ESTestCase { - @SuppressWarnings("this-escape") +public final class RestPutIndexTemplateActionTests extends ESTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); private RestPutIndexTemplateAction action; diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java index 912cc13f49b3..d7751709302a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java @@ -20,9 +20,8 @@ import java.util.List; import java.util.Map; -public class RestDeleteActionTests extends RestActionTestCase { +public final class RestDeleteActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java index fc11557c2ec0..dd27e3367300 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java @@ -23,8 +23,7 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestGetActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestGetActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java index db859a4a15ff..c60c016acd70 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java @@ -37,12 +37,11 @@ import static org.hamcrest.Matchers.equalTo; import 
static org.hamcrest.Matchers.instanceOf; -public class RestGetSourceActionTests extends RestActionTestCase { +public final class RestGetSourceActionTests extends RestActionTestCase { private static RestRequest request = new FakeRestRequest(); private static FakeRestChannel channel = new FakeRestChannel(request, true, 0); private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request); - @SuppressWarnings("this-escape") private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java index 3b3a94a54595..67c730e86819 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java @@ -37,9 +37,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -public class RestIndexActionTests extends RestActionTestCase { +public final class RestIndexActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private final AtomicReference clusterStateSupplier = new AtomicReference<>(); diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java index 9e1d7e7a5306..45cfc8510876 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiGetActionTests.java @@ -28,10 +28,8 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestMultiGetActionTests extends 
RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiGetActionTests extends RestActionTestCase { XContentType VND_TYPE = randomVendorType(); - @SuppressWarnings("this-escape") List contentTypeHeader = Collections.singletonList(compatibleMediaType(VND_TYPE, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java index 323861171723..e8d21ecd10ee 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java @@ -26,8 +26,7 @@ import java.util.List; import java.util.Map; -public class RestMultiTermVectorsActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiTermVectorsActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java index 7702bda69561..fc40ba365e61 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java @@ -25,8 +25,7 @@ import java.util.List; import java.util.Map; -public class RestTermVectorsActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestTermVectorsActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java index e7c00ab8bb0d..a9fc1e92506f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java @@ -29,8 +29,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.mockito.Mockito.mock; -public class RestUpdateActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestUpdateActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private RestUpdateAction action; diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java index d8a7a4a1dfe5..4498a8344eb9 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java @@ -25,9 +25,8 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestCountActionTests extends RestActionTestCase { +public final class RestCountActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java index d9ae400a860f..52f5396db1c8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java @@ -21,8 
+21,7 @@ import java.util.List; import java.util.Map; -public class RestExplainActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestExplainActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); @Before diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java index 74bd423cce96..6fadb7165216 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java @@ -25,8 +25,7 @@ import java.util.List; import java.util.Map; -public class RestMultiSearchActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestMultiSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7)); private RestMultiSearchAction action; diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 3c9f5422c30f..6d0480048982 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -28,8 +28,7 @@ import java.util.List; import java.util.Map; -public class RestSearchActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); private RestSearchAction action; diff --git 
a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 3d90a253e48c..fefc0bf428c1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -941,7 +941,7 @@ protected long transportDelayMillis(String actionName) { return 0; } - public class ClusterNode { + public final class ClusterNode { private final Logger logger = LogManager.getLogger(ClusterNode.class); private final int nodeIndex; @@ -962,7 +962,6 @@ public class ClusterNode { private ClearableRecycler clearableRecycler; private List blackholedRegisterOperations = new ArrayList<>(); - @SuppressWarnings("this-escape") ClusterNode(int nodeIndex, boolean masterEligible, Settings nodeSettings, NodeHealthService nodeHealthService) { this( nodeIndex, diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java index 3efe729fbe36..026e1b7b975e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java @@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertThat; -public class CoordinationStateTestCluster { +public final class CoordinationStateTestCluster { public static ClusterState clusterState( long term, @@ -181,7 +181,6 @@ void setInitialState(CoordinationMetadata.VotingConfiguration initialConfig, lon final CoordinationMetadata.VotingConfiguration initialConfiguration; final long initialValue; - @SuppressWarnings("this-escape") public 
CoordinationStateTestCluster(List nodes, ElectionStrategy electionStrategy) { this.electionStrategy = electionStrategy; messages = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index d23b79ed0cde..408cf7199c71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; -public class BackgroundIndexer implements AutoCloseable { +public final class BackgroundIndexer implements AutoCloseable { private final Logger logger = LogManager.getLogger(getClass()); @@ -98,7 +98,6 @@ public BackgroundIndexer(String index, Client client, int numOfDocs, final int w * @param autoStart set to true to start indexing as soon as all threads have been created. * @param random random instance to use */ - @SuppressWarnings("this-escape") public BackgroundIndexer( final String index, final Client client, diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java index c20e1ce70e60..1229b3470775 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java @@ -74,11 +74,10 @@ protected void dispatchRequest(RestRequest request) { * By default, will throw {@link AssertionError} when any execution method is called, unless configured otherwise using * {@link #setExecuteVerifier} or {@link #setExecuteLocallyVerifier}. 
*/ - public static class VerifyingClient extends NoOpNodeClient { + public static final class VerifyingClient extends NoOpNodeClient { AtomicReference, ActionRequest, ActionResponse>> executeVerifier = new AtomicReference<>(); AtomicReference, ActionRequest, ActionResponse>> executeLocallyVerifier = new AtomicReference<>(); - @SuppressWarnings("this-escape") public VerifyingClient(String testName) { super(testName); reset(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java index d2a9564d05c1..4b20afcf1e8b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterSpecBuilder.java @@ -14,9 +14,8 @@ import org.elasticsearch.test.cluster.local.distribution.SnapshotDistributionResolver; import org.elasticsearch.test.cluster.util.resource.Resource; -public class DefaultLocalClusterSpecBuilder extends AbstractLocalClusterSpecBuilder { +public final class DefaultLocalClusterSpecBuilder extends AbstractLocalClusterSpecBuilder { - @SuppressWarnings("this-escape") public DefaultLocalClusterSpecBuilder() { super(); this.apply(c -> c.systemProperty("ingest.geoip.downloader.enabled.default", "false")); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java index 88e039b6013e..04d525112aed 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregator.java @@ -25,11 +25,10 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedHistogramAggregator extends AbstractHistogramAggregator { +public final class HistoBackedHistogramAggregator extends AbstractHistogramAggregator { private final HistogramValuesSource.Histogram valuesSource; - @SuppressWarnings("this-escape") public HistoBackedHistogramAggregator( String name, AggregatorFactories factories, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java index 2b80084e2d92..8be41867e9bb 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedAvgAggregator.java @@ -32,7 +32,7 @@ * Average aggregator operating over histogram datatypes {@link HistogramValuesSource} * The aggregation computes weighted average by taking counts into consideration for each value */ -public class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; @@ -41,7 +41,6 @@ public class HistoBackedAvgAggregator extends NumericMetricsAggregator.SingleVal DoubleArray compensations; DocValueFormat format; - @SuppressWarnings("this-escape") public HistoBackedAvgAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java index f094c22e4dff..c153225e4577 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java @@ -26,13 +26,12 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedMaxAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; final DocValueFormat formatter; DoubleArray maxes; - @SuppressWarnings("this-escape") public HistoBackedMaxAggregator( String name, ValuesSourceConfig config, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java index ecf89f8eab15..5efd279ea6ee 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregator.java @@ -26,13 +26,12 @@ import java.io.IOException; import java.util.Map; -public class HistoBackedMinAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedMinAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; final DocValueFormat format; DoubleArray mins; - @SuppressWarnings("this-escape") public HistoBackedMinAggregator( String name, ValuesSourceConfig config, diff --git 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java index ebf1a43e38e2..55428a7f9723 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregator.java @@ -33,7 +33,7 @@ * The aggregator sums each histogram value multiplied by its count. * Eg for a histogram of response times, this is an approximate "total time spent". */ -public class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleValue { private final HistogramValuesSource.Histogram valuesSource; private final DocValueFormat format; @@ -41,7 +41,6 @@ public class HistoBackedSumAggregator extends NumericMetricsAggregator.SingleVal private DoubleArray sums; private DoubleArray compensations; - @SuppressWarnings("this-escape") public HistoBackedSumAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java index 184fd7072fa6..b3f5cd88730a 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregator.java @@ -29,14 +29,13 @@ * The aggregation counts the number of values a histogram field has within the aggregation context * by adding the 
counts of the histograms. */ -public class HistoBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { +public final class HistoBackedValueCountAggregator extends NumericMetricsAggregator.SingleValue { final HistogramValuesSource.Histogram valuesSource; /** Count per bucket */ LongArray counts; - @SuppressWarnings("this-escape") public HistoBackedValueCountAggregator( String name, ValuesSourceConfig valuesSourceConfig, diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java index 2c2f213cf430..5eefa7cfc56f 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregator.java @@ -28,15 +28,15 @@ import java.io.IOException; import java.util.Map; -public class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleValue { +public final class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleValue { - protected final ValuesSource.Numeric valuesSource; + private final ValuesSource.Numeric valuesSource; - protected DoubleArray startValues; - protected DoubleArray endValues; - protected LongArray startTimes; - protected LongArray endTimes; - protected DoubleArray resetCompensations; + private DoubleArray startValues; + private DoubleArray endValues; + private LongArray startTimes; + private LongArray endTimes; + private DoubleArray resetCompensations; private long currentBucket = -1; private long currentEndTime = -1; @@ -49,8 +49,7 @@ public class TimeSeriesRateAggregator extends NumericMetricsAggregator.SingleVal private final Rounding.DateTimeUnit rateUnit; // Unused parameters are so that the constructor implements `RateAggregatorSupplier` - @SuppressWarnings("this-escape") - protected 
TimeSeriesRateAggregator( + TimeSeriesRateAggregator( String name, ValuesSourceConfig valuesSourceConfig, Rounding.DateTimeUnit rateUnit, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java index 30c20f419ebb..7a05a4e712fc 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskCleaner.java @@ -33,7 +33,7 @@ /** * A {@link ClusterStateListener} that completes any {@link ShardFollowTask} which concerns a deleted index. */ -public class ShardFollowTaskCleaner implements ClusterStateListener { +public final class ShardFollowTaskCleaner implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(ShardFollowTaskCleaner.class); @@ -45,7 +45,6 @@ public class ShardFollowTaskCleaner implements ClusterStateListener { */ private final Set completing = Collections.synchronizedSet(new HashSet<>()); - @SuppressWarnings("this-escape") public ShardFollowTaskCleaner(final ClusterService clusterService, final ThreadPool threadPool, final Client client) { this.threadPool = threadPool; this.client = client; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index f53e7bb56212..b73aab1dbfd5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -88,7 +88,7 @@ import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapClient; import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.extractLeaderShardHistoryUUIDs; -public class 
ShardFollowTasksExecutor extends PersistentTasksExecutor { +public final class ShardFollowTasksExecutor extends PersistentTasksExecutor { private static final Logger logger = LogManager.getLogger(ShardFollowTasksExecutor.class); @@ -100,7 +100,6 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor implements IndicesRequest, ToXContentObject { + public static final class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { private static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); private static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); @@ -188,7 +188,6 @@ public IndicesOptions indicesOptions() { return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); } - @SuppressWarnings("this-escape") public Request(StreamInput in) throws IOException { super(in); this.remoteCluster = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index e96836ab6a5f..1388fb3e3db8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -31,7 +31,7 @@ private DeleteDataFrameAnalyticsAction() { super(NAME, AcknowledgedResponse::readFrom); } - public static class Request extends AcknowledgedRequest { + public static final class Request extends AcknowledgedRequest { public static final ParseField FORCE = new ParseField("force"); public static final ParseField TIMEOUT = new ParseField("timeout"); @@ -48,7 +48,6 @@ public Request(StreamInput in) throws IOException { force = in.readBoolean(); } - @SuppressWarnings("this-escape") public Request() { timeout(DEFAULT_TIMEOUT); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java index 599d99281942..c8c8e211fb1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDataFrameAnalyticsAction.java @@ -27,16 +27,14 @@ private GetDataFrameAnalyticsAction() { super(NAME, Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String id) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 9637ba3c0f92..4ea73d1b7a6e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -37,7 +37,7 @@ private GetDatafeedsAction() { super(NAME, Response::new); } - public static class Request extends MasterNodeReadRequest { + public static final class Request extends MasterNodeReadRequest { public static final String ALLOW_NO_MATCH = "allow_no_match"; @@ -49,7 +49,6 @@ public Request(String datafeedId) { this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } - @SuppressWarnings("this-escape") public Request() { local(true); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index f37264cc6bc9..dd838d368ee0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -27,14 +27,12 @@ private GetFiltersAction() { super(NAME, Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String filterId) { setResourceId(filterId); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 41358dc34f40..490c4dd99fcb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -35,7 +35,7 @@ private GetJobsAction() { super(NAME, Response::new); } - public static class Request extends MasterNodeReadRequest { + public static final class Request extends MasterNodeReadRequest { public static final String ALLOW_NO_MATCH = "allow_no_match"; @@ -47,7 +47,6 @@ public Request(String jobId) { this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } - @SuppressWarnings("this-escape") public Request() { local(true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java index 
3d152048563c..0c5fbbc065e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsAction.java @@ -123,7 +123,7 @@ public int hashCode() { } } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField INCLUDE = new ParseField("include"); public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); @@ -136,7 +136,6 @@ public Request(String id) { this(id, null, null); } - @SuppressWarnings("this-escape") public Request(String id, List tags, Set includes) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index cd1b5674dcb7..cc91daf966ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -52,16 +52,14 @@ private GetTrainedModelsStatsAction() { super(NAME, GetTrainedModelsStatsAction.Response::new); } - public static class Request extends AbstractGetResourcesRequest { + public static final class Request extends AbstractGetResourcesRequest { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - @SuppressWarnings("this-escape") public Request() { setAllowNoResources(true); } - @SuppressWarnings("this-escape") public Request(String id) { setResourceId(id); setAllowNoResources(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java index 48a793155e54..d1c82635a83c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDataFrameAnalyticsAction.java @@ -45,7 +45,7 @@ private StopDataFrameAnalyticsAction() { super(NAME, StopDataFrameAnalyticsAction.Response::new); } - public static class Request extends BaseTasksRequest implements ToXContentObject { + public static final class Request extends BaseTasksRequest implements ToXContentObject { public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); public static final ParseField FORCE = new ParseField("force"); @@ -90,12 +90,11 @@ public Request(StreamInput in) throws IOException { expandedIds = new HashSet<>(Arrays.asList(in.readStringArray())); } - @SuppressWarnings("this-escape") public Request() { setTimeout(DEFAULT_TIMEOUT); } - public final Request setId(String id) { + public Request setId(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java index 956c4713adb3..b433e2c89410 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Classification.java @@ -31,7 +31,7 @@ /** * Evaluation of classification results. 
*/ -public class Classification implements Evaluation { +public final class Classification implements Evaluation { public static final ParseField NAME = new ParseField("classification"); @@ -75,7 +75,6 @@ public static Classification fromXContent(XContentParser parser) { */ private final List metrics; - @SuppressWarnings("this-escape") public Classification( String actualField, @Nullable String predictedField, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java index 84b82bde909a..6ca9c20fda14 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java @@ -32,7 +32,7 @@ /** * Evaluation of outlier detection results. 
*/ -public class OutlierDetection implements Evaluation { +public final class OutlierDetection implements Evaluation { public static final ParseField NAME = new ParseField("outlier_detection", "binary_soft_classification"); @@ -75,7 +75,6 @@ public static QueryBuilder actualIsTrueQuery(String actualField) { */ private final List metrics; - @SuppressWarnings("this-escape") public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List metrics) { this.fields = new EvaluationFields( ExceptionsHelper.requireNonNull(actualField, ACTUAL_FIELD), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java index 4b1e444f3222..1add514355e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java @@ -30,7 +30,7 @@ /** * Evaluation of regression results. 
*/ -public class Regression implements Evaluation { +public final class Regression implements Evaluation { public static final ParseField NAME = new ParseField("regression"); @@ -69,7 +69,6 @@ public static Regression fromXContent(XContentParser parser) { */ private final List metrics; - @SuppressWarnings("this-escape") public Regression(String actualField, String predictedField, @Nullable List metrics) { this.fields = new EvaluationFields( ExceptionsHelper.requireNonNull(actualField, ACTUAL_FIELD), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index e92e6e9b9911..f69be31939b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -40,7 +40,7 @@ /** * Trained model assignment object that contains assignment options and the assignment routing table */ -public class TrainedModelAssignment implements SimpleDiffable, ToXContentObject { +public final class TrainedModelAssignment implements SimpleDiffable, ToXContentObject { private static final ParseField REASON = new ParseField("reason"); private static final ParseField ASSIGNMENT_STATE = new ParseField("assignment_state"); @@ -137,7 +137,6 @@ private TrainedModelAssignment( : Math.max(maxAssignedAllocations, totalCurrentAllocations()); } - @SuppressWarnings("this-escape") public TrainedModelAssignment(StreamInput in) throws IOException { this.taskParams = new StartTrainedModelDeploymentAction.TaskParams(in); this.nodeRoutingTable = in.readOrderedMap(StreamInput::readString, RoutingInfo::new); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java index b472c6ef3216..966db6e785c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java @@ -295,14 +295,13 @@ public TransportVersion getMinimalCompatibilityVersion() { return TransportVersions.V_7_6_0; } - public static class Builder { + public static final class Builder { private List featureNames; private ArrayList nodes; private int numNodes; private TargetType targetType = TargetType.REGRESSION; private List classificationLabels; - @SuppressWarnings("this-escape") public Builder() { nodes = new ArrayList<>(); // allocate space in the root node and set to a leaf diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 21b572907d03..51d2ac41cc15 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -448,7 +448,7 @@ public int hashCode() { ); } - public static class Builder { + public static final class Builder { public static final TimeValue DEFAULT_BUCKET_SPAN = TimeValue.timeValueMinutes(5); @@ -464,7 +464,6 @@ public static class Builder { private Boolean multivariateByFields; private TimeValue modelPruneWindow; - @SuppressWarnings("this-escape") public Builder(List detectors) { setDetectors(detectors); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java index 827b25f39f23..775640ac2048 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCounts.java @@ -35,7 +35,7 @@ * so the field is visible. */ -public class DataCounts implements ToXContentObject, Writeable { +public final class DataCounts implements ToXContentObject, Writeable { private static final String DOCUMENT_SUFFIX = "_data_counts"; @@ -161,7 +161,6 @@ public static String v54DocumentId(String jobId) { private Date latestSparseBucketTimeStamp; private Instant logTime; - @SuppressWarnings("this-escape") public DataCounts( String jobId, long processedRecordCount, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java index 3cccb0006d65..618264c61051 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ForecastRequestStats.java @@ -30,7 +30,7 @@ * information about errors, progress and counters. There is exactly 1 document * per forecast request, getting updated while the request is processed. 
*/ -public class ForecastRequestStats implements ToXContentObject, Writeable { +public final class ForecastRequestStats implements ToXContentObject, Writeable { /** * Result type */ @@ -147,7 +147,6 @@ public ForecastRequestStats(ForecastRequestStats forecastRequestStats) { this.status = forecastRequestStats.status; } - @SuppressWarnings("this-escape") public ForecastRequestStats(StreamInput in) throws IOException { jobId = in.readString(); forecastId = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java index 20ff6f4c4ad1..f8dccd244fea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java @@ -14,7 +14,7 @@ /** * A {@link NamedClusterPrivilege} that can be used to define an access to cluster level actions. 
*/ -public class ActionClusterPrivilege implements NamedClusterPrivilege { +public final class ActionClusterPrivilege implements NamedClusterPrivilege { private final String name; private final Set allowedActionPatterns; private final Set excludedActionPatterns; @@ -39,7 +39,6 @@ public ActionClusterPrivilege(final String name, final Set allowedAction * @param allowedActionPatterns a set of cluster action patterns * @param excludedActionPatterns a set of cluster action patterns */ - @SuppressWarnings("this-escape") public ActionClusterPrivilege(final String name, final Set allowedActionPatterns, final Set excludedActionPatterns) { this.name = name; this.allowedActionPatterns = allowedActionPatterns; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java index 438c457d8430..cb55de79342b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SslSettingsLoader.java @@ -33,14 +33,13 @@ /** * A configuration loader for SSL Settings */ -public class SslSettingsLoader extends SslConfigurationLoader { +public final class SslSettingsLoader extends SslConfigurationLoader { private final Settings settings; private final Map> secureSettings; private final Map> standardSettings; private final Map> disabledSettings; - @SuppressWarnings("this-escape") public SslSettingsLoader(Settings settings, String settingPrefix, boolean acceptNonSecurePasswords) { super(settingPrefix); this.settings = settings; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java index 46850e5f4d06..95edd3fc3bb9 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumRequest.java @@ -30,7 +30,7 @@ /** * A request to gather terms for a given field matching a string prefix */ -public class TermsEnumRequest extends BroadcastRequest implements ToXContentObject { +public final class TermsEnumRequest extends BroadcastRequest implements ToXContentObject { public static final IndicesOptions DEFAULT_INDICES_OPTIONS = SearchRequest.DEFAULT_INDICES_OPTIONS; public static int DEFAULT_SIZE = 10; @@ -51,14 +51,12 @@ public TermsEnumRequest() { * Constructs a new term enum request against the provided indices. No indices provided means it will * run against all indices. */ - @SuppressWarnings("this-escape") public TermsEnumRequest(String... indices) { super(indices); indicesOptions(DEFAULT_INDICES_OPTIONS); timeout(DEFAULT_TIMEOUT); } - @SuppressWarnings("this-escape") public TermsEnumRequest(TermsEnumRequest clone) { this.field = clone.field; this.string = clone.string; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java index 1a59f373d75f..b39fc27b7a14 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructure.java @@ -523,7 +523,7 @@ public boolean equals(Object other) { && Objects.equals(this.explanation, that.explanation); } - public static class Builder { + public static final class Builder { private int numLinesAnalyzed; private int numMessagesAnalyzed; @@ -553,7 +553,6 @@ public Builder() { this(Format.SEMI_STRUCTURED_TEXT); } - @SuppressWarnings("this-escape") public Builder(Format 
format) { setFormat(format); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 5ff153db7467..46e844f93695 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -49,7 +49,7 @@ public GetTransformStatsAction() { super(NAME, GetTransformStatsAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; private PageParams pageParams = PageParams.defaultParams(); private boolean allowNoMatch = true; @@ -58,7 +58,6 @@ public static class Request extends BaseTasksRequest { // used internally to expand the queried id expression private List expandedIds; - @SuppressWarnings("this-escape") public Request(String id, @Nullable TimeValue timeout) { setTimeout(timeout); if (Strings.isNullOrEmpty(id) || id.equals("*")) { @@ -96,11 +95,11 @@ public void setExpandedIds(List expandedIds) { this.expandedIds = List.copyOf(expandedIds); } - public final void setPageParams(PageParams pageParams) { + public void setPageParams(PageParams pageParams) { this.pageParams = Objects.requireNonNull(pageParams); } - public final PageParams getPageParams() { + public PageParams getPageParams() { return pageParams; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java index 57c14d17cce4..f48e06a3f743 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java @@ -37,11 +37,10 @@ private ScheduleNowTransformAction() { super(NAME, ScheduleNowTransformAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; - @SuppressWarnings("this-escape") public Request(String id, TimeValue timeout) { this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.setTimeout(ExceptionsHelper.requireNonNull(timeout, TransformField.TIMEOUT.getPreferredName())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java index 097ae6bb05a0..794bf009764f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StopTransformAction.java @@ -46,7 +46,7 @@ private StopTransformAction() { super(NAME, StopTransformAction.Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final String id; private final boolean waitForCompletion; private final boolean force; @@ -54,7 +54,6 @@ public static class Request extends BaseTasksRequest { private final boolean waitForCheckpoint; private Set expandedIds; - @SuppressWarnings("this-escape") public Request( String id, boolean waitForCompletion, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index e943d0dd50ac..f9da4082dbfa 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -42,7 +42,7 @@ private UpdateTransformAction() { super(NAME, Response::new); } - public static class Request extends BaseTasksRequest { + public static final class Request extends BaseTasksRequest { private final TransformConfigUpdate update; private final String id; @@ -50,7 +50,6 @@ public static class Request extends BaseTasksRequest { private TransformConfig config; private AuthorizationState authState; - @SuppressWarnings("this-escape") public Request(TransformConfigUpdate update, String id, boolean deferValidation, TimeValue timeout) { this.update = update; this.id = id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java index 6a33a3beaa19..9658cdd74e19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java @@ -53,7 +53,7 @@ /** * This class holds the configuration details of a data frame transform */ -public class TransformConfig implements SimpleDiffable, Writeable, ToXContentObject { +public final class TransformConfig implements SimpleDiffable, Writeable, ToXContentObject { /** * Version of the last time the config defaults have been changed. @@ -209,7 +209,6 @@ public static String documentId(String transformId) { return NAME + "-" + transformId; } - @SuppressWarnings("this-escape") public TransformConfig( final String id, final SourceConfig source, @@ -245,7 +244,6 @@ public TransformConfig( this.transformVersion = version == null ? 
null : TransformConfigVersion.fromString(version); } - @SuppressWarnings("this-escape") public TransformConfig(final StreamInput in) throws IOException { id = in.readString(); source = new SourceConfig(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java index af13fcbcb713..05e43a11ba84 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java @@ -30,7 +30,7 @@ /** * This class holds the mutable configuration items for a data frame transform */ -public class TransformConfigUpdate implements Writeable { +public final class TransformConfigUpdate implements Writeable { public static final String NAME = "data_frame_transform_config_update"; @@ -107,7 +107,6 @@ public TransformConfigUpdate( this.retentionPolicyConfig = retentionPolicyConfig; } - @SuppressWarnings("this-escape") public TransformConfigUpdate(final StreamInput in) throws IOException { source = in.readOptionalWriteable(SourceConfig::new); dest = in.readOptionalWriteable(DestConfig::new); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java index fe649a203bcc..c1bda220671b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/common/stats/Counters.java @@ -21,11 +21,10 @@ * Calling toNestedMap() will create a nested map, where each dot of the key name will nest deeper * The main reason for this class is that the stats producer should not be worried about how the 
map is actually nested */ -public class Counters implements Writeable { +public final class Counters implements Writeable { private Map counters = new HashMap<>(); - @SuppressWarnings("this-escape") public Counters(StreamInput in) throws IOException { int numCounters = in.readVInt(); for (int i = 0; i < numCounters; i++) { @@ -33,7 +32,6 @@ public Counters(StreamInput in) throws IOException { } } - @SuppressWarnings("this-escape") public Counters(String... names) { for (String name : names) { set(name); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java index 8d1f521fcd7e..d681a84933bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/TransformRegistry.java @@ -16,13 +16,12 @@ import java.util.HashMap; import java.util.Map; -public class TransformRegistry { +public final class TransformRegistry { private final Map< String, TransformFactory>> factories; - @SuppressWarnings("this-escape") public TransformRegistry( Map>> factories ) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java index 6d587468ff0d..f5225e4981e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/chain/ChainTransform.java @@ -143,11 +143,10 @@ protected XContentBuilder typeXContent(XContentBuilder builder, Params params) t } } - public static class Builder implements Transform.Builder { + public static final class Builder implements Transform.Builder { 
private final List transforms = new ArrayList<>(); - @SuppressWarnings("this-escape") public Builder(Transform... transforms) { add(transforms); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java index dbdc09712eb2..a451439fadea 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/AggregateMetricFieldValueFetcher.java @@ -13,14 +13,13 @@ import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; -public class AggregateMetricFieldValueFetcher extends FieldValueFetcher { +public final class AggregateMetricFieldValueFetcher extends FieldValueFetcher { private final AggregateDoubleMetricFieldType aggMetricFieldType; private final AbstractDownsampleFieldProducer fieldProducer; - @SuppressWarnings("this-escape") - protected AggregateMetricFieldValueFetcher( + AggregateMetricFieldValueFetcher( MappedFieldType fieldType, AggregateDoubleMetricFieldType aggMetricFieldType, IndexFieldData fieldData diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java index 3ac70ccc4b9c..c0993bbb9c7c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java @@ -40,9 +40,8 @@ import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; -public 
class EqlFunctionRegistry extends FunctionRegistry { +public final class EqlFunctionRegistry extends FunctionRegistry { - @SuppressWarnings("this-escape") public EqlFunctionRegistry() { register(functions()); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java index 6bd7d7673784..851f7786c529 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumber.java @@ -36,11 +36,10 @@ /** * EQL specific function for parsing strings into numbers. */ -public class ToNumber extends ScalarFunction implements OptionalArgument { +public final class ToNumber extends ScalarFunction implements OptionalArgument { private final Expression value, base; - @SuppressWarnings("this-escape") public ToNumber(Source source, Expression value, Expression base) { super(source, Arrays.asList(value, base != null ? 
base : new Literal(source, null, DataTypes.NULL))); this.value = value; diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java index c0eb7e42e7e5..a6854dc76c0c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Between.java @@ -40,11 +40,10 @@ * between(source, left, right[, greedy=false]) * Extracts a substring from source that’s between left and right substrings */ -public class Between extends CaseInsensitiveScalarFunction implements OptionalArgument { +public final class Between extends CaseInsensitiveScalarFunction implements OptionalArgument { private final Expression input, left, right, greedy; - @SuppressWarnings("this-escape") public Between(Source source, Expression input, Expression left, Expression right, Expression greedy, boolean caseInsensitive) { super(source, Arrays.asList(input, left, right, defaultGreedy(greedy)), caseInsensitive); this.input = input; @@ -135,7 +134,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, leftScript, rightScript, greedyScript); } - protected ScriptTemplate asScriptFrom( + private ScriptTemplate asScriptFrom( ScriptTemplate inputScript, ScriptTemplate leftScript, ScriptTemplate rightScript, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java index fe8d60cc4698..9dabb523f93a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOf.java @@ -36,11 +36,10 @@ * Find the first position (zero-indexed) of a string where a substring is found. * If the optional parameter start is provided, then this will find the first occurrence at or after the start position. */ -public class IndexOf extends CaseInsensitiveScalarFunction implements OptionalArgument { +public final class IndexOf extends CaseInsensitiveScalarFunction implements OptionalArgument { private final Expression input, substring, start; - @SuppressWarnings("this-escape") public IndexOf(Source source, Expression input, Expression substring, Expression start, boolean caseInsensitive) { super(source, asList(input, substring, start != null ? start : new Literal(source, null, DataTypes.NULL)), caseInsensitive); this.input = input; @@ -103,7 +102,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, substringScript, startScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate substringScript, ScriptTemplate startScript) { + private ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate substringScript, ScriptTemplate startScript) { return new ScriptTemplate( format( Locale.ROOT, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java index 4137d8d41e3f..f79785636ca5 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Match.java @@ -32,18 +32,16 @@ * Returns true if the source field matches any of the provided regular expressions * Refer to: https://eql.readthedocs.io/en/latest/query-guide/functions.html#match */ -public class Match extends 
BaseSurrogateFunction { +public final class Match extends BaseSurrogateFunction { private final Expression field; private final List patterns; private final boolean caseInsensitive; - @SuppressWarnings("this-escape") public Match(Source source, Expression field, List patterns, boolean caseInsensitive) { this(source, CollectionUtils.combine(singletonList(field), patterns), caseInsensitive); } - @SuppressWarnings("this-escape") private Match(Source source, List children, boolean caseInsensitive) { super(source, children); this.field = children().get(0); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java index 524ccf0422cc..632ed8e72e4b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/Substring.java @@ -38,11 +38,10 @@ * EQL specific substring function - similar to the one in Python. * Note this is different than the one in SQL. */ -public class Substring extends ScalarFunction implements OptionalArgument { +public final class Substring extends ScalarFunction implements OptionalArgument { private final Expression input, start, end; - @SuppressWarnings("this-escape") public Substring(Source source, Expression input, Expression start, Expression end) { super(source, Arrays.asList(input, start, end != null ? 
end : new Literal(source, null, DataTypes.NULL))); this.input = input; @@ -98,7 +97,7 @@ public ScriptTemplate asScript() { return asScriptFrom(inputScript, startScript, endScript); } - protected ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate startScript, ScriptTemplate endScript) { + private ScriptTemplate asScriptFrom(ScriptTemplate inputScript, ScriptTemplate startScript, ScriptTemplate endScript) { return new ScriptTemplate( format( Locale.ROOT, diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java index 5f71d316333e..f573ea805b0b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java @@ -61,7 +61,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; -public class TransportEqlSearchAction extends HandledTransportAction +public final class TransportEqlSearchAction extends HandledTransportAction implements AsyncTaskManagementService.AsyncOperation { @@ -73,7 +73,6 @@ public class TransportEqlSearchAction extends HandledTransportAction asyncTaskManagementService; - @SuppressWarnings("this-escape") @Inject public TransportEqlSearchAction( Settings settings, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 842f8719f19d..b6ba42f95360 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -16,7 +16,7 @@ /** * {@link Vector} where 
each entry references a lucene document. */ -public class DocVector extends AbstractVector implements Vector { +public final class DocVector extends AbstractVector implements Vector { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DocVector.class); @@ -48,7 +48,6 @@ public class DocVector extends AbstractVector implements Vector { final DocBlock block; - @SuppressWarnings("this-escape") public DocVector(IntVector shards, IntVector segments, IntVector docs, Boolean singleSegmentNonDecreasing) { super(shards.getPositionCount(), null); this.shards = shards; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 2d4a026afaf6..2199d4bddaf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -92,9 +92,8 @@ import java.util.Locale; import java.util.stream.Collectors; -public class EsqlFunctionRegistry extends FunctionRegistry { +public final class EsqlFunctionRegistry extends FunctionRegistry { - @SuppressWarnings("this-escape") public EsqlFunctionRegistry() { register(functions()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 6dcba915186a..e5ac3e395f6a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -25,7 +25,7 @@ * the engine). 
* As such the field is marked as unresolved (so the verifier can pick up its usage outside project). */ -public class UnsupportedAttribute extends FieldAttribute implements Unresolvable { +public final class UnsupportedAttribute extends FieldAttribute implements Unresolvable { private final String message; private final boolean hasCustomMessage; @@ -42,7 +42,6 @@ public UnsupportedAttribute(Source source, String name, UnsupportedEsField field this(source, name, field, customMessage, null); } - @SuppressWarnings("this-escape") public UnsupportedAttribute(Source source, String name, UnsupportedEsField field, String customMessage, NameId id) { super(source, null, name, field, null, Nullability.TRUE, id, false); this.hasCustomMessage = customMessage != null; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 2117828be653..a3d08e4cb630 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -37,14 +37,13 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; -public class Case extends ScalarFunction implements EvaluatorMapper { +public final class Case extends ScalarFunction implements EvaluatorMapper { record Condition(Expression condition, Expression value) {} private final List conditions; private final Expression elseValue; private DataType dataType; - @SuppressWarnings("this-escape") public Case(Source source, Expression first, List rest) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); int conditionCount = children().size() / 2; diff --git 
a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java index d4964f14b3fc..4961efd7253e 100644 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java +++ b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java @@ -25,8 +25,7 @@ import static org.hamcrest.Matchers.instanceOf; -public class RestGraphActionTests extends RestActionTestCase { - @SuppressWarnings("this-escape") +public final class RestGraphActionTests extends RestActionTestCase { private final List compatibleMediaType = Collections.singletonList(randomCompatibleMediaType(RestApiVersion.V_7)); @Before diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 97c17d18d716..b5c35e758a65 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -115,13 +115,12 @@ public static class Defaults { public static final EnumSet METRICS = EnumSet.noneOf(Metric.class); } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter> meta = Parameter.metaParam(); private final Parameter ignoreMalformed; - @SuppressWarnings("this-escape") private final Parameter> metrics = new Parameter<>(Names.METRICS, false, () -> Defaults.METRICS, (n, c, o) -> { @SuppressWarnings("unchecked") List metricsList = (List) o; diff --git 
a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 62b02f5a3d85..90c055f3e77b 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -76,7 +76,7 @@ private static UnsignedLongFieldMapper toType(FieldMapper in) { return (UnsignedLongFieldMapper) in; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); @@ -102,7 +102,6 @@ public Builder(String name, Settings settings, IndexMode mode) { this(name, IGNORE_MALFORMED_SETTING.get(settings), mode); } - @SuppressWarnings("this-escape") public Builder(String name, boolean ignoreMalformedByDefault, IndexMode mode) { super(name); this.ignoreMalformed = Parameter.explicitBoolParam( @@ -438,7 +437,7 @@ public CollapseType collapseType() { * null, if a value represents some other number * throws an exception if a value is wrongly formatted number */ - protected static Long parseTerm(Object value) { + static Long parseTerm(Object value) { if (value instanceof Number) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long lv = ((Number) value).longValue(); @@ -472,7 +471,7 @@ protected static Long parseTerm(Object value) { * null, if value is higher than the maximum allowed value for unsigned long * throws an exception is value represents wrongly formatted number */ - protected static Long 
parseLowerRangeTerm(Object value, boolean include) { + static Long parseLowerRangeTerm(Object value, boolean include) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long longValue = ((Number) value).longValue(); if (longValue < 0) return 0L; // limit lowerTerm to min value for unsigned long: 0 @@ -509,7 +508,7 @@ protected static Long parseLowerRangeTerm(Object value, boolean include) { * -1 (unsigned long of 18446744073709551615) for values greater than 18446744073709551615 * throws an exception is value represents wrongly formatted number */ - protected static Long parseUpperRangeTerm(Object value, boolean include) { + static Long parseUpperRangeTerm(Object value, boolean include) { if ((value instanceof Long) || (value instanceof Integer) || (value instanceof Short) || (value instanceof Byte)) { long longValue = ((Number) value).longValue(); if ((longValue < 0) || (longValue == 0 && include == false)) return null; // upperTerm is below minimum diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 3c5b5c4243c5..e52a60691d15 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -42,7 +42,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class MlInitializationService implements ClusterStateListener { +public final class MlInitializationService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(MlInitializationService.class); @@ -85,7 +85,6 @@ public class MlInitializationService implements ClusterStateListener { } // For testing - @SuppressWarnings("this-escape") public 
MlInitializationService( Client client, ThreadPool threadPool, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 11b0633ee720..18d974473251 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -30,7 +30,7 @@ import static org.elasticsearch.core.Strings.format; -public class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { +public final class MlAutoscalingDeciderService implements AutoscalingDeciderService, LocalNodeMasterListener { private static final Logger logger = LogManager.getLogger(MlAutoscalingDeciderService.class); @@ -46,7 +46,6 @@ public class MlAutoscalingDeciderService implements AutoscalingDeciderService, L private volatile boolean isMaster; private volatile int allocatedProcessorsScale; - @SuppressWarnings("this-escape") public MlAutoscalingDeciderService( MlMemoryTracker memoryTracker, Settings settings, @@ -56,7 +55,6 @@ public MlAutoscalingDeciderService( this(new NodeLoadDetector(memoryTracker), settings, nodeAvailabilityZoneMapper, clusterService, System::currentTimeMillis); } - @SuppressWarnings("this-escape") MlAutoscalingDeciderService( NodeLoadDetector nodeLoadDetector, Settings settings, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java index 108b7eaff06e..df2f66f6c5a4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeFakeAvailabilityZoneMapper.java @@ -29,7 +29,6 @@ public class NodeFakeAvailabilityZoneMapper extends AbstractNodeAvailabilityZone private static final Logger logger = LogManager.getLogger(NodeFakeAvailabilityZoneMapper.class); - @SuppressWarnings("this-escape") public NodeFakeAvailabilityZoneMapper(Settings settings, ClusterSettings clusterSettings) { this(settings, clusterSettings, null); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java index 24da7d2e4656..cf55af02b814 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/NodeRealAvailabilityZoneMapper.java @@ -34,7 +34,6 @@ public class NodeRealAvailabilityZoneMapper extends AbstractNodeAvailabilityZone private volatile List awarenessAttributes; - @SuppressWarnings("this-escape") public NodeRealAvailabilityZoneMapper(Settings settings, ClusterSettings clusterSettings) { this(settings, clusterSettings, null); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java index 1eda86b03046..e04d925f429f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java @@ -51,7 +51,6 @@ public class CleanerService extends AbstractLifecycleComponent { clusterSettings.addSettingsUpdateConsumer(MonitoringField.HISTORY_DURATION, this::setGlobalRetention); } - @SuppressWarnings("this-escape") public 
CleanerService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { this(settings, clusterSettings, threadPool, new DefaultExecutionScheduler()); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index ba43cf82d145..20421ca90950 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -75,7 +75,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener, LicenseStateListener { +public final class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener, LicenseStateListener { private static final Logger logger = LogManager.getLogger(LocalExporter.class); @@ -108,7 +108,6 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle private long stateInitializedTime; - @SuppressWarnings("this-escape") public LocalExporter( Exporter.Config config, Client client, diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java index cbf1d606e3a5..4dd92b125b88 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java @@ -26,9 +26,8 @@ /** * Tests {@link HttpResource}. 
*/ -public class HttpResourceTests extends ESTestCase { +public final class HttpResourceTests extends ESTestCase { - @SuppressWarnings("this-escape") private final String owner = getTestName(); private final RestClient mockClient = mock(RestClient.class); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java index 464fd052f542..23a9ee28b9ac 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java @@ -19,10 +19,9 @@ /** * Tests {@link NodeFailureListener}. */ -public class NodeFailureListenerTests extends ESTestCase { +public final class NodeFailureListenerTests extends ESTestCase { private final Sniffer sniffer = mock(Sniffer.class); - @SuppressWarnings("this-escape") private final HttpResource resource = new MockHttpResource(getTestName(), false); private final Node node = new Node(new HttpHost("localhost", 9200)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java index f341a1fadc22..9ef6b5482c73 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/WatcherExistsHttpResourceTests.java @@ -32,13 +32,12 @@ /** * Tests {@link WatcherExistsHttpResource}. 
*/ -public class WatcherExistsHttpResourceTests extends AbstractPublishableHttpResourceTestCase { +public final class WatcherExistsHttpResourceTests extends AbstractPublishableHttpResourceTestCase { private final ClusterService clusterService = mock(ClusterService.class); private final MultiHttpResource mockWatches = mock(MultiHttpResource.class); private final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, mockWatches); - @SuppressWarnings("this-escape") private final Map expectedParameters = getParameters(resource.getDefaultParameters(), GET_EXISTS, XPACK_DOES_NOT_EXIST); public void testDoCheckIgnoresClientWhenNotElectedMaster() { diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java index afc13567e59b..c9c5a7c7a486 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java @@ -26,7 +26,7 @@ import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; -public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { +public final class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(FailShardsOnInvalidLicenseClusterListener.class); @@ -38,7 +38,6 @@ public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateLi private boolean allowed; - @SuppressWarnings("this-escape") public FailShardsOnInvalidLicenseClusterListener(XPackLicenseState xPackLicenseState, RerouteService rerouteService) { 
this.xPackLicenseState = xPackLicenseState; this.rerouteService = rerouteService; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java index 8d7f81d28579..2e796a04200f 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60MetadataOnlyPointsReader.java @@ -35,13 +35,12 @@ import java.util.Map; /** Reads the metadata of point values previously written with Lucene60PointsWriter */ -public class Lucene60MetadataOnlyPointsReader extends PointsReader { +public final class Lucene60MetadataOnlyPointsReader extends PointsReader { final IndexInput dataIn; final SegmentReadState readState; final Map readers = new HashMap<>(); /** Sole constructor */ - @SuppressWarnings("this-escape") public Lucene60MetadataOnlyPointsReader(SegmentReadState readState) throws IOException { this.readState = readState; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java index 01ff4d67b102..f2e9e8b04d3c 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeMap.java @@ -32,7 +32,7 @@ * Worth noting the {@link #combine(AttributeMap)}, {@link #intersect(AttributeMap)} and {@link #subtract(AttributeMap)} methods which * return copies, decoupled from the input maps. In other words the returned maps can be modified without affecting the input or vice-versa. 
*/ -public class AttributeMap implements Map { +public final class AttributeMap implements Map { static class AttributeWrapper { @@ -155,7 +155,7 @@ public String toString() { private static final AttributeMap EMPTY = new AttributeMap<>(emptyMap()); @SuppressWarnings("unchecked") - public static final AttributeMap emptyAttributeMap() { + public static AttributeMap emptyAttributeMap() { return EMPTY; } @@ -169,7 +169,6 @@ public AttributeMap() { delegate = new LinkedHashMap<>(); } - @SuppressWarnings("this-escape") public AttributeMap(Attribute key, E value) { delegate = new LinkedHashMap<>(); add(key, value); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java index 7461a3f70a33..7b0012b58bf9 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/ExpressionSet.java @@ -17,7 +17,7 @@ /** * @param expression type */ -public class ExpressionSet implements Set { +public final class ExpressionSet implements Set { @SuppressWarnings("rawtypes") public static final ExpressionSet EMPTY = new ExpressionSet<>(emptyList()); @@ -34,7 +34,6 @@ public ExpressionSet() { super(); } - @SuppressWarnings("this-escape") public ExpressionSet(Collection c) { addAll(c); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java index efb6809ae428..cd6a68f135c4 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/fulltext/StringQueryPredicate.java @@ -15,11 +15,10 @@ import static 
java.util.Collections.emptyList; -public class StringQueryPredicate extends FullTextPredicate { +public final class StringQueryPredicate extends FullTextPredicate { private final Map fields; - @SuppressWarnings("this-escape") public StringQueryPredicate(Source source, String query, String options) { super(source, query, options, emptyList()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java index 28719b279614..db062c289501 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/RemoteClusterResolver.java @@ -16,10 +16,9 @@ import java.util.TreeSet; import java.util.concurrent.CopyOnWriteArraySet; -public class RemoteClusterResolver extends RemoteClusterAware { +public final class RemoteClusterResolver extends RemoteClusterAware { private final CopyOnWriteArraySet clusters; - @SuppressWarnings("this-escape") public RemoteClusterResolver(Settings settings, ClusterSettings clusterSettings) { super(settings); clusters = new CopyOnWriteArraySet<>(getEnabledRemoteClusters(settings)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java index fcae0fa6c09f..5621bdced15b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java @@ -31,9 +31,8 @@ * in an integration test class, because the reserved handlers are injected through * SPI. 
(see {@link LocalReservedUnstableSecurityStateHandlerProvider}) */ -public class UnstableLocalStateSecurity extends LocalStateSecurity { +public final class UnstableLocalStateSecurity extends LocalStateSecurity { - @SuppressWarnings("this-escape") public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { super(settings, configPath); // We reuse most of the initialization of LocalStateSecurity, we then just overwrite diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java index 5c53179b5aa9..19202bb10921 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java @@ -21,11 +21,10 @@ import java.util.HashMap; import java.util.Map; -public class DummyUsernamePasswordRealm extends UsernamePasswordRealm { +public final class DummyUsernamePasswordRealm extends UsernamePasswordRealm { private Map> users; - @SuppressWarnings("this-escape") public DummyUsernamePasswordRealm(RealmConfig config) { super(config); initRealmRef( diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java index f9d8f69d888c..554b617774db 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java @@ -33,14 +33,13 @@ * * Currently, this consists of keeping track of whether we've seen nodes which are marked for shutdown. 
*/ -public class NodeSeenService implements ClusterStateListener { +public final class NodeSeenService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(NodeSeenService.class); final ClusterService clusterService; private final MasterServiceTaskQueue setSeenTaskQueue; - @SuppressWarnings("this-escape") public NodeSeenService(ClusterService clusterService) { this.clusterService = clusterService; this.setSeenTaskQueue = clusterService.createTaskQueue( diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java index bb1e08b9561a..a4f73e0e3bda 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java @@ -45,7 +45,7 @@ * * SLM must be running to fix warning reported by this indicator. 
*/ -public class SlmHealthIndicatorService implements HealthIndicatorService { +public final class SlmHealthIndicatorService implements HealthIndicatorService { public static final String NAME = "slm"; @@ -83,7 +83,6 @@ static Diagnosis.Definition checkRecentlyFailedSnapshots(String causeText, Strin private final ClusterService clusterService; private volatile long failedSnapshotWarnThreshold; - @SuppressWarnings("this-escape") public SlmHealthIndicatorService(ClusterService clusterService) { this.clusterService = clusterService; this.failedSnapshotWarnThreshold = clusterService.getClusterSettings().get(SLM_HEALTH_FAILED_SNAPSHOT_WARN_THRESHOLD_SETTING); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 89b0c1c6ef92..892e251285f1 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -108,7 +108,7 @@ private static Builder builder(FieldMapper in) { return ((GeoShapeWithDocValuesFieldMapper) in).builder; } - public static class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.Builder { final Parameter indexed = Parameter.indexParam(m -> builder(m).indexed.get(), true); final Parameter stored = Parameter.storeParam(m -> builder(m).stored.get(), false); @@ -125,7 +125,6 @@ public static class Builder extends FieldMapper.Builder { private final IndexVersion version; private final GeoFormatterFactory geoFormatterFactory; - @SuppressWarnings("this-escape") public Builder( String name, IndexVersion version, @@ -145,7 +144,7 @@ public Builder( } // for testing - protected Builder setStored(boolean stored) { + Builder 
setStored(boolean stored) { this.stored.setValue(stored); return this; } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java index 8bf939aeae49..534c08f39c7e 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Map; -public class GeoHexGridAggregationBuilder extends GeoGridAggregationBuilder { +public final class GeoHexGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geohex_grid"; private static final int DEFAULT_PRECISION = 5; private static final int DEFAULT_MAX_NUM_CELLS = 10000; @@ -51,7 +51,6 @@ static int parsePrecision(XContentParser parser) throws IOException, Elasticsear return XContentMapValues.nodeIntegerValue(node); } - @SuppressWarnings("this-escape") public GeoHexGridAggregationBuilder(String name) { super(name); precision(DEFAULT_PRECISION); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java index 52e14ca1fd2b..2789ecdbf120 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java @@ -13,7 +13,7 @@ import java.util.Objects; import java.util.Set; -public class RequestInfo { +public final class RequestInfo { private static final String CANVAS = "canvas"; public static final String 
ODBC_32 = "odbc32"; private static final String ODBC_64 = "odbc64"; @@ -46,14 +46,12 @@ public RequestInfo(Mode mode, String clientId) { this(mode, clientId, null); } - @SuppressWarnings("this-escape") public RequestInfo(Mode mode, String clientId, String version) { mode(mode); clientId(clientId); version(version); } - @SuppressWarnings("this-escape") public RequestInfo(Mode mode, SqlVersion version) { mode(mode); this.version = version; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index c0bf3efa74f1..2debdccc7c99 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -78,7 +78,7 @@ import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; -public class Analyzer extends ParameterizedRuleExecutor { +public final class Analyzer extends ParameterizedRuleExecutor { private static final Iterable> rules; @@ -114,7 +114,6 @@ public class Analyzer extends ParameterizedRuleExecutor values; @@ -45,7 +45,6 @@ public Pivot(Source source, LogicalPlan child, Expression column, List +public final class TransportSqlQueryAction extends HandledTransportAction implements AsyncTaskManagementService.AsyncOperation { @@ -74,7 +74,6 @@ public class TransportSqlQueryAction extends HandledTransportAction asyncTaskManagementService; - @SuppressWarnings("this-escape") @Inject public TransportSqlQueryAction( Settings settings, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java index 
929150e916d2..db1a9ed9f8c1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParser.java @@ -50,7 +50,7 @@ import static org.elasticsearch.core.Strings.format; -public class ReportingAttachmentParser implements EmailAttachmentParser { +public final class ReportingAttachmentParser implements EmailAttachmentParser { public static final String TYPE = "reporting"; @@ -137,7 +137,6 @@ public static List> getSettings() { private boolean warningEnabled = REPORT_WARNING_ENABLED_SETTING.getDefault(Settings.EMPTY); private final Map customWarnings = new ConcurrentHashMap<>(1); - @SuppressWarnings("this-escape") public ReportingAttachmentParser( Settings settings, WebhookService webhookService, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java index 5d20db35dc7a..7586543d0869 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java @@ -19,7 +19,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class DayTimes implements Times { +public final class DayTimes implements Times { public static final DayTimes NOON = new DayTimes("noon", new int[] { 12 }, new int[] { 0 }); public static final DayTimes MIDNIGHT = new DayTimes("midnight", new int[] { 0 }, new int[] { 0 }); @@ -36,12 +36,10 @@ public DayTimes(int hour, int minute) { this(new int[] { hour }, new int[] { minute }); } - 
@SuppressWarnings("this-escape") public DayTimes(int[] hour, int[] minute) { this(null, hour, minute); } - @SuppressWarnings("this-escape") DayTimes(String time, int[] hour, int[] minute) { this.time = time; this.hour = hour; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java index b01a786316e5..37fb70a3e89f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java @@ -22,7 +22,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class MonthTimes implements Times { +public final class MonthTimes implements Times { public static final String LAST = "last_day"; public static final String FIRST = "first_day"; @@ -37,7 +37,6 @@ public MonthTimes() { this(DEFAULT_DAYS, DEFAULT_TIMES); } - @SuppressWarnings("this-escape") public MonthTimes(int[] days, DayTimes[] times) { this.days = days.length == 0 ? 
DEFAULT_DAYS : days; Arrays.sort(this.days); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java index a2091295820f..55b1d494f8ac 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.Strings.join; -public class YearTimes implements Times { +public final class YearTimes implements Times { public static final EnumSet DEFAULT_MONTHS = EnumSet.of(Month.JANUARY); public static final int[] DEFAULT_DAYS = new int[] { 1 }; @@ -38,7 +38,6 @@ public YearTimes() { this(DEFAULT_MONTHS, DEFAULT_DAYS, DEFAULT_TIMES); } - @SuppressWarnings("this-escape") public YearTimes(EnumSet months, int[] days, DayTimes[] times) { this.months = months.isEmpty() ? DEFAULT_MONTHS : months; this.days = days.length == 0 ? 
DEFAULT_DAYS : days; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java index 359aa2b28f66..b6c1cebfa656 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatchExecutionContextMockBuilder.java @@ -22,12 +22,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class WatchExecutionContextMockBuilder { +public final class WatchExecutionContextMockBuilder { private final WatchExecutionContext ctx; private final Watch watch; - @SuppressWarnings("this-escape") public WatchExecutionContextMockBuilder(String watchId) { ctx = mock(WatchExecutionContext.class); watch = mock(Watch.class); diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index ec206c64a237..480704b89ca6 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -122,8 +122,7 @@ public TokenStreamComponents createComponents(String fieldName) { } }); - public static class PunctuationFoldingFilter extends TokenFilter { - @SuppressWarnings("this-escape") + public static final class PunctuationFoldingFilter extends TokenFilter { private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); /** @@ -137,7 +136,7 @@ public PunctuationFoldingFilter(TokenStream in) { } @Override - public final boolean incrementToken() throws IOException { + public boolean incrementToken() throws IOException { if 
(input.incrementToken()) { normalize(termAtt.buffer(), 0, termAtt.length()); return true; @@ -587,7 +586,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { throw new IllegalStateException("Invalid query type found parsing regex query:" + approxQuery); } - protected void getNgramTokens(Set tokens, String fragment) { + private void getNgramTokens(Set tokens, String fragment) { if (fragment.equals(TOKEN_START_STRING) || fragment.equals(TOKEN_END_STRING)) { // If a regex is a form of match-all e.g. ".*" we only produce the token start/end markers as search // terms which can be ignored. diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java index d013ce143a67..839d21bbd3e8 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java @@ -28,7 +28,7 @@ * (2) It performs role mapping to determine the roles for the looked-up user * (3) It caches the looked-up User objects */ -public class CustomRoleMappingRealm extends Realm implements CachingRealm { +public final class CustomRoleMappingRealm extends Realm implements CachingRealm { public static final String TYPE = "custom_role_mapping"; @@ -38,7 +38,6 @@ public class CustomRoleMappingRealm extends Realm implements CachingRealm { private final Cache cache; private final UserRoleMapper roleMapper; - @SuppressWarnings("this-escape") public CustomRoleMappingRealm(RealmConfig config, UserRoleMapper roleMapper) { super(config); this.cache = CacheBuilder.builder().build(); From 3c2d8f265dcf05717d06515828b147ddfcce1ae3 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 2 Nov 2023 09:34:05 -0400 Subject: [PATCH 33/47] Remove HLRC from 
repository-old-versions test (#101696) --- .../client/core/AcknowledgedResponse.java | 73 ----------- .../core/ShardsAcknowledgedResponse.java | 50 -------- .../oldrepos/OldRepositoryAccessIT.java | 118 +++++++----------- 3 files changed, 46 insertions(+), 195 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java deleted file mode 100644 index 7adcee74cb20..000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/AcknowledgedResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.core; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class AcknowledgedResponse { - - protected static final String PARSE_FIELD_NAME = "acknowledged"; - private static final ConstructingObjectParser PARSER = AcknowledgedResponse.generateParser( - "acknowledged_response", - AcknowledgedResponse::new, - AcknowledgedResponse.PARSE_FIELD_NAME - ); - - private final boolean acknowledged; - - public AcknowledgedResponse(final boolean acknowledged) { - this.acknowledged = acknowledged; - } - - public boolean isAcknowledged() { - return acknowledged; - } - - protected static ConstructingObjectParser generateParser(String name, Function ctor, String parseField) { - ConstructingObjectParser p = new ConstructingObjectParser<>(name, true, args -> ctor.apply((boolean) args[0])); - p.declareBoolean(constructorArg(), new ParseField(parseField)); - return p; - } - - public static AcknowledgedResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final AcknowledgedResponse that = (AcknowledgedResponse) o; - return isAcknowledged() == that.isAcknowledged(); - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged); - } - - /** - * @return the field name this response uses to output the acknowledged flag - */ - protected String getFieldName() { - return PARSE_FIELD_NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java deleted file mode 100644 index a80a6bb2a15b..000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/ShardsAcknowledgedResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.core; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class ShardsAcknowledgedResponse extends AcknowledgedResponse { - - protected static final String SHARDS_PARSE_FIELD_NAME = "shards_acknowledged"; - - private static ConstructingObjectParser buildParser() { - - ConstructingObjectParser p = new ConstructingObjectParser<>( - "freeze", - true, - args -> new ShardsAcknowledgedResponse((boolean) args[0], (boolean) args[1]) - ); - p.declareBoolean(constructorArg(), new ParseField(AcknowledgedResponse.PARSE_FIELD_NAME)); - p.declareBoolean(constructorArg(), new ParseField(SHARDS_PARSE_FIELD_NAME)); - return p; - } - - private static final ConstructingObjectParser PARSER = buildParser(); - - private final boolean shardsAcknowledged; - - public ShardsAcknowledgedResponse(boolean acknowledged, boolean shardsAcknowledged) { - super(acknowledged); - this.shardsAcknowledged = shardsAcknowledged; - } - - public boolean isShardsAcknowledged() { - return shardsAcknowledged; - } - - public static ShardsAcknowledgedResponse fromXContent(XContentParser parser) throws 
IOException { - return PARSER.parse(parser, null); - } -} diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index b48d31358eae..2a9d761b7c3c 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -11,15 +11,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.core.ShardsAcknowledgedResponse; import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; @@ -27,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.QueryBuilders; @@ -43,7 +41,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; @@ -75,12 +72,6 @@ protected Settings restClientSettings() { return 
Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } - @SuppressWarnings("removal") - protected static RestHighLevelClient highLevelClient(RestClient client) { - return new RestHighLevelClient(client, ignore -> {}, Collections.emptyList()) { - }; - } - public void testOldRepoAccess() throws IOException { runTest(false); } @@ -113,10 +104,7 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { int numDocs = 10; int extraDocs = 1; final Set expectedIds = new HashSet<>(); - try ( - RestHighLevelClient client = highLevelClient(adminClient()); - RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build() - ) { + try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { if (afterRestart == false) { beforeRestart( sourceOnlyRepository, @@ -126,7 +114,6 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { numDocs, extraDocs, expectedIds, - client, oldEs, indexName ); @@ -151,7 +138,6 @@ private void beforeRestart( int numDocs, int extraDocs, Set expectedIds, - RestHighLevelClient client, RestClient oldEs, String indexName ) throws IOException { @@ -262,35 +248,15 @@ private void beforeRestart( assertThat(getResp.evaluate("snapshots.0.stats.total.file_count"), greaterThan(0)); // restore / mount and check whether searches work - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + restoreMountAndVerify(numDocs, expectedIds, numberOfShards, sourceOnlyRepository, oldVersion, indexName, repoName, snapshotName); // close indices - assertTrue(closeIndex(client(), "restored_" + indexName).isShardsAcknowledged()); - assertTrue(closeIndex(client(), "mounted_full_copy_" + indexName).isShardsAcknowledged()); - assertTrue(closeIndex(client(), "mounted_shared_cache_" + indexName).isShardsAcknowledged()); + closeIndex(client(), "restored_" + indexName); 
+ closeIndex(client(), "mounted_full_copy_" + indexName); + closeIndex(client(), "mounted_shared_cache_" + indexName); // restore / mount again - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + restoreMountAndVerify(numDocs, expectedIds, numberOfShards, sourceOnlyRepository, oldVersion, indexName, repoName, snapshotName); } private String getType(Version oldVersion, String id) { @@ -305,7 +271,6 @@ private static String sourceForDoc(int i) { private void restoreMountAndVerify( int numDocs, Set expectedIds, - RestHighLevelClient client, int numberOfShards, boolean sourceOnlyRepository, Version oldVersion, @@ -358,7 +323,7 @@ private void restoreMountAndVerify( } // run a search against the index - assertDocs("restored_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion, numberOfShards); + assertDocs("restored_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); // mount as full copy searchable snapshot Request mountRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_mount"); @@ -378,7 +343,7 @@ private void restoreMountAndVerify( ensureGreen("mounted_full_copy_" + indexName); // run a search against the index - assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion, numberOfShards); + assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); // mount as shared cache searchable snapshot mountRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_mount"); @@ -391,7 +356,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, (int) mountResponse.evaluate("snapshot.shards.successful")); // run a search against the index - assertDocs("mounted_shared_cache_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, 
oldVersion, numberOfShards); + assertDocs("mounted_shared_cache_" + indexName, numDocs, expectedIds, sourceOnlyRepository, oldVersion, numberOfShards); } @SuppressWarnings("removal") @@ -399,7 +364,6 @@ private void assertDocs( String index, int numDocs, Set expectedIds, - RestHighLevelClient client, boolean sourceOnlyRepository, Version oldVersion, int numberOfShards @@ -410,8 +374,10 @@ private void assertDocs( .build(); RequestOptions randomRequestOptions = randomBoolean() ? RequestOptions.DEFAULT : v7RequestOptions; + SearchResponse searchResponse; + // run a search against the index - SearchResponse searchResponse = client.search(new SearchRequest(index), randomRequestOptions); + searchResponse = search(index, null, randomRequestOptions); logger.info(searchResponse); // check hit count assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); @@ -429,12 +395,11 @@ private void assertDocs( String id = randomFrom(expectedIds); int num = getIdAsNumeric(id); // run a search using runtime fields against the index - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource() - .query(QueryBuilders.matchQuery("val", num)) - .runtimeMappings(Map.of("val", Map.of("type", "long"))) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchQuery("val", num)) + .runtimeMappings(Map.of("val", Map.of("type", "long"))), randomRequestOptions ); logger.info(searchResponse); @@ -444,24 +409,24 @@ private void assertDocs( if (sourceOnlyRepository == false) { // search using reverse sort on val - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource() - .query(QueryBuilders.matchAllQuery()) - .sort(SortBuilders.fieldSort("val").order(SortOrder.DESC)) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()) + .sort(SortBuilders.fieldSort("val").order(SortOrder.DESC)), 
randomRequestOptions ); logger.info(searchResponse); // check sort order assertEquals( - expectedIds.stream().sorted(Comparator.comparingInt(this::getIdAsNumeric).reversed()).collect(Collectors.toList()), - Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getId).collect(Collectors.toList()) + expectedIds.stream().sorted(Comparator.comparingInt(this::getIdAsNumeric).reversed()).toList(), + Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getId).toList() ); // look up postings - searchResponse = client.search( - new SearchRequest(index).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("test", "test" + num))), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("test", "test" + num)), randomRequestOptions ); logger.info(searchResponse); @@ -472,8 +437,9 @@ private void assertDocs( // search on _type and check that results contain _type information String randomType = getType(oldVersion, randomFrom(expectedIds)); long typeCount = expectedIds.stream().filter(idd -> getType(oldVersion, idd).equals(randomType)).count(); - searchResponse = client.search( - new SearchRequest(index).source(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("_type", randomType))), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("_type", randomType)), randomRequestOptions ); logger.info(searchResponse); @@ -493,10 +459,9 @@ private void assertDocs( ); // check that shards are skipped based on non-matching date - searchResponse = client.search( - new SearchRequest(index).source( - SearchSourceBuilder.searchSource().query(QueryBuilders.rangeQuery("create_date").from("2020-02-01")) - ), + searchResponse = search( + index, + SearchSourceBuilder.searchSource().query(QueryBuilders.rangeQuery("create_date").from("2020-02-01")), randomRequestOptions ); logger.info(searchResponse); @@ -507,13 +472,22 @@ private void assertDocs( 
} } + private static SearchResponse search(String index, @Nullable SearchSourceBuilder builder, RequestOptions options) throws IOException { + Request request = new Request("POST", "/" + index + "/_search"); + if (builder != null) { + request.setJsonEntity(builder.toString()); + } + request.setOptions(options); + return SearchResponse.fromXContent(responseAsParser(client().performRequest(request))); + } + private int getIdAsNumeric(String id) { return Integer.parseInt(id.substring("testdoc".length())); } - static ShardsAcknowledgedResponse closeIndex(RestClient client, String index) throws IOException { + private static void closeIndex(RestClient client, String index) throws IOException { Request request = new Request("POST", "/" + index + "/_close"); - Response response = client.performRequest(request); - return ShardsAcknowledgedResponse.fromXContent(responseAsParser(response)); + ObjectPath doc = ObjectPath.createFromResponse(client.performRequest(request)); + assertTrue(doc.evaluate("shards_acknowledged")); } } From a61de5865ad1405dc9fcc66d54547d503d485065 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 2 Nov 2023 09:34:21 -0400 Subject: [PATCH 34/47] Drop the HLRC from this test (#101698) --- .../upgrades/SearchStatesIT.java | 67 +++++++------------ 1 file changed, 26 insertions(+), 41 deletions(-) diff --git a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java index 6d89571e5af9..1bb2116cc680 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java +++ b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java @@ -12,14 +12,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.rest.ESRestTestCase; @@ -44,7 +42,6 @@ * This test ensure that we keep the search states of a CCS request correctly when the local and remote clusters * have different but compatible versions. See SearchService#createAndPutReaderContext */ -@SuppressWarnings("removal") public class SearchStatesIT extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(SearchStatesIT.class); @@ -90,7 +87,7 @@ static List parseHosts(String props) { public static void configureRemoteClusters(List remoteNodes) throws Exception { assertThat(remoteNodes, hasSize(3)); final String remoteClusterSettingPrefix = "cluster.remote." 
+ CLUSTER_ALIAS + "."; - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { final Settings remoteConnectionSettings; if (randomBoolean()) { final List seeds = remoteNodes.stream() @@ -124,28 +121,32 @@ public static void configureRemoteClusters(List remoteNodes) throws Except } } - static RestHighLevelClient newLocalClient() { + static RestClient newLocalClient() { final List hosts = parseHosts("tests.rest.cluster"); final int index = random().nextInt(hosts.size()); LOGGER.info("Using client node {}", index); - return new RestHighLevelClient(RestClient.builder(hosts.get(index))); + return RestClient.builder(hosts.get(index)).build(); } - static RestHighLevelClient newRemoteClient() { - return new RestHighLevelClient(RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster")))); + static RestClient newRemoteClient() { + return RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster"))).build(); } - static int indexDocs(RestHighLevelClient client, String index, int numDocs) throws IOException { + static int indexDocs(RestClient client, String index, int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { - client.index(new IndexRequest(index).id("id_" + i).source("f", i), RequestOptions.DEFAULT); + Request createDoc = new Request("POST", "/" + index + "/_doc/id_" + i); + createDoc.setJsonEntity(Strings.format(""" + { "f": %s } + """, i)); + assertOK(client.performRequest(createDoc)); } - refresh(client.getLowLevelClient(), index); + refresh(client, index); return numDocs; } void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs, Integer preFilterShardSize) { - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { Request request = new Request("POST", "/_search"); final int expectedDocs; if (randomBoolean()) { @@ -185,56 +186,40 @@ void verifySearch(String 
localIndex, int localNumDocs, String remoteIndex, int r public void testBWCSearchStates() throws Exception { String localIndex = "test_bwc_search_states_index"; String remoteIndex = "test_bwc_search_states_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { - createIndex( - localClient.getLowLevelClient(), - localIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build() - ); + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { + createIndex(localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build()); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); - createIndex( - remoteClient.getLowLevelClient(), - remoteIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build() - ); + createIndex(remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build()); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 20); for (int i = 0; i < iterations; i++) { verifySearch(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs, null); } - deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } public void testCanMatch() throws Exception { String localIndex = "test_can_match_local_index"; String remoteIndex = "test_can_match_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { - createIndex( - localClient.getLowLevelClient(), - localIndex, - 
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build() - ); + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { + createIndex(localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build()); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); - createIndex( - remoteClient.getLowLevelClient(), - remoteIndex, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build() - ); + createIndex(remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(5, 20)).build()); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 10); for (int i = 0; i < iterations; i++) { verifySearch(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs, between(1, 10)); } - deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } } From e5eb0bbf04c724121b0903d8f5d303412162f93d Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 2 Nov 2023 09:47:22 -0400 Subject: [PATCH 35/47] [main] EQL doc wrong description in query example (#101579) (#101709) Co-authored-by: Ming <42666128+zethsqx@users.noreply.github.com> --- docs/reference/eql/syntax.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/eql/syntax.asciidoc b/docs/reference/eql/syntax.asciidoc index f592610f487c..33a6fb745ac5 100644 --- a/docs/reference/eql/syntax.asciidoc +++ b/docs/reference/eql/syntax.asciidoc @@ -243,7 +243,7 @@ my_field like ("Value-*", "VALUE2", "VAL?") // case-sensitive my_field like~ ("value-*", "value2", "val?") // 
case-insensitive my_field regex ("[vV]alue-[0-9]", "VALUE[^2].?", "VAL3") // case-sensitive -my_field regex~ ("value-[0-9]", "value[^2].?", "val3") // case-sensitive +my_field regex~ ("value-[0-9]", "value[^2].?", "val3") // case-insensitive ---- `in` (case-sensitive):: From 86c27188ad6b4d1ec828a1247db3f7b356ae2563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Thu, 2 Nov 2023 14:57:27 +0100 Subject: [PATCH 36/47] Allow filtering out waiting for tasks to complete in AbstractXPackRestTest cleanup method (#101711) --- .../xpack/test/rest/AbstractXPackRestTest.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java index 1009a8460a32..e8c3250bf2e4 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/AbstractXPackRestTest.java @@ -35,6 +35,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -118,13 +119,17 @@ public void cleanup() throws Exception { if (isWaitForPendingTasks()) { // This waits for pending tasks to complete, so must go last (otherwise // it could be waiting for pending tasks while monitoring is still running). - waitForPendingTasks(adminClient(), task -> { - // Don't check rollup jobs because we clear them in the superclass. - return task.contains(RollupJob.NAME); - }); + waitForPendingTasks(adminClient(), waitForPendingTasksFilter()); } } + protected Predicate waitForPendingTasksFilter() { + return task -> { + // Don't check rollup jobs because we clear them in the superclass. 
+ return task.contains(RollupJob.NAME); + }; + } + /** * Delete any left over machine learning datafeeds and jobs. */ From 567aba07105f20f5eff7d2b737fc22ec9d062e26 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Thu, 2 Nov 2023 10:06:11 -0400 Subject: [PATCH 37/47] [ML] Hugging Face inference service (#100672) * Starting hugging face service * Adding more hf pieces * Adding http interface * Implementing close * Building out the hf external structure * Working call to hugging face * Removing keep alive and refactoring parsing * Adding more tests * Adding tests * Removing a few testing left overs * Removing dependency on fasterxml * Fixing spotless * Fixing exception text and adding more tests * Addressing pr feedback * Refactoring comments * Switching to latest elserv2 transport version * Refactoring throttler to count messages * Do not allow logging after close * Adding test --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/EmptyTaskSettings.java | 50 ++++ .../inference/InferenceService.java | 7 + .../inference/ModelConfigurations.java | 17 +- .../TestInferenceServicePlugin.java | 10 + .../InferenceNamedWriteablesProvider.java | 19 ++ .../xpack/inference/InferencePlugin.java | 18 +- .../TransportPutInferenceModelAction.java | 30 +++ .../external/action/ExecutableAction.java | 18 ++ .../huggingface/HuggingFaceElserAction.java | 47 ++++ .../inference/external/http/HttpClient.java | 27 ++- .../external/http/HttpClientManager.java | 21 +- .../sender/HttpRequestExecutorService.java | 66 ++++- .../http/sender/HttpRequestSenderFactory.java | 3 +- .../external/http/sender/Sender.java | 24 ++ .../huggingface/HuggingFaceAccount.java | 21 ++ .../huggingface/HuggingFaceClient.java | 52 ++++ .../inference/external/request/Request.java | 14 ++ .../huggingface/HuggingFaceElserRequest.java | 48 ++++ .../HuggingFaceElserRequestEntity.java | 33 +++ .../HuggingFaceElserResponseEntity.java | 100 
++++++++ .../xpack/inference/logging/Throttler.java | 164 +++++++++++++ .../inference/logging/ThrottlerManager.java | 112 +++++++++ .../inference/services/MapParsingUtils.java | 10 +- .../services/elser/ElserMlNodeService.java | 7 + .../elser/HuggingFaceElserModel.java | 35 +++ .../elser/HuggingFaceElserSecretSettings.java | 80 ++++++ .../elser/HuggingFaceElserService.java | 150 ++++++++++++ .../HuggingFaceElserServiceSettings.java | 110 +++++++++ .../inference/EmptyTaskSettingsTests.java | 34 +++ .../HuggingFaceElserActionTests.java | 152 ++++++++++++ .../external/http/HttpClientManagerTests.java | 15 +- .../external/http/HttpClientTests.java | 31 +-- .../http/IdleConnectionEvictorTests.java | 48 ++-- .../xpack/inference/external/http/Utils.java | 58 ++++- .../HttpRequestExecutorServiceTests.java | 2 +- .../sender/HttpRequestSenderFactoryTests.java | 48 ++-- .../http/sender/RequestTaskTests.java | 5 +- .../huggingface/HuggingFaceClientTests.java | 167 +++++++++++++ .../HuggingFaceElserRequestEntityTests.java | 34 +++ .../HuggingFaceElserRequestTests.java | 48 ++++ .../HuggingFaceElserResponseEntityTests.java | 220 +++++++++++++++++ .../logging/ThrottlerManagerTests.java | 78 ++++++ .../inference/logging/ThrottlerTests.java | 229 ++++++++++++++++++ .../HuggingFaceElserSecretSettingsTests.java | 78 ++++++ .../HuggingFaceElserServiceSettingsTests.java | 73 ++++++ 46 files changed, 2509 insertions(+), 105 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index facc95422081..083d7de37194 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -159,6 +159,7 @@ static TransportVersion def(int id) { public static final TransportVersion INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED = def(8_528_00_0); public static final TransportVersion KNN_AS_QUERY_ADDED = def(8_529_00_0); public static final TransportVersion UNDESIRED_SHARD_ALLOCATIONS_COUNT_ADDED = def(8_530_00_0); + public static final TransportVersion ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED = def(8_531_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java new file mode 100644 index 000000000000..24bfef4ec313 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/EmptyTaskSettings.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * This class defines an empty task settings object. This is useful for services that do not have any task settings. 
+ */ +public record EmptyTaskSettings() implements TaskSettings { + public static final String NAME = "empty_task_settings"; + + public static EmptyTaskSettings INSTANCE = new EmptyTaskSettings(); + + public EmptyTaskSettings(StreamInput in) { + this(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} +} diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 82ce13e591b6..2d7ee9f210e6 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -8,6 +8,7 @@ package org.elasticsearch.inference; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import java.io.Closeable; @@ -76,4 +77,10 @@ public interface InferenceService extends Closeable { default boolean isInClusterService() { return false; } + + /** + * Defines the version required across all clusters to use this service + * @return {@link TransportVersion} specifying the version + */ + TransportVersion getMinimalSupportedVersion(); } diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java index a8ae380bd3ba..cdccca7eb0c0 100644 --- a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java +++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java @@ -33,6 +33,13 @@ public class ModelConfigurations 
implements ToXContentObject, VersionedNamedWrit private final ServiceSettings serviceSettings; private final TaskSettings taskSettings; + /** + * Allows no task settings to be defined. This will default to the {@link EmptyTaskSettings} object. + */ + public ModelConfigurations(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings) { + this(modelId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE); + } + public ModelConfigurations( String modelId, TaskType taskType, @@ -40,11 +47,11 @@ public ModelConfigurations( ServiceSettings serviceSettings, TaskSettings taskSettings ) { - this.modelId = modelId; - this.taskType = taskType; - this.service = service; - this.serviceSettings = serviceSettings; - this.taskSettings = taskSettings; + this.modelId = Objects.requireNonNull(modelId); + this.taskType = Objects.requireNonNull(taskType); + this.service = Objects.requireNonNull(service); + this.serviceSettings = Objects.requireNonNull(serviceSettings); + this.taskSettings = Objects.requireNonNull(taskSettings); } public ModelConfigurations(StreamInput in) throws IOException { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java index 102436b37524..f9e6eef5ffcc 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java @@ -65,6 +65,11 @@ public TestInferenceService(InferenceServiceFactoryContext context) { public String name() { return NAME; } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for 
cluster upgrade tests + } } public static class TestInferenceServiceClusterService extends TestInferenceServiceBase { @@ -83,6 +88,11 @@ public boolean isInClusterService() { public String name() { return NAME; } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } } public abstract static class TestInferenceServiceBase implements InferenceService { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 3ef93c6c275d..42ad64b9c60a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -8,10 +8,14 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import java.util.ArrayList; import java.util.List; @@ -23,6 +27,9 @@ private InferenceNamedWriteablesProvider() {} public static List getNamedWriteables() { List namedWriteables = new ArrayList<>(); + // Empty default settings + namedWriteables.add(new NamedWriteableRegistry.Entry(EmptyTaskSettings.class, 
EmptyTaskSettings.NAME, EmptyTaskSettings::new)); + // ELSER config namedWriteables.add( new NamedWriteableRegistry.Entry(ServiceSettings.class, ElserMlNodeServiceSettings.NAME, ElserMlNodeServiceSettings::new) @@ -31,6 +38,18 @@ public static List getNamedWriteables() { new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) ); + // Hugging Face ELSER config + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + HuggingFaceElserServiceSettings.NAME, + HuggingFaceElserServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(SecretSettings.class, HuggingFaceElserSecretSettings.NAME, HuggingFaceElserSecretSettings::new) + ); + return namedWriteables; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 2f0f95cf8a91..393cbd0413e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -41,12 +41,14 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import 
org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import java.util.Collection; import java.util.List; @@ -62,6 +64,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, InferenceSe private final SetOnce httpRequestSenderFactory = new SetOnce<>(); // We'll keep a reference to the http manager just in case the inference services don't get closed individually private final SetOnce httpManager = new SetOnce<>(); + private final SetOnce throttlerManager = new SetOnce<>(); public InferencePlugin(Settings settings) { this.settings = settings; @@ -97,7 +100,9 @@ public List getRestHandlers( @Override public Collection createComponents(PluginServices services) { - httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService())); + throttlerManager.set(new ThrottlerManager(settings, services.threadPool(), services.clusterService())); + + httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager.get())); httpRequestSenderFactory.set( new HttpRequestSenderFactory(services.threadPool(), httpManager.get(), services.clusterService(), settings) ); @@ -138,7 +143,7 @@ public List> getExecutorBuilders(Settings settingsToUse) { new ScalingExecutorBuilder( UTILITY_THREAD_POOL_NAME, 0, - 1, + 10, TimeValue.timeValueMinutes(10), false, "xpack.inference.utility_thread_pool" @@ -151,7 +156,8 @@ public List> getSettings() { return Stream.of( HttpSettings.getSettings(), HttpClientManager.getSettings(), - HttpRequestSenderFactory.HttpRequestSender.getSettings() + HttpRequestSenderFactory.HttpRequestSender.getSettings(), + ThrottlerManager.getSettings() ).flatMap(Collection::stream).collect(Collectors.toList()); } @@ -167,7 +173,7 @@ public String getFeatureDescription() { @Override public List getInferenceServiceFactories() { - return List.of(ElserMlNodeService::new); + return List.of(ElserMlNodeService::new, context -> new 
HuggingFaceElserService(httpRequestSenderFactory, throttlerManager)); } @Override @@ -177,8 +183,6 @@ public List getInferenceServiceNamedWriteables() { @Override public void close() { - if (httpManager.get() != null) { - IOUtils.closeWhileHandlingException(httpManager.get()); - } + IOUtils.closeWhileHandlingException(httpManager.get(), throttlerManager.get()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 046eff3e6b83..569d4e023928 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -41,10 +43,14 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.core.Strings.format; + public class TransportPutInferenceModelAction extends TransportMasterNodeAction< PutInferenceModelAction.Request, PutInferenceModelAction.Response> { + private static final Logger logger = LogManager.getLogger(TransportPutInferenceModelAction.class); + private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; private final Client client; @@ -97,6 +103,30 @@ protected void masterOperation( return; } + // Check if all the nodes in this cluster know about the service + if (service.get().getMinimalSupportedVersion().after(state.getMinTransportVersion())) { + logger.warn( + format( + "Service [%s] requires version [%s] but 
minimum cluster version is [%s]", + serviceName, + service.get().getMinimalSupportedVersion(), + state.getMinTransportVersion() + ) + ); + + listener.onFailure( + new ElasticsearchStatusException( + format( + "All nodes in the cluster are not aware of the service [%s]." + + "Wait for the cluster to finish upgrading and try again.", + serviceName + ), + RestStatus.BAD_REQUEST + ) + ); + return; + } + if (service.get().isInClusterService()) { // Find the cluster platform as the service may need that // information when creating the model diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java new file mode 100644 index 000000000000..bc52a04ab720 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; + +/** + * Defines an inference request to a 3rd party service. The success or failure response is communicated through the provided listener. 
+ */ +public interface ExecutableAction { + void execute(String input, ActionListener listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java new file mode 100644 index 000000000000..acc3ab57ce9e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceClient; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; + +public class HuggingFaceElserAction implements ExecutableAction { + + private final HuggingFaceAccount account; + private final HuggingFaceClient 
client; + + public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ThrottlerManager throttlerManager) { + this.client = new HuggingFaceClient(sender, throttlerManager); + this.account = new HuggingFaceAccount(model.getServiceSettings().uri(), model.getSecretSettings().apiKey()); + } + + public void execute(String input, ActionListener listener) { + try { + HuggingFaceElserRequest request = new HuggingFaceElserRequest(account, new HuggingFaceElserRequestEntity(input)); + + client.send(request, listener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure( + new ElasticsearchStatusException("Failed to send request ELSER Hugging Face request", RestStatus.INTERNAL_SERVER_ERROR, e) + ); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java index 125ff7ae047a..1dac8153da4f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClient.java @@ -19,9 +19,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.CancellationException; import java.util.concurrent.atomic.AtomicReference; @@ -41,11 +43,17 @@ enum Status { private final AtomicReference status = new AtomicReference<>(Status.CREATED); private final ThreadPool threadPool; private final HttpSettings settings; + private final ThrottlerManager throttlerManager; - public static HttpClient create(HttpSettings settings, ThreadPool 
threadPool, PoolingNHttpClientConnectionManager connectionManager) { - CloseableHttpAsyncClient client = createAsyncClient(connectionManager); + public static HttpClient create( + HttpSettings settings, + ThreadPool threadPool, + PoolingNHttpClientConnectionManager connectionManager, + ThrottlerManager throttlerManager + ) { + CloseableHttpAsyncClient client = createAsyncClient(Objects.requireNonNull(connectionManager)); - return new HttpClient(settings, client, threadPool); + return new HttpClient(settings, client, threadPool, throttlerManager); } private static CloseableHttpAsyncClient createAsyncClient(PoolingNHttpClientConnectionManager connectionManager) { @@ -59,10 +67,11 @@ private static CloseableHttpAsyncClient createAsyncClient(PoolingNHttpClientConn } // Default for testing - HttpClient(HttpSettings settings, CloseableHttpAsyncClient asyncClient, ThreadPool threadPool) { - this.settings = settings; - this.threadPool = threadPool; - this.client = asyncClient; + HttpClient(HttpSettings settings, CloseableHttpAsyncClient asyncClient, ThreadPool threadPool, ThrottlerManager throttlerManager) { + this.settings = Objects.requireNonNull(settings); + this.threadPool = Objects.requireNonNull(threadPool); + this.client = Objects.requireNonNull(asyncClient); + this.throttlerManager = Objects.requireNonNull(throttlerManager); } public void start() { @@ -83,7 +92,7 @@ public void completed(HttpResponse response) { @Override public void failed(Exception ex) { - logger.warn(format("Request [%s] failed", request.getRequestLine()), ex); + throttlerManager.getThrottler().warn(logger, format("Request [%s] failed", request.getRequestLine()), ex); failUsingUtilityThread(ex, listener); } @@ -99,7 +108,7 @@ private void respondUsingUtilityThread(HttpResponse response, HttpUriRequest req try { listener.onResponse(HttpResult.create(settings.getMaxResponseSize(), response)); } catch (Exception e) { - logger.warn(format("Failed to create http result for [%s]", 
request.getRequestLine()), e); + throttlerManager.getThrottler().warn(logger, format("Failed to create http result for [%s]", request.getRequestLine()), e); listener.onFailure(e); } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index 862170a229b4..494e0f7c60df 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -9,6 +9,7 @@ import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.apache.http.impl.nio.reactor.IOReactorConfig; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.logging.log4j.LogManager; @@ -19,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; import java.io.IOException; @@ -43,7 +45,7 @@ public class HttpClientManager implements Closeable { Setting.Property.Dynamic ); - private static final TimeValue DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME = TimeValue.timeValueSeconds(10); + private static final TimeValue DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME = TimeValue.timeValueMinutes(1); public static final Setting CONNECTION_EVICTION_THREAD_INTERVAL_SETTING = Setting.timeSetting( "xpack.inference.http.connection_eviction_interval", DEFAULT_CONNECTION_EVICTION_THREAD_INTERVAL_TIME, @@ -65,9 +67,14 @@ public class HttpClientManager implements Closeable { private IdleConnectionEvictor connectionEvictor; private 
final HttpClient httpClient; - public static HttpClientManager create(Settings settings, ThreadPool threadPool, ClusterService clusterService) { + public static HttpClientManager create( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + ThrottlerManager throttlerManager + ) { PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(); - return new HttpClientManager(settings, connectionManager, threadPool, clusterService); + return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } // Default for testing @@ -75,14 +82,15 @@ public static HttpClientManager create(Settings settings, ThreadPool threadPool, Settings settings, PoolingNHttpClientConnectionManager connectionManager, ThreadPool threadPool, - ClusterService clusterService + ClusterService clusterService, + ThrottlerManager throttlerManager ) { this.threadPool = threadPool; this.connectionManager = connectionManager; setMaxConnections(MAX_CONNECTIONS.get(settings)); - this.httpClient = HttpClient.create(new HttpSettings(settings, clusterService), threadPool, connectionManager); + this.httpClient = HttpClient.create(new HttpSettings(settings, clusterService), threadPool, connectionManager, throttlerManager); evictorSettings = new EvictorSettings(settings); connectionEvictor = createConnectionEvictor(); @@ -93,7 +101,8 @@ public static HttpClientManager create(Settings settings, ThreadPool threadPool, private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { - ioReactor = new DefaultConnectingIOReactor(); + var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); + ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); } catch (IOReactorException e) { var message = "Failed to initialize the inference http client manager"; logger.error(message, e); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java index 0635b4d4d8b3..328afb264c4a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java @@ -21,13 +21,18 @@ import org.elasticsearch.xpack.inference.external.http.HttpResult; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Objects; -import java.util.concurrent.AbstractExecutorService; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.core.Strings.format; @@ -44,7 +49,7 @@ * attempting to execute a task (aka waiting for the connection manager to lease a connection). See * {@link org.apache.http.client.config.RequestConfig.Builder#setConnectionRequestTimeout} for more info. 
*/ -class HttpRequestExecutorService extends AbstractExecutorService { +class HttpRequestExecutorService implements ExecutorService { private static final Logger logger = LogManager.getLogger(HttpRequestExecutorService.class); private final String serviceName; @@ -232,4 +237,61 @@ public void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionLis public void execute(Runnable runnable) { throw new UnsupportedOperationException("use send instead"); } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Callable task) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Runnable task, T result) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public Future submit(Runnable task) { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public List> invokeAll(Collection> tasks) throws InterruptedException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. + */ + @Override + public T invokeAny(Collection> tasks) throws InterruptedException, ExecutionException { + throw new UnsupportedOperationException("use send instead"); + } + + /** + * This method is not supported. Use {@link #send} instead. 
+ */ + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException, + ExecutionException, TimeoutException { + throw new UnsupportedOperationException("use send instead"); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java index ce99e1951248..40adc9c4a8be 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; -import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Objects; @@ -59,7 +58,7 @@ public HttpRequestSender createSender(String serviceName) { * A class for providing a more friendly interface for sending an {@link HttpUriRequest}. This leverages the queuing logic for sending * a request. */ - public static final class HttpRequestSender implements Closeable { + public static final class HttpRequestSender implements Sender { private static final Logger logger = LogManager.getLogger(HttpRequestSender.class); /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java new file mode 100644 index 000000000000..abef521c77fc --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.Closeable; + +public interface Sender extends Closeable { + void start(); + + void send(HttpRequestBase request, ActionListener listener); + + void send(HttpRequestBase request, @Nullable TimeValue timeout, ActionListener listener); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java new file mode 100644 index 000000000000..771c7b6adaea --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.elasticsearch.common.settings.SecureString; + +import java.net.URI; +import java.util.Objects; + +public record HuggingFaceAccount(URI url, SecureString apiKey) { + + public HuggingFaceAccount { + Objects.requireNonNull(url); + Objects.requireNonNull(apiKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java new file mode 100644 index 000000000000..ed6e5c200b36 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.core.Strings.format; + +public class HuggingFaceClient { + private static final Logger logger = LogManager.getLogger(HuggingFaceClient.class); + + private final ThrottlerManager throttlerManager; + + private final Sender sender; + + public HuggingFaceClient(Sender sender, ThrottlerManager throttlerManager) { + this.sender = sender; + this.throttlerManager = throttlerManager; + } + + public void send(HuggingFaceElserRequest request, ActionListener listener) throws IOException { + HttpRequestBase httpRequest = request.createRequest(); + ActionListener responseListener = ActionListener.wrap(response -> { + try { + listener.onResponse(HuggingFaceElserResponseEntity.fromResponse(response)); + } catch (Exception e) { + String msg = format("Failed to parse the Hugging Face ELSER response for request [%s]", httpRequest.getRequestLine()); + throttlerManager.getThrottler().warn(logger, msg, e); + listener.onFailure(new ElasticsearchException(msg, e)); + } + }, listener::onFailure); + + sender.send(httpRequest, responseListener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java new file mode 100644 index 000000000000..91ebfe0e3478 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request; + +import org.apache.http.client.methods.HttpRequestBase; + +public interface Request { + HttpRequestBase createRequest(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java new file mode 100644 index 000000000000..f896bba4ae06 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.apache.http.Header; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +public class HuggingFaceElserRequest implements Request { + + private final HuggingFaceAccount account; + private final HuggingFaceElserRequestEntity entity; + + public HuggingFaceElserRequest(HuggingFaceAccount account, HuggingFaceElserRequestEntity entity) { + this.account = Objects.requireNonNull(account); + this.entity = Objects.requireNonNull(entity); + } + + public HttpRequestBase createRequest() { + HttpPost httpPost = new HttpPost(account.url()); + + ByteArrayEntity byteEntity = new ByteArrayEntity(Strings.toString(entity).getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaTypeWithoutParameters()); + httpPost.setHeader(apiKeyHeader()); + + return httpPost; + } + + private Header apiKeyHeader() { + return new BasicHeader(HttpHeaders.AUTHORIZATION, "Bearer " + account.apiKey().toString()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java new file mode 100644 index 000000000000..f21bee923eca --- /dev/null +++ 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.inference.external.request.huggingface;

import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * The JSON body of a Hugging Face ELSER request; serializes to {@code {"inputs": "..."}}.
 */
public record HuggingFaceElserRequestEntity(String inputs) implements ToXContentObject {

    private static final String INPUTS_FIELD = "inputs";

    public HuggingFaceElserRequestEntity {
        Objects.requireNonNull(inputs);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().field(INPUTS_FIELD, inputs).endObject();
    }
}
package org.elasticsearch.xpack.inference.external.response.huggingface;

import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;
import org.elasticsearch.xpack.inference.external.http.HttpResult;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;

/**
 * Parses the JSON body returned by the Hugging Face ELSER endpoint into
 * {@link TextExpansionResults}.
 */
public class HuggingFaceElserResponseEntity {

    /**
     * The response from hugging face will be formatted as {@code [{"token": 0.0...123}]}. Each object within the
     * array will correspond to the item within the inputs array within the request sent to hugging face.
     * For example for a request like:
     *
     * <pre>{@code
     * {
     *     "inputs": ["hello this is my name", "I wish I was there!"]
     * }
     * }</pre>
     *
     * The response would look like:
     *
     * <pre>{@code
     * [
     *   {
     *     "the": 0.7226026,
     *     "to": 0.29198948,
     *     "is": 0.059944477,
     *     ...
     *   },
     *   {
     *       "wish": 0.123456,
     *       ...
     *   }
     * ]
     * }</pre>
     *
     * Returns an empty (non-truncated) {@link TextExpansionResults} when the response array is empty;
     * otherwise only the first array element is converted (see comment below).
     */
    public static TextExpansionResults fromResponse(HttpResult response) throws IOException {
        var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);

        try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
            // position the parser on the first token if it has not been advanced yet
            if (jsonParser.currentToken() == null) {
                jsonParser.nextToken();
            }

            List<TextExpansionResults> parsedResponse = XContentParserUtils.parseList(
                jsonParser,
                HuggingFaceElserResponseEntity::parseExpansionResult
            );

            if (parsedResponse.isEmpty()) {
                return new TextExpansionResults(DEFAULT_RESULTS_FIELD, Collections.emptyList(), false);
            }

            // we only handle a single response right now so just grab the first one
            return parsedResponse.get(0);
        }
    }

    /**
     * Converts one {@code {"token": weight, ...}} object from the response array into a
     * {@link TextExpansionResults}. Each field name becomes a token and each numeric value its weight.
     */
    private static TextExpansionResults parseExpansionResult(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.currentToken();
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);

        List<TextExpansionResults.WeightedToken> weightedTokens = new ArrayList<>();

        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
            var floatToken = parser.nextToken();
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser);

            weightedTokens.add(new TextExpansionResults.WeightedToken(parser.currentName(), parser.floatValue()));
        }
        // TODO how do we know if the tokens were truncated so we can set this appropriately?
+ // This will depend on whether we handle the tokenization or hugging face + return new TextExpansionResults(DEFAULT_RESULTS_FIELD, weightedTokens, false); + } + + private HuggingFaceElserResponseEntity() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java new file mode 100644 index 000000000000..b1dee15a93bd --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/Throttler.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.logging; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +/** + * A class that throttles calls to a logger. If a log call is made during the throttle period a counter is incremented. + * If a log call occurs after the throttle period, then the call will proceed, and it will include a message like + * "repeated X times" to indicate how often the message was attempting to be logged. 
+ */ +public class Throttler implements Closeable { + + private static final Logger classLogger = LogManager.getLogger(Throttler.class); + + private final TimeValue resetInterval; + private Duration durationToWait; + private final Clock clock; + private final ConcurrentMap logExecutors; + private final AtomicReference cancellableTask = new AtomicReference<>(); + private final AtomicBoolean isRunning = new AtomicBoolean(true); + + /** + * Constructs the throttler and kicks of a scheduled tasks to clear the internal stats. + * + * @param resetInterval the frequency for clearing the internal stats. This protects against an ever growing + * cache + * @param durationToWait the amount of time to wait before logging a message after the threshold + * is reached + * @param threadPool a thread pool for running a scheduled task to clear the internal stats + */ + public Throttler(TimeValue resetInterval, TimeValue durationToWait, ThreadPool threadPool) { + this(resetInterval, durationToWait, Clock.systemUTC(), threadPool, new ConcurrentHashMap<>()); + } + + /** + * This should only be used directly for testing. 
+ */ + Throttler( + TimeValue resetInterval, + TimeValue durationToWait, + Clock clock, + ThreadPool threadPool, + ConcurrentMap logExecutors + ) { + Objects.requireNonNull(durationToWait); + Objects.requireNonNull(threadPool); + + this.resetInterval = Objects.requireNonNull(resetInterval); + this.durationToWait = Duration.ofMillis(durationToWait.millis()); + this.clock = Objects.requireNonNull(clock); + this.logExecutors = Objects.requireNonNull(logExecutors); + + this.cancellableTask.set(startResetTask(threadPool)); + } + + private Scheduler.Cancellable startResetTask(ThreadPool threadPool) { + classLogger.debug(() -> format("Reset task scheduled with interval [%s]", resetInterval)); + + return threadPool.scheduleWithFixedDelay(logExecutors::clear, resetInterval, threadPool.executor(UTILITY_THREAD_POOL_NAME)); + } + + public void setDurationToWait(TimeValue durationToWait) { + this.durationToWait = Duration.ofMillis(durationToWait.millis()); + } + + public void warn(Logger logger, String message, Throwable e) { + Objects.requireNonNull(message); + Objects.requireNonNull(e); + + if (isRunning.get()) { + logHelper(message, msgToAppend -> logger.warn(message.concat(msgToAppend), e)); + } + } + + private void logHelper(String message, Consumer executor) { + LogExecutor logExecutor = logExecutors.compute(message, (key, value) -> { + if (value == null) { + return new LogExecutor(clock, executor); + } + + return value.compute(executor, durationToWait); + }); + + logExecutor.log(); + } + + @Override + public void close() { + isRunning.set(false); + cancellableTask.get().cancel(); + logExecutors.clear(); + } + + private static class LogExecutor { + private final long skippedLogCalls; + private final Instant timeOfLastLogCall; + private final Clock clock; + private final Runnable logRunner; + + LogExecutor(Clock clock, Consumer logAppendedMessage) { + skippedLogCalls = 0; + timeOfLastLogCall = Instant.now(clock); + this.clock = clock; + // The first log message can log the 
original message without waiting + this.logRunner = () -> logAppendedMessage.accept(""); + } + + LogExecutor(Clock clock, long skippedLogCalls, Runnable logRunner) { + this.skippedLogCalls = skippedLogCalls; + timeOfLastLogCall = Instant.now(clock); + this.clock = clock; + this.logRunner = logRunner; + } + + void log() { + this.logRunner.run(); + } + + LogExecutor compute(Consumer executor, Duration durationToWait) { + if (hasDurationExpired(durationToWait)) { + String msg = ""; + if (this.skippedLogCalls == 1) { + msg = ", repeated 1 time"; + } else if (this.skippedLogCalls > 1) { + msg = format(", repeated %s times", this.skippedLogCalls); + } + + String finalMsg = msg; + return new LogExecutor(this.clock, 0, () -> executor.accept(finalMsg)); + } + + return new LogExecutor(this.clock, this.skippedLogCalls + 1, () -> {}); + } + + private boolean hasDurationExpired(Duration durationToWait) { + Instant now = Instant.now(clock); + return now.isAfter(timeOfLastLogCall.plus(durationToWait)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java new file mode 100644 index 000000000000..6c38c341a040 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/logging/ThrottlerManager.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.util.List; +import java.util.Objects; + +/** + * This class manages the settings for a {@link Throttler}. + */ +public class ThrottlerManager implements Closeable { + private static final TimeValue DEFAULT_STATS_RESET_INTERVAL_TIME = TimeValue.timeValueDays(1); + /** + * A setting specifying the interval for clearing the cached log message stats + */ + public static final Setting STATS_RESET_INTERVAL_SETTING = Setting.timeSetting( + "xpack.inference.logging.reset_interval", + DEFAULT_STATS_RESET_INTERVAL_TIME, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final TimeValue DEFAULT_WAIT_DURATION_TIME = TimeValue.timeValueHours(1); + /** + * A setting specifying the amount of time to wait after a log call occurs before allowing another log call. 
+ */ + public static final Setting LOGGER_WAIT_DURATION_SETTING = Setting.timeSetting( + "xpack.inference.logging.wait_duration", + DEFAULT_WAIT_DURATION_TIME, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private final ThreadPool threadPool; + private Throttler throttler; + private LoggerSettings loggerSettings; + + public ThrottlerManager(Settings settings, ThreadPool threadPool, ClusterService clusterService) { + Objects.requireNonNull(settings); + Objects.requireNonNull(clusterService); + + this.threadPool = Objects.requireNonNull(threadPool); + this.loggerSettings = LoggerSettings.fromSettings(settings); + + throttler = new Throttler(loggerSettings.resetInterval(), loggerSettings.waitDuration(), threadPool); + this.addSettingsUpdateConsumers(clusterService); + } + + private void addSettingsUpdateConsumers(ClusterService clusterService) { + clusterService.getClusterSettings().addSettingsUpdateConsumer(STATS_RESET_INTERVAL_SETTING, this::setResetInterval); + clusterService.getClusterSettings().addSettingsUpdateConsumer(LOGGER_WAIT_DURATION_SETTING, this::setWaitDuration); + } + + // default for testing + void setWaitDuration(TimeValue waitDuration) { + loggerSettings = loggerSettings.createWithWaitDuration(waitDuration); + + throttler.setDurationToWait(waitDuration); + } + + // default for testing + void setResetInterval(TimeValue resetInterval) { + loggerSettings = loggerSettings.createWithResetInterval(resetInterval); + + throttler.close(); + throttler = new Throttler(loggerSettings.resetInterval(), loggerSettings.waitDuration(), threadPool); + } + + public Throttler getThrottler() { + return throttler; + } + + @Override + public void close() { + throttler.close(); + } + + public static List> getSettings() { + return List.of(STATS_RESET_INTERVAL_SETTING, LOGGER_WAIT_DURATION_SETTING); + } + + private record LoggerSettings(TimeValue resetInterval, TimeValue waitDuration) { + LoggerSettings { + Objects.requireNonNull(resetInterval); + 
Objects.requireNonNull(waitDuration); + } + + static LoggerSettings fromSettings(Settings settings) { + return new LoggerSettings(STATS_RESET_INTERVAL_SETTING.get(settings), LOGGER_WAIT_DURATION_SETTING.get(settings)); + } + + LoggerSettings createWithResetInterval(TimeValue resetInterval) { + return new LoggerSettings(resetInterval, waitDuration); + } + + LoggerSettings createWithWaitDuration(TimeValue waitDuration) { + return new LoggerSettings(resetInterval, waitDuration); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java index 31228b645cff..0849e8fa53cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java @@ -61,7 +61,7 @@ public static void throwIfNotEmptyMap(Map settingsMap, String se } public static ElasticsearchStatusException unknownSettingsError(Map config, String serviceName) { - // TOOD map as JSON + // TODO map as JSON return new ElasticsearchStatusException( "Model configuration contains settings [{}] unknown to the [{}] service", RestStatus.BAD_REQUEST, @@ -73,4 +73,12 @@ public static ElasticsearchStatusException unknownSettingsError(Map map) { + ValidationException validationException = new ValidationException(); + + String apiToken = MapParsingUtils.removeAsType(map, API_KEY, String.class); + + if (apiToken == null) { + validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(API_KEY, ModelSecrets.SECRET_SETTINGS)); + } else if (apiToken.isEmpty()) { + validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(API_KEY, ModelSecrets.SECRET_SETTINGS)); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + 
SecureString secureApiToken = new SecureString(Objects.requireNonNull(apiToken).toCharArray()); + + return new HuggingFaceElserSecretSettings(secureApiToken); + } + + public HuggingFaceElserSecretSettings { + Objects.requireNonNull(apiKey); + } + + public HuggingFaceElserSecretSettings(StreamInput in) throws IOException { + this(in.readSecureString()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(API_KEY, apiKey.toString()); + builder.endObject(); + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeSecureString(apiKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java new file mode 100644 index 000000000000..e25315b6bbaf --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
 */

package org.elasticsearch.xpack.inference.services.huggingface.elser;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.Model;
import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ModelSecrets;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceElserAction;
import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory;
import org.elasticsearch.xpack.inference.external.http.sender.Sender;
import org.elasticsearch.xpack.inference.logging.ThrottlerManager;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull;
import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap;

/**
 * An {@link InferenceService} backed by a Hugging Face hosted ELSER endpoint.
 * Only the {@link TaskType#SPARSE_EMBEDDING} task type is supported by infer().
 */
public class HuggingFaceElserService implements InferenceService {
    public static final String NAME = "hugging_face_elser";

    private final SetOnce<HttpRequestSenderFactory> factory;
    private final SetOnce<ThrottlerManager> throttlerManager;
    // Lazily created on first init(Model) call; reused afterwards.
    private final AtomicReference<Sender> sender = new AtomicReference<>();
    // This is initialized once which assumes that the settings will not change. To change the service, it
    // should be deleted and then added again
    private final AtomicReference<HuggingFaceElserAction> action = new AtomicReference<>();

    public HuggingFaceElserService(SetOnce<HttpRequestSenderFactory> factory, SetOnce<ThrottlerManager> throttlerManager) {
        this.factory = Objects.requireNonNull(factory);
        this.throttlerManager = Objects.requireNonNull(throttlerManager);
    }

    @Override
    public String name() {
        return NAME;
    }

    /**
     * Parses a user-supplied configuration into a model. Service and secret settings are both read
     * from the {@code service_settings} map here; rejects any leftover (unknown) settings.
     */
    @Override
    public HuggingFaceElserModel parseRequestConfig(
        String modelId,
        TaskType taskType,
        Map<String, Object> config,
        Set<String> platformArchitectures
    ) {
        Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);

        HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap);
        HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(serviceSettingsMap);

        throwIfNotEmptyMap(config, NAME);
        throwIfNotEmptyMap(serviceSettingsMap, NAME);

        return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings);
    }

    /**
     * Rebuilds a model from its persisted form; unlike parseRequestConfig, secrets come from a
     * separate map and no leftover-settings validation is performed.
     */
    @Override
    public HuggingFaceElserModel parsePersistedConfig(
        String modelId,
        TaskType taskType,
        Map<String, Object> config,
        Map<String, Object> secrets
    ) {
        Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
        Map<String, Object> secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS);

        HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap);
        HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(secretSettingsMap);

        return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings);
    }

    /**
     * Runs inference for {@code input}. Fails the listener with BAD_REQUEST for any task type
     * other than sparse embedding; otherwise lazily initializes the sender/action and executes.
     */
    @Override
    public void infer(Model model, String input, Map<String, Object> taskSettings, ActionListener<InferenceResults> listener) {
        if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) {
            listener.onFailure(
                new ElasticsearchStatusException(
                    TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME),
                    RestStatus.BAD_REQUEST
                )
            );
            return;
        }

        try {
            init(model);
        } catch (Exception e) {
            listener.onFailure(new ElasticsearchException("Failed to initialize service", e));
            return;
        }

        action.get().execute(input, listener);
    }

    @Override
    public void start(Model model, ActionListener<Boolean> listener) {
        try {
            init(model);
            sender.get().start();
            listener.onResponse(true);
        } catch (Exception e) {
            listener.onFailure(new ElasticsearchException("Failed to start service", e));
        }
    }

    @Override
    public void close() throws IOException {
        // best-effort close; the action holds no resources of its own here
        IOUtils.closeWhileHandlingException(sender.get());
    }

    // Creates the sender and action exactly once each (subsequent calls reuse the existing ones).
    private void init(Model model) {
        if (model instanceof HuggingFaceElserModel == false) {
            throw new IllegalArgumentException("The internal model was invalid");
        }

        sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name())));

        HuggingFaceElserModel huggingFaceElserModel = (HuggingFaceElserModel) model;
        action.updateAndGet(
            current -> Objects.requireNonNullElseGet(
                current,
                () -> new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, throttlerManager.get())
            )
        );
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED;
    }
}
package org.elasticsearch.xpack.inference.services.huggingface.elser;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.inference.services.MapParsingUtils;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.core.Strings.format;

/**
 * Service settings for the Hugging Face ELSER service: currently just the endpoint URL.
 */
public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSettings {
    public static final String NAME = "hugging_face_elser_service_settings";
    static final String URL = "url";

    private static final Logger logger = LogManager.getLogger(HuggingFaceElserServiceSettings.class);

    public HuggingFaceElserServiceSettings {
        Objects.requireNonNull(uri);
    }

    public HuggingFaceElserServiceSettings(String url) {
        this(createUri(url));
    }

    public HuggingFaceElserServiceSettings(StreamInput in) throws IOException {
        this(in.readString());
    }

    /**
     * Extracts and validates the {@code url} entry from the settings map, accumulating
     * validation errors and throwing if any were recorded.
     */
    public static HuggingFaceElserServiceSettings fromMap(Map<String, Object> map) {
        var validationException = new ValidationException();

        var urlText = MapParsingUtils.removeAsType(map, URL, String.class);
        var parsedUri = convertToUri(urlText, validationException);

        if (validationException.validationErrors().isEmpty() == false) {
            throw validationException;
        }

        return new HuggingFaceElserServiceSettings(parsedUri);
    }

    // Records a validation error (missing or unparsable) instead of throwing directly.
    private static URI convertToUri(String url, ValidationException validationException) {
        if (url == null) {
            validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(URL, ModelConfigurations.SERVICE_SETTINGS));
            return null;
        }

        try {
            return createUri(url);
        } catch (IllegalArgumentException ignored) {
            validationException.addValidationError(MapParsingUtils.invalidUrlErrorMsg(url, ModelConfigurations.SERVICE_SETTINGS));
            return null;
        }
    }

    // TODO move this to a common location and potentially improve parsing errors
    private static URI createUri(String url) throws IllegalArgumentException {
        Objects.requireNonNull(url);

        try {
            return new URI(url);
        } catch (URISyntaxException e) {
            logger.info(format("Invalid URL received [%s]", url), e);
            throw new IllegalArgumentException(format("unable to parse url [%s]", url), e);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().field(URL, uri.toString()).endObject();
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(uri.toString());
    }
}
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +public class EmptyTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static EmptyTaskSettings createRandom() { + return EmptyTaskSettings.INSTANCE; // no options to randomise + } + + @Override + protected Writeable.Reader instanceReader() { + return EmptyTaskSettings::new; + } + + @Override + protected EmptyTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected EmptyTaskSettings mutateInstance(EmptyTaskSettings instance) { + return null; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java new file mode 100644 index 000000000000..713312204e65 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
 */

package org.elasticsearch.xpack.inference.external.action.huggingface;

import org.apache.http.HttpHeaders;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.http.MockResponse;
import org.elasticsearch.test.http.MockWebServer;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.inference.external.http.HttpClientManager;
import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory;
import org.elasticsearch.xpack.inference.external.http.sender.Sender;
import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel;
import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings;
import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;
import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool;
import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap;
import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl;
import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;

/**
 * Tests for {@link HuggingFaceElserAction} using a {@link MockWebServer} to stand in for the
 * Hugging Face endpoint, plus mocked senders for the failure paths.
 */
public class HuggingFaceElserActionTests extends ESTestCase {
    private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
    private final MockWebServer webServer = new MockWebServer();
    private ThreadPool threadPool;
    private HttpClientManager clientManager;

    @Before
    public void init() throws Exception {
        webServer.start();
        threadPool = createThreadPool(getTestName());
        clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class));
    }

    @After
    public void shutdown() throws IOException {
        clientManager.close();
        terminate(threadPool);
        webServer.close();
    }

    // Happy path: a real HTTP round trip against the mock server; verifies both the parsed
    // result and the outbound request (headers, auth, JSON body).
    public void testExecute_ReturnsSuccessfulResponse() throws IOException {
        var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY);

        try (var sender = senderFactory.createSender("test_service")) {
            sender.start();

            String responseJson = """
                [
                    {
                        ".": 0.133155956864357
                    }
                ]
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var action = createAction(getUrl(webServer), sender);

            PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>();
            action.execute("abc", listener);

            InferenceResults result = listener.actionGet(TIMEOUT);

            assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f))));
            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(
                webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE),
                equalTo(XContentType.JSON.mediaTypeWithoutParameters())
            );
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));

            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(requestMap.size(), is(1));
            assertThat(requestMap.get("inputs"), is("abc"));
        }
    }

    // A malformed URL fails at action construction time, not at execute time.
    public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException {
        try (var sender = mock(Sender.class)) {
            var thrownException = expectThrows(IllegalArgumentException.class, () -> createAction("^^", sender));
            assertThat(thrownException.getMessage(), is("unable to parse url [^^]"));
        }
    }

    // An ElasticsearchException from the sender is propagated to the listener as-is.
    public void testExecute_ThrowsElasticsearchException() {
        var sender = mock(Sender.class);
        doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any());

        var action = createAction(getUrl(webServer), sender);

        PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>();
        action.execute("abc", listener);

        var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));

        assertThat(thrownException.getMessage(), is("failed"));
    }

    // Any other exception from the sender is wrapped in the action's generic failure message.
    public void testExecute_ThrowsException() {
        var sender = mock(Sender.class);
        doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any());

        var action = createAction(getUrl(webServer), sender);

        PlainActionFuture<InferenceResults> listener = new PlainActionFuture<>();
        action.execute("abc", listener);

        var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));

        assertThat(thrownException.getMessage(), is("Failed to send request ELSER Hugging Face request"));
    }

    // Builds an action whose model points at the given URL with a fixed "secret" API key.
    private HuggingFaceElserAction createAction(String url, Sender sender) {
        var model = new HuggingFaceElserModel(
            "id",
            TaskType.SPARSE_EMBEDDING,
            "service",
            new HuggingFaceElserServiceSettings(url),
            new HuggingFaceElserSecretSettings(new SecureString("secret".toCharArray()))
        );

        return new HuggingFaceElserAction(sender, model, mock(ThrottlerManager.class));
    }
}
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java index dd9a89ae4188..3e07bd773c65 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -25,7 +26,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; @@ -63,7 +64,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); + var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); try (var httpClient = manager.getHttpClient()) { httpClient.start(); @@ -83,7 +84,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { public void testStartsANewEvictor_WithNewEvictionInterval() { 
var threadPool = mock(ThreadPool.class); - var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); + var manager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); var evictionInterval = TimeValue.timeValueSeconds(1); manager.setEvictionInterval(evictionInterval); @@ -96,7 +97,13 @@ public void test_DoesNotStartANewEvictor_WithNewEvictionMaxIdle() { Settings settings = Settings.builder() .put(HttpClientManager.CONNECTION_EVICTION_THREAD_INTERVAL_SETTING.getKey(), TimeValue.timeValueNanos(1)) .build(); - var manager = new HttpClientManager(settings, mockConnectionManager, threadPool, mockClusterService(settings)); + var manager = new HttpClientManager( + settings, + mockConnectionManager, + threadPool, + mockClusterService(settings), + mock(ThrottlerManager.class) + ); var evictionMaxIdle = TimeValue.timeValueSeconds(1); manager.setEvictionMaxIdle(evictionMaxIdle); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java index e2c1b1f942f8..c72d9167a9e0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java @@ -29,8 +29,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.threadpool.ScalingExecutorBuilder; -import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.junit.After; @@ -44,8 +42,9 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; -import static 
org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; +import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -82,7 +81,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mockThrottlerManager())) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -100,7 +99,7 @@ public void testSend_MockServerReceivesRequest() throws Exception { } public void testSend_ThrowsErrorIfCalledBeforeStart() throws Exception { - try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mockThrottlerManager())) { PlainActionFuture listener = new PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, @@ -123,7 +122,7 @@ public void testSend_FailedCallsOnFailure() throws Exception { var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -146,7 +145,7 @@ public void testSend_CancelledCallsOnFailure() throws Exception 
{ var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -164,7 +163,7 @@ public void testStart_MultipleCallsOnlyStartTheClientOnce() throws Exception { var httpPost = createHttpPost(webServer.getPort(), "a", "b"); - try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool)) { + try (var client = new HttpClient(emptyHttpSettings(), asyncClient, threadPool, mockThrottlerManager())) { client.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -187,7 +186,7 @@ public void testSend_FailsWhenMaxBytesReadIsExceeded() throws Exception { Settings settings = Settings.builder().put(HttpSettings.MAX_HTTP_RESPONSE_SIZE.getKey(), ByteSizeValue.ONE).build(); var httpSettings = createHttpSettings(settings); - try (var httpClient = HttpClient.create(httpSettings, threadPool, createConnectionManager())) { + try (var httpClient = HttpClient.create(httpSettings, threadPool, createConnectionManager(), mockThrottlerManager())) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -218,20 +217,6 @@ public static HttpPost createHttpPost(int port, String paramKey, String paramVal return httpPost; } - public static ThreadPool createThreadPool(String name) { - return new TestThreadPool( - name, - new ScalingExecutorBuilder( - UTILITY_THREAD_POOL_NAME, - 1, - 4, - TimeValue.timeValueMinutes(10), - false, - "xpack.inference.utility_thread_pool" - ) - ); - } - public static PoolingNHttpClientConnectionManager createConnectionManager() throws IOReactorException { return new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java index 0f0300358907..dba80923c487 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java @@ -20,7 +20,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doAnswer; @@ -49,16 +49,18 @@ public void testStart_CallsExecutorSubmit() throws IOReactorException { when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); - var evictor = new IdleConnectionEvictor( - mockThreadPool, - createConnectionManager(), - new TimeValue(1, TimeUnit.NANOSECONDS), - new TimeValue(1, TimeUnit.NANOSECONDS) - ); - - evictor.start(); - - verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + try ( + var evictor = new IdleConnectionEvictor( + mockThreadPool, + createConnectionManager(), + new TimeValue(1, TimeUnit.NANOSECONDS), + new TimeValue(1, TimeUnit.NANOSECONDS) + ) + ) { + evictor.start(); + + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + } } public void testStart_OnlyCallsSubmitOnce() throws IOReactorException { @@ -66,17 +68,19 @@ public void testStart_OnlyCallsSubmitOnce() throws IOReactorException { 
when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); - var evictor = new IdleConnectionEvictor( - mockThreadPool, - createConnectionManager(), - new TimeValue(1, TimeUnit.NANOSECONDS), - new TimeValue(1, TimeUnit.NANOSECONDS) - ); - - evictor.start(); - evictor.start(); - - verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + try ( + var evictor = new IdleConnectionEvictor( + mockThreadPool, + createConnectionManager(), + new TimeValue(1, TimeUnit.NANOSECONDS), + new TimeValue(1, TimeUnit.NANOSECONDS) + ) + ) { + evictor.start(); + evictor.start(); + + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), any(), any()); + } } public void testCloseExpiredConnections_IsCalled() throws InterruptedException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java index 80a8c4d4914c..becb0cc43e1e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java @@ -10,12 +10,30 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ScalingExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.Collection; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -30,7 +48,8 @@ public static ClusterService mockClusterService(Settings settings) { var registeredSettings = Stream.of( HttpSettings.getSettings(), HttpClientManager.getSettings(), - HttpRequestSenderFactory.HttpRequestSender.getSettings() + HttpRequestSenderFactory.HttpRequestSender.getSettings(), + ThrottlerManager.getSettings() ).flatMap(Collection::stream).collect(Collectors.toSet()); var cSettings = new ClusterSettings(settings, registeredSettings); @@ -38,4 +57,41 @@ public static ClusterService mockClusterService(Settings settings) { return clusterService; } + + public static String getUrl(MockWebServer webServer) { + return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); + } + + public static Map entityAsMap(String body) throws IOException { + InputStream bodyStream = new ByteArrayInputStream(body.getBytes(StandardCharsets.UTF_8)); + + return entityAsMap(bodyStream); + } + + public static Map entityAsMap(InputStream body) throws IOException { + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + XContentParserConfiguration.EMPTY.withRegistry(NamedXContentRegistry.EMPTY) + .withDeprecationHandler(DeprecationHandler.THROW_UNSUPPORTED_OPERATION), + body + ) + ) { + return parser.map(); + } + } + + public static ThreadPool 
createThreadPool(String name) { + return new TestThreadPool( + name, + new ScalingExecutorBuilder( + UTILITY_THREAD_POOL_NAME, + 1, + 4, + TimeValue.timeValueMinutes(10), + false, + "xpack.inference.utility_thread_pool" + ) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java index 85f30c2aed05..245ce09848a7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java @@ -29,7 +29,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java index e2d78324a3c9..3434b951147d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java @@ -17,11 +17,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; 
import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; @@ -29,10 +31,11 @@ import java.nio.charset.StandardCharsets; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -48,20 +51,20 @@ public class HttpRequestSenderFactoryTests extends ESTestCase { private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; private HttpClientManager clientManager; - private Thread thread; + private final AtomicReference threadRef = new AtomicReference<>(); @Before public void init() throws Exception { webServer.start(); threadPool = createThreadPool(getTestName()); - clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty()); - thread = null; + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + threadRef.set(null); } @After public void shutdown() throws IOException, InterruptedException { - 
if (thread != null) { - thread.join(TIMEOUT.millis()); + if (threadRef.get() != null) { + threadRef.get().join(TIMEOUT.millis()); } clientManager.close(); @@ -70,20 +73,7 @@ public void shutdown() throws IOException, InterruptedException { } public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception { - var mockExecutorService = mock(ExecutorService.class); - doAnswer(invocation -> { - Runnable runnable = (Runnable) invocation.getArguments()[0]; - thread = new Thread(runnable); - thread.start(); - - return Void.TYPE; - }).when(mockExecutorService).execute(any(Runnable.class)); - - var mockThreadPool = mock(ThreadPool.class); - when(mockThreadPool.executor(anyString())).thenReturn(mockExecutorService); - when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - - var senderFactory = new HttpRequestSenderFactory(mockThreadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + var senderFactory = createSenderFactory(clientManager, threadRef); try (var sender = senderFactory.createSender("test_service")) { sender.start(); @@ -162,4 +152,22 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws ); } } + + private static HttpRequestSenderFactory createSenderFactory(HttpClientManager clientManager, AtomicReference threadRef) { + var mockExecutorService = mock(ExecutorService.class); + doAnswer(invocation -> { + Runnable runnable = (Runnable) invocation.getArguments()[0]; + threadRef.set(new Thread(runnable)); + threadRef.get().start(); + + return Void.TYPE; + }).when(mockExecutorService).execute(any(Runnable.class)); + + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.executor(anyString())).thenReturn(mockExecutorService); + when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(mockThreadPool.schedule(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.ScheduledCancellable.class)); + + return new 
HttpRequestSenderFactory(mockThreadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index 811881bb10c1..f3718954d8ad 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -38,8 +39,8 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createConnectionManager; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createThreadPool; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.emptyHttpSettings; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -80,7 +81,7 @@ public void testDoRun_SendsRequestAndReceivesResponse() throws Exception { String paramValue = randomAlphaOfLength(3); var httpPost = createHttpPost(webServer.getPort(), paramKey, paramValue); - try (var httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager())) { + try (var 
httpClient = HttpClient.create(emptyHttpSettings(), threadPool, createConnectionManager(), mock(ThrottlerManager.class))) { httpClient.start(); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java new file mode 100644 index 000000000000..346306714399 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static 
org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestTests.createRequest; +import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class HuggingFaceClientTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(getTestName()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mockThrottlerManager()); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testSend_SuccessfulResponse() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + [ + { + ".": 0.133155956864357 + } 
+ ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + + PlainActionFuture listener = new PlainActionFuture<>(); + huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + + InferenceResults result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(Map.of(DEFAULT_RESULTS_FIELD, Map.of(".", 0.13315596f)))); + + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + } + + public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + [ + { + "outputs": [ + [ + [ + ".", + ".", + 0.133155956864357 + ] + ] + ] + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + + PlainActionFuture listener = new PlainActionFuture<>(); + huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to parse the Hugging Face ELSER response for 
request [POST %s HTTP/1.1]", getUrl(webServer))) + ); + + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + } + + public void testSend_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + HuggingFaceClient huggingFaceClient = new HuggingFaceClient(sender, mockThrottlerManager()); + PlainActionFuture listener = new PlainActionFuture<>(); + + var thrownException = expectThrows( + ElasticsearchException.class, + () -> huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener) + ); + assertThat(thrownException.getMessage(), is("failed")); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java new file mode 100644 index 000000000000..b0977da234c1 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.is; + +public class HuggingFaceElserRequestEntityTests extends ESTestCase { + + public void testXContent() throws IOException { + var entity = new HuggingFaceElserRequestEntity("abc"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + { + "inputs" : "abc" + }""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java new file mode 100644 index 000000000000..717f5a7e2409 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.huggingface; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserRequestTests extends ESTestCase { + public void testCreateRequest() throws URISyntaxException, IOException { + var huggingFaceRequest = createRequest("www.google.com", "secret", "abc"); + var httpRequest = huggingFaceRequest.createRequest(); + + assertThat(httpRequest, instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest; + + assertThat(httpPost.getURI().toString(), is("www.google.com")); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), is("abc")); + } + + public static HuggingFaceElserRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { + var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); + var entity = new HuggingFaceElserRequestEntity(input); + + return new HuggingFaceElserRequest(account, entity); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java new file mode 100644 index 000000000000..8cfac1858ab5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentEOFException; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class HuggingFaceElserResponseEntityTests extends ESTestCase { + public void testFromResponse_CreatesTextExpansionResults() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + } + ]"""; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, 
TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("."), is(0.13315596f)); + assertThat(tokenWeightMap.get("the"), is(0.67472112f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + }, + { + "hi": 0.133155956864357, + "super": 0.6747211217880249 + } + ]"""; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + // the results get truncated because weighted token stores them as a float + assertThat(tokenWeightMap.size(), is(2)); + assertThat(tokenWeightMap.get("."), is(0.13315596f)); + assertThat(tokenWeightMap.get("the"), is(0.67472112f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_NotAnArray() { + String responseJson = """ + { + "field": "abc" + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFails_ValueString() { + String responseJson = """ + [ + { + "field": "abc" + } + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFails_ValueInt() throws IOException { + String responseJson = """ + [ + { + "field": 1 + } + ] + """; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + assertThat(tokenWeightMap.size(), is(1)); + assertThat(tokenWeightMap.get("field"), is(1.0f)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_ValueLong() throws IOException { + String responseJson = """ + [ + { + "field": 40294967295 + } + ] + """; + + TextExpansionResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + Map tokenWeightMap = parsedResults.getWeightedTokens() + .stream() + .collect(Collectors.toMap(TextExpansionResults.WeightedToken::token, TextExpansionResults.WeightedToken::weight)); + + assertThat(tokenWeightMap.size(), is(1)); + assertThat(tokenWeightMap.get("field"), is(4.0294965E10F)); + assertFalse(parsedResults.isTruncated()); + } + + public void testFails_ValueObject() { + String responseJson = """ + [ + { + "field": {} + } + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") + ); + } + + public void 
testFails_ResponseIsInvalidJson_MissingSquareBracket() { + String responseJson = """ + [ + { + "field": 0.1 + } + """; + + var thrownException = expectThrows( + XContentEOFException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), containsString("expected close marker for Array (start marker at [Source: (byte[])")); + } + + public void testFails_ResponseIsInvalidJson_MissingField() { + String responseJson = """ + [ + { + : 0.1 + } + ] + """; + + var thrownException = expectThrows( + XContentParseException.class, + () -> HuggingFaceElserResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), containsString("Unexpected character")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java new file mode 100644 index 000000000000..01374d02a21c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class ThrottlerManagerTests extends ESTestCase { + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(getTestName()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testStartsNewThrottler_WhenResetIntervalIsChanged() { + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); + + try (var manager = new ThrottlerManager(Settings.EMPTY, mockThreadPool, mockClusterServiceEmpty())) { + var resetInterval = TimeValue.timeValueSeconds(1); + var currentThrottler = manager.getThrottler(); + manager.setResetInterval(resetInterval); + // once for when the throttler is created initially + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueDays(1)), any()); + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(resetInterval), any()); + assertNotSame(currentThrottler, manager.getThrottler()); + } + } + + public void 
testDoesNotStartNewThrottler_WhenWaitDurationIsChanged() { + var mockThreadPool = mock(ThreadPool.class); + when(mockThreadPool.scheduleWithFixedDelay(any(Runnable.class), any(), any())).thenReturn(mock(Scheduler.Cancellable.class)); + + try (var manager = new ThrottlerManager(Settings.EMPTY, mockThreadPool, mockClusterServiceEmpty())) { + var currentThrottler = manager.getThrottler(); + + var waitDuration = TimeValue.timeValueSeconds(1); + manager.setWaitDuration(waitDuration); + // should only call when initializing the throttler + verify(mockThreadPool, times(1)).scheduleWithFixedDelay(any(Runnable.class), eq(TimeValue.timeValueDays(1)), any()); + assertSame(currentThrottler, manager.getThrottler()); + } + } + + public static ThrottlerManager mockThrottlerManager() { + var mockManager = mock(ThrottlerManager.class); + when(mockManager.getThrottler()).thenReturn(mock(Throttler.class)); + + return mockManager; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java new file mode 100644 index 000000000000..df95232ff85f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.logging; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneId; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.createThreadPool; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class ThrottlerTests extends ESTestCase { + + private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30); + + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(getTestName()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testWarn_LogsOnlyOnce() { + var logger = mock(Logger.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + Clock.fixed(Instant.now(), ZoneId.systemDefault()), + threadPool, + new ConcurrentHashMap<>() + ) + ) { + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } + } + + public void testWarn_LogsOnce_ThenOnceAfterDuration() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + 
var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + + // The first call is always logged + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); + // This call should be allowed because the clock thinks it's after the duration period + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(2)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); + // This call should not be allowed because the clock doesn't think it's pasted the wait period + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } + } + + public void testWarn_AllowsDifferentMessagesToBeLogged() { + var logger = mock(Logger.class); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + throttled.warn(logger, "a different message", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("a different message"), any(Throwable.class)); + } + } + + public void testWarn_LogsRepeated1Time() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + // first message is 
allowed + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); // don't allow this message because duration hasn't expired + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking expired duration + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test, repeated 1 time"), any(Throwable.class)); + } + } + + public void testWarn_LogsRepeated2Times() { + var logger = mock(Logger.class); + + var now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + + try ( + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ) + ) { + when(clock.instant()).thenReturn(now); + // message allowed because it is the first one + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now); // don't allow these messages because duration hasn't expired + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test"), any(Throwable.class)); + + when(clock.instant()).thenReturn(now.plus(Duration.ofMinutes(1))); // allow this message by faking the duration completion + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verify(logger, times(1)).warn(eq("test, repeated 2 times"), any(Throwable.class)); + } + } + + public void testResetTask_ClearsInternalsAfterInterval() throws InterruptedException { + var calledClearLatch = new CountDownLatch(1); + + var 
now = Clock.systemUTC().instant(); + + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var concurrentMap = mock(ConcurrentHashMap.class); + doAnswer(invocation -> { + calledClearLatch.countDown(); + + return Void.TYPE; + }).when(concurrentMap).clear(); + + try (@SuppressWarnings("unchecked") + var ignored = new Throttler(TimeValue.timeValueNanos(1), TimeValue.timeValueSeconds(10), clock, threadPool, concurrentMap)) { + calledClearLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + } + } + + public void testClose_DoesNotAllowLoggingAnyMore() { + var logger = mock(Logger.class); + + var clock = mock(Clock.class); + + var throttled = new Throttler( + TimeValue.timeValueDays(1), + TimeValue.timeValueSeconds(10), + clock, + threadPool, + new ConcurrentHashMap<>() + ); + + throttled.close(); + throttled.warn(logger, "test", new IllegalArgumentException("failed")); + verifyNoMoreInteractions(logger); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java new file mode 100644 index 000000000000..c3aa62870519 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserSecretSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceElserSecretSettings createRandom() { + return new HuggingFaceElserSecretSettings(new SecureString(randomAlphaOfLength(15).toCharArray())); + } + + public void testFromMap() { + var apiKey = "abc"; + var serviceSettings = HuggingFaceElserSecretSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserSecretSettings.API_KEY, apiKey))); + + assertThat(new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); + } + + public void testFromMap_MissingApiKey_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format("[secret_settings] does not contain the required setting [%s]", HuggingFaceElserSecretSettings.API_KEY) + ) + ); + } + + public void testFromMap_EmptyApiKey_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserSecretSettings.API_KEY, ""))) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "[secret_settings] Invalid value empty string. 
[%s] must be a non-empty string", + HuggingFaceElserSecretSettings.API_KEY + ) + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceElserSecretSettings::new; + } + + @Override + protected HuggingFaceElserSecretSettings createTestInstance() { + return createRandom(); + } + + @Override + protected HuggingFaceElserSecretSettings mutateInstance(HuggingFaceElserSecretSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java new file mode 100644 index 000000000000..021904d7c2b6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceElserServiceSettings createRandom() { + return new HuggingFaceElserServiceSettings(randomAlphaOfLength(15)); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var serviceSettings = HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))); + + assertThat(new HuggingFaceElserServiceSettings(url), is(serviceSettings)); + } + + public void testFromMap_MissingUrl_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format("[service_settings] does not contain the required setting [%s]", HuggingFaceElserServiceSettings.URL) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceElserServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceElserServiceSettings.URL, url))) + ); + + assertThat( + thrownException.getMessage(), + containsString(Strings.format("Invalid url [%s] received in setting [service_settings]", url)) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceElserServiceSettings::new; + } + + @Override + protected HuggingFaceElserServiceSettings createTestInstance() { + 
return createRandom(); + } + + @Override + protected HuggingFaceElserServiceSettings mutateInstance(HuggingFaceElserServiceSettings instance) throws IOException { + return createRandom(); + } +} From 7a3ff915f4d9addf224db0b934b3094919c52d97 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 2 Nov 2023 14:49:31 +0000 Subject: [PATCH 38/47] Remove obsolete version check for system index migrations (#101670) --- .../upgrades/SystemIndexMigrator.java | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index a67ca691eb8f..728eae67f22c 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; @@ -68,8 +67,6 @@ public class SystemIndexMigrator extends AllocatedPersistentTask { private static final Logger logger = LogManager.getLogger(SystemIndexMigrator.class); - private static final Version READY_FOR_MIGRATION_VERSION = Version.V_7_16_0; - // Fixed properties & services private final ParentTaskAssigningClient baseClient; private final ClusterService clusterService; @@ -421,11 +418,6 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer innerListener = ActionListener.wrap(listener::accept, this::markAsFailed); try { - Exception versionException = checkNodeVersionsReadyForMigration(clusterState); - if (versionException != null) { - markAsFailed(versionException); - return; - } 
createIndex(migrationInfo, ActionListener.wrap(shardsAcknowledgedResponse -> { logger.debug( "while migrating [{}] , got create index response: [{}]", @@ -602,20 +594,6 @@ public void markAsFailed(Exception e) { super.markAsFailed(e); } - private static Exception checkNodeVersionsReadyForMigration(ClusterState state) { - final Version minNodeVersion = state.nodes().getMinNodeVersion(); - if (minNodeVersion.before(READY_FOR_MIGRATION_VERSION)) { - return new IllegalStateException( - "all nodes must be on version [" - + READY_FOR_MIGRATION_VERSION - + "] or later to migrate feature indices but lowest node version currently in cluster is [" - + minNodeVersion - + "]" - ); - } - return null; - } - /** * Creates a task that will clear the results of previous migration attempts. * @param clusterService The cluster service. From 800b38445cbc0a34c4a785520e861a56352a9a02 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 2 Nov 2023 15:56:15 +0100 Subject: [PATCH 39/47] QL: Update the use of some user-caused exceptions (#100918) This updates the use of certain exceptions in the QL, EQL and SQL code base. The failures that occur due to either user-provided input (such as incorrect query, incorrect query parameters), or data format not appropriate to the attempted operation (like converting strings to various other types) will now result in a 4xx type code, rather than 5xx. This change also proposes turning `QlClientException` concrete, by implementing the `status()` method to return a `BAD_REQUEST` (400). The 400 is a "generic", catch-all client-side error message. The other 4xx errors are specializations. No QL code / exception makes use of specializations, but if they so wish, they can simply override the method. This also makes it clearer to any subclass that a 4xx status must be return. 
This change allows the use of some currently abstract subclasses, such as `EqlClientException`, which can be employed on user-related failures, that are currently inappropriately raised as a form of `QlIllegalArgumentException`, which is a 500-type error. Also, an `InvalidArgumentException` extending `QlClientException` is introduced, being thrown where the execution cannot continue due to user's query (either direct input or invalid application of functions to index data). --- .../test/eql/EqlRestTestCase.java | 2 +- .../xpack/eql/EqlClientException.java | 11 +-- .../eql/analysis/VerificationException.java | 7 +- .../execution/assembler/ExecutionManager.java | 3 +- .../eql/execution/search/RuntimeUtils.java | 3 +- .../math/ToNumberFunctionProcessor.java | 3 +- .../string/CIDRMatchFunctionProcessor.java | 4 +- .../xpack/eql/parser/ExpressionBuilder.java | 12 +-- .../xpack/eql/parser/LogicalPlanBuilder.java | 2 +- .../xpack/eql/parser/ParsingException.java | 6 -- .../xpack/eql/planner/PlanningException.java | 5 - .../xpack/eql/util/MathUtils.java | 5 + .../xpack/eql/util/StringUtils.java | 10 -- .../math/ToNumberFunctionProcessorTests.java | 31 ++++--- .../CIDRMatchFunctionProcessorTests.java | 10 +- .../src/main/resources/date.csv-spec | 2 +- .../src/main/resources/ints.csv-spec | 54 +++++------ .../xpack/esql/EsqlClientException.java | 4 +- .../esql/analysis/VerificationException.java | 5 - .../function/scalar/string/Concat.java | 8 +- .../xpack/esql/parser/ExpressionBuilder.java | 5 +- .../xpack/esql/parser/ParsingException.java | 6 -- .../PhysicalVerificationException.java | 5 - .../xpack/ql/InvalidArgumentException.java | 27 ++++++ .../xpack/ql/ParsingException.java | 6 -- .../xpack/ql/QlClientException.java | 9 +- .../extractor/AbstractFieldHitExtractor.java | 4 +- .../xpack/ql/index/MappingException.java | 9 -- .../xpack/ql/type/DataTypeConverter.java | 30 +++--- .../xpack/ql/util/StringUtils.java | 48 +++++----- .../xpack/ql/expression/LiteralTests.java | 4 
+- .../ql/type/DataTypeConversionTests.java | 65 +++++++------ .../xpack/sql/qa/cli/LenientTestCase.java | 8 +- .../sql/client/JreHttpUrlConnection.java | 1 + .../xpack/sql/SqlClientException.java | 9 +- .../analyzer/VerificationException.java | 5 - .../scalar/datetime/DateAddProcessor.java | 5 +- .../function/scalar/datetime/DateDiff.java | 4 +- .../scalar/datetime/DateDiffProcessor.java | 5 +- .../scalar/datetime/DatePartProcessor.java | 5 +- .../datetime/DateTimeFormatProcessor.java | 3 +- .../datetime/DateTimeParseProcessor.java | 3 +- .../scalar/datetime/DateTruncProcessor.java | 13 +-- .../scalar/geo/StWkttosqlProcessor.java | 3 +- .../function/scalar/math/MathProcessor.java | 6 +- .../sql/expression/literal/geo/GeoShape.java | 5 +- .../xpack/sql/parser/ExpressionBuilder.java | 5 +- .../xpack/sql/parser/ParsingException.java | 6 -- .../xpack/sql/planner/FoldingException.java | 6 -- .../xpack/sql/planner/PlanningException.java | 5 - .../elasticsearch/xpack/sql/util/Check.java | 11 +-- .../extractor/FieldHitExtractorTests.java | 8 +- .../function/scalar/CastProcessorTests.java | 4 +- .../datetime/DateAddProcessorTests.java | 25 ++--- .../datetime/DateDiffProcessorTests.java | 91 ++++++++++--------- .../datetime/DatePartProcessorTests.java | 21 +++-- .../DateTimeFormatProcessorTests.java | 35 +++---- .../datetime/DateTimeParseProcessorTests.java | 91 ++++++++++--------- .../datetime/DateTruncProcessorTests.java | 33 +++---- .../scalar/geo/StWkttosqlProcessorTests.java | 17 ++-- .../scalar/math/MathOperationTests.java | 8 +- .../BinaryStringNumericProcessorTests.java | 88 ++++++++---------- .../scalar/string/InsertProcessorTests.java | 45 ++++----- .../scalar/string/LocateProcessorTests.java | 27 +++--- .../string/SubstringProcessorTests.java | 41 +++++---- .../arithmetic/SqlBinaryArithmeticTests.java | 8 +- .../xpack/sql/optimizer/OptimizerTests.java | 16 +--- .../sql/type/SqlDataTypeConverterTests.java | 76 ++++++++-------- 68 files changed, 543 insertions(+), 
604 deletions(-) create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java index 65ed174f5562..49d25af3bfe5 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlRestTestCase.java @@ -77,7 +77,7 @@ public void testBadRequests() throws Exception { assertBadRequest(""" {"query": "sample by event.category [any where true] [any where true]", "fetch_size": 1001} - """, "Fetch size cannot be greater than [1000]", 500); + """, "Fetch size cannot be greater than [1000]", 400); deleteIndexWithProvisioningClient(defaultValidationIndexName); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java index 75d3dd45f110..0d1b363eeb98 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlClientException.java @@ -8,16 +8,12 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class EqlClientException extends QlClientException { +public class EqlClientException extends QlClientException { - protected EqlClientException(String message, Object... args) { + public EqlClientException(String message, Object... 
args) { super(message, args); } - protected EqlClientException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - protected EqlClientException(String message, Throwable cause) { super(message, cause); } @@ -26,7 +22,4 @@ protected EqlClientException(Throwable cause, String message, Object... args) { super(cause, message, args); } - protected EqlClientException(Throwable cause) { - super(cause); - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java index 7c21b68d7fe9..bed7e1634ac7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/analysis/VerificationException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.analysis; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -14,12 +13,8 @@ public class VerificationException extends EqlClientException { - protected VerificationException(Collection sources) { + public VerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java index dea45e4b9d76..b26c815c1a2b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/ExecutionManager.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.xpack.eql.querydsl.container.FieldExtractorRegistry; import org.elasticsearch.xpack.eql.session.EqlConfiguration; import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.ComputingExtractor; @@ -177,7 +178,7 @@ public Executable assemble( */ public Executable assemble(List> listOfKeys, List plans, Limit limit) { if (cfg.fetchSize() > SAMPLE_MAX_PAGE_SIZE) { - throw new EqlIllegalArgumentException("Fetch size cannot be greater than [{}]", SAMPLE_MAX_PAGE_SIZE); + throw new InvalidArgumentException("Fetch size cannot be greater than [{}]", SAMPLE_MAX_PAGE_SIZE); } FieldExtractorRegistry extractorRegistry = new FieldExtractorRegistry(); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index aff398a523e9..8640378878f1 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; import org.elasticsearch.xpack.eql.execution.search.extractor.CompositeKeyExtractor; import org.elasticsearch.xpack.eql.execution.search.extractor.FieldHitExtractor; @@ -156,7 +157,7 @@ public static HitExtractor createExtractor(FieldExtraction ref, EqlConfiguration hitNames.add(he.hitName()); if 
(hitNames.size() > 1) { - throw new EqlIllegalArgumentException("Multi-level nested fields [{}] not supported yet", hitNames); + throw new EqlClientException("Multi-level nested fields [{}] not supported yet", hitNames); } return new HitExtractorInput(l.source(), l.expression(), he); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java index 214600c69b6c..2e64a54eac16 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessor.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import java.io.IOException; @@ -79,7 +80,7 @@ public static Object doProcess(Object value, Object base) { return Long.parseLong(value.toString(), radix); } } catch (NumberFormatException e) { - throw new EqlIllegalArgumentException("Unable to convert [{}] to number of base [{}]", value, radix); + throw new InvalidArgumentException(e, "Unable to convert [{}] to number of base [{}]", value, radix); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java index 143513f59cdd..d802fd60b860 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessor.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.CIDRUtils; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.util.Check; @@ -72,7 +72,7 @@ public static Object doProcess(Object source, List addresses) { try { return CIDRUtils.isInRange((String) source, arr); } catch (IllegalArgumentException e) { - throw new EqlIllegalArgumentException(e.getMessage()); + throw new InvalidArgumentException(e.getMessage()); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java index 381d7e2ecfd3..5b8bf7116526 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ExpressionBuilder.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalBinaryContext; import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalNotContext; import org.elasticsearch.xpack.eql.parser.EqlBaseParser.PredicateContext; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -205,8 +205,8 @@ public Literal 
visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { try { return new Literal(source, Double.valueOf(StringUtils.parseDouble(text)), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException siae) { - throw new ParsingException(source, siae.getMessage()); + } catch (InvalidArgumentException ciae) { + throw new ParsingException(source, ciae.getMessage()); } } @@ -242,13 +242,13 @@ public Literal visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { try { Number value = StringUtils.parseIntegral(text); return new Literal(source, value, DataTypes.fromJava(value)); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException ciae) { // if it's too large, then quietly try to parse as a float instead try { return new Literal(source, Double.valueOf(StringUtils.parseDouble(text)), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException ignored) {} + } catch (InvalidArgumentException ignored) {} - throw new ParsingException(source, siae.getMessage()); + throw new ParsingException(source, ciae.getMessage()); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java index 8e96ea41c0a8..2f57bc021a1c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/LogicalPlanBuilder.java @@ -339,7 +339,7 @@ public Sequence visitSequence(SequenceContext ctx) { } if (queries.stream().allMatch(KeyedFilter::isMissingEventFilter)) { - throw new IllegalStateException("A sequence requires at least one positive event query; found none"); + throw new ParsingException(source, "A sequence requires at least one positive event query; found none"); } return new Sequence(source, queries, until, maxSpan, fieldTimestamp(), fieldTiebreaker(), resultPosition()); diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java index 820b7451ac7a..1c5e85f58795 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.parser; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.tree.Source; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java index 25ac30caf0c2..87a6119d38ac 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/PlanningException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.eql.planner; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.eql.EqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -22,8 +21,4 @@ protected PlanningException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java index 4c0d37026fe0..7e35a0ba1eb4 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/MathUtils.java @@ -14,6 +14,11 @@ public class MathUtils { public static int abs(int number) { if (number == Integer.MIN_VALUE) { + // TODO: can this function be removed? + // This case should never occur, as `number` is either a non-negative user-provided input, + // or the result of opposing sign integers summation. + // Additionally, the math on offset/limit is inexact anyways. + // But, if this can somehow happen, we should (1) have a test and (2) switch to exact math everywhere. throw new EqlIllegalArgumentException("[" + number + "] cannot be negated since the result is outside the range"); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java index 0f27a4989147..81983adbbcbe 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/util/StringUtils.java @@ -7,10 +7,7 @@ package org.elasticsearch.xpack.eql.util; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; -import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.ql.type.DataTypes; public final class StringUtils { @@ -29,11 +26,4 @@ public static LikePattern toLikePattern(String s) { return new LikePattern(likeString, escape); } - - public static LikePattern toLikePattern(Expression expression) { - if (expression.foldable() == false || DataTypes.isString(expression.dataType()) == false) { - throw new EqlIllegalArgumentException("Invalid like pattern received {}", expression); - } - return toLikePattern(expression.fold().toString()); - } } diff --git 
a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java index cb7c01e0047b..3b1183ce20aa 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/math/ToNumberFunctionProcessorTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.eql.expression.function.scalar.math; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.QlException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; @@ -19,12 +21,17 @@ private static Object process(Object value, Object base) { return new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null); } + private static String error(Object value, Object base, Class exceptionClass) { + Exception e = expectThrows(exceptionClass, () -> new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null)); + return e.getMessage(); + } + private static String error(Object value, Object base) { - QlIllegalArgumentException saie = expectThrows( - QlIllegalArgumentException.class, - () -> new ToNumber(EMPTY, l(value), l(base)).makePipe().asProcessor().process(null) - ); - return saie.getMessage(); + return error(value, base, QlIllegalArgumentException.class); + } + + private static String clientError(Object value, Object base) { + return error(value, base, InvalidArgumentException.class); } public void toNumberWithLongRange() { @@ -121,7 +128,7 @@ public void toNumberWithUnsupportedDoubleBase() { } public void testNegativeBase16() { - assertEquals("Unable to 
convert [-0x1] to number of base [16]", error("-0x1", 16)); + assertEquals("Unable to convert [-0x1] to number of base [16]", clientError("-0x1", 16)); } public void testNumberInvalidDataType() { @@ -139,11 +146,11 @@ public void testInvalidBase() { } public void testInvalidSourceString() { - assertEquals("Unable to convert [] to number of base [10]", error("", null)); - assertEquals("Unable to convert [] to number of base [16]", error("", 16)); - assertEquals("Unable to convert [foo] to number of base [10]", error("foo", null)); - assertEquals("Unable to convert [foo] to number of base [16]", error("foo", 16)); - assertEquals("Unable to convert [1.2.3.4] to number of base [10]", error("1.2.3.4", 10)); - assertEquals("Unable to convert [1.2.3.4] to number of base [16]", error("1.2.3.4", 16)); + assertEquals("Unable to convert [] to number of base [10]", clientError("", null)); + assertEquals("Unable to convert [] to number of base [16]", clientError("", 16)); + assertEquals("Unable to convert [foo] to number of base [10]", clientError("foo", null)); + assertEquals("Unable to convert [foo] to number of base [16]", clientError("foo", 16)); + assertEquals("Unable to convert [1.2.3.4] to number of base [10]", clientError("1.2.3.4", 10)); + assertEquals("Unable to convert [1.2.3.4] to number of base [16]", clientError("1.2.3.4", 16)); } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java index 66e544ed55ec..1e218749a499 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/CIDRMatchFunctionProcessorTests.java @@ -8,7 +8,7 @@ package 
org.elasticsearch.xpack.eql.expression.function.scalar.string; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -36,8 +36,8 @@ public void testCIDRMatchFunctionInvalidInput() { ArrayList addresses = new ArrayList<>(); // Invalid source address - EqlIllegalArgumentException e = expectThrows( - EqlIllegalArgumentException.class, + Exception e = expectThrows( + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48"), addresses).makePipe().asProcessor().process(null) ); @@ -46,7 +46,7 @@ public void testCIDRMatchFunctionInvalidInput() { // Invalid match ip address addresses.add(l("10.6.48")); e = expectThrows( - EqlIllegalArgumentException.class, + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48.157"), addresses).makePipe().asProcessor().process(null) ); @@ -56,7 +56,7 @@ public void testCIDRMatchFunctionInvalidInput() { // Invalid CIDR addresses.add(l("10.6.12/12")); e = expectThrows( - EqlIllegalArgumentException.class, + InvalidArgumentException.class, () -> new CIDRMatch(EMPTY, l("10.6.48.157"), addresses).makePipe().asProcessor().process(null) ); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 72fa96710710..f85dbeda7f6b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -185,7 +185,7 @@ string:keyword |datetime:date convertFromUnsignedLong row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:58: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:58: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul | dt:date [9223372036854775808, 520128000000]|1986-06-26T00:00:00.000Z diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 68ed0319047f..cdc25587793c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -66,7 +66,7 @@ long:long |ul:ul convertDoubleToUL row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warning:Line 1:48: evaluation of [to_ul(1e20)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:48: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E20] out of [unsigned_long] range +warning:Line 1:48: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E20] out of [unsigned_long] range d:double |ul:ul |overflow:ul 123.4 |123 |null @@ -123,7 +123,7 @@ int:integer |long:long convertULToLong row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warning:Line 1:67: evaluation of [to_long(ul)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:67: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:67: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul | long:long [9223372036854775807, 9223372036854775808]|9223372036854775807 @@ -164,7 +164,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long convertDoubleToLong row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warning:Line 1:51: evaluation of [to_long(1e19)] failed, treating result as null. 
Only first 20 failures recorded. -warning:Line 1:51: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E19] out of [long] range +warning:Line 1:51: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range d:double |d2l:long |overflow:long 123.4 |123 |null @@ -186,7 +186,7 @@ ROW long = [5013792, 2147483647, 501379200000] // end::to_int-long[] ; warning:Line 2:14: evaluation of [TO_INTEGER(long)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:14: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [501379200000] out of [integer] range +warning:Line 2:14: org.elasticsearch.xpack.ql.InvalidArgumentException: [501379200000] out of [integer] range // tag::to_int-long-result[] long:long |int:integer @@ -198,7 +198,7 @@ convertULToInt row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warning:Line 1:57: evaluation of [to_int(ul)] failed, treating result as null. Only first 20 failures recorded. // UL conversion to int dips into long; not the most efficient, but it's how SQL does it too. -warning:Line 1:57: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range ul:ul |int:integer [2147483647, 9223372036854775808]|2147483647 @@ -232,7 +232,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer |overflow:in convertDoubleToInt row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warning:Line 1:54: evaluation of [to_integer(1e19)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:54: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [1.0E19] out of [long] range +warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range d:double |d2i:integer |overflow:integer 123.4 |123 |null @@ -476,7 +476,7 @@ ROW deg = [90, 180, 270] warningWithFromSource from employees | sort emp_no | limit 1 | eval x = to_long(emp_no) * 10000000 | eval y = to_int(x) > 1 | keep y; warning:Line 1:89: evaluation of [to_int(x)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:89: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100010000000] out of [integer] range +warning:Line 1:89: org.elasticsearch.xpack.ql.InvalidArgumentException: [100010000000] out of [integer] range y:boolean null @@ -486,26 +486,26 @@ null multipleWarnings-Ignore from employees | sort emp_no | eval x = to_long(emp_no) * 10000000 | where to_int(x) > 1 | keep x | limit 1; warning:Line 1:76: evaluation of [to_int(x)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100010000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100020000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100030000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100040000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100050000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100060000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100070000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100080000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100090000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100100000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100110000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100120000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100130000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100140000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100150000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100160000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100170000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: 
[100180000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100190000000] out of [integer] range -warning:Line 1:76: org.elasticsearch.xpack.ql.QlIllegalArgumentException: [100200000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100010000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100020000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100030000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100040000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100050000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100060000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100070000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100080000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100090000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100100000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100110000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100120000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100130000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100140000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100150000000] out of [integer] range +warning:Line 1:76: 
org.elasticsearch.xpack.ql.InvalidArgumentException: [100160000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100170000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100180000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100190000000] out of [integer] range +warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [100200000000] out of [integer] range x:long ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java index 48f03e2df911..ba539777b36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java @@ -8,9 +8,9 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class EsqlClientException extends QlClientException { +public class EsqlClientException extends QlClientException { - protected EsqlClientException(String message, Object... args) { + public EsqlClientException(String message, Object... 
args) { super(message, args); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java index 4a86dd1741da..4372401e7d8f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -22,8 +21,4 @@ protected VerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 1e84bf60b0dd..87d6460d7098 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -12,7 +12,6 @@ import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; @@ -98,12 +97,7 @@ private static int checkedTotalLength(BytesRef[] values) { length += v.length; } if (length > MAX_CONCAT_LENGTH) { - throw new 
EsqlClientException("concatenating more than [" + MAX_CONCAT_LENGTH + "] bytes is not supported") { - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; // return a 400 response - } - }; + throw new EsqlClientException("concatenating more than [" + MAX_CONCAT_LENGTH + "] bytes is not supported"); } return length; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 5d3108a785f5..f24324fac2fb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; @@ -116,7 +117,7 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { try { number = StringUtils.parseIntegral(text); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { // if it's too large, then quietly try to parse as a float instead try { return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); @@ -225,7 +226,7 @@ public Object visitQualifiedIntegerLiteral(EsqlBaseParser.QualifiedIntegerLitera try { TemporalAmount quantity = parseTemporalAmout(value, qualifier, source); return new Literal(source, quantity, quantity instanceof Duration ? 
TIME_DURATION : DATE_PERIOD); - } catch (QlIllegalArgumentException | ArithmeticException e) { + } catch (InvalidArgumentException | ArithmeticException e) { // the range varies by unit: Duration#ofMinutes(), #ofHours() will Math#multiplyExact() to reduce the unit to seconds; // and same for Period#ofWeeks() throw new ParsingException(source, "Number [{}] outside of [{}] range", ctx.integerValue().getText(), qualifier); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 1cb71d64d554..6779e25b8851 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.parser; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.tree.Source; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java index f303fc5a7e04..3bfc8385bbb8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PhysicalVerificationException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.planner; -import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.common.Failure; @@ -19,8 +18,4 @@ public PhysicalVerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java new file mode 100644 index 000000000000..3713da26cb26 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/InvalidArgumentException.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.ql; + +/** + * Exception thrown when unable to continue processing client request, + * in cases such as invalid query parameter or failure to apply requested processing to given data. + * It's meant as a generic equivalent to QlIllegalArgumentException (that's a server exception). + * TODO: reason for [E|S|ES]QL specializations of QlIllegalArgumentException? + * TODO: the intended use of ql.ParsingException, vs its [E|S|ES]QL equivalents, subclassed from the respective XxxClientException? + * Same for PlanningException. + */ +public class InvalidArgumentException extends QlClientException { + + public InvalidArgumentException(String message, Object... args) { + super(message, args); + } + + public InvalidArgumentException(Throwable cause, String message, Object... 
args) { + super(cause, message, args); + } + +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java index ca7b33291717..e343bb7be189 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/ParsingException.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ql; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.tree.Source; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java index e0aba6507c7f..0a28096c8df4 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/QlClientException.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.ql; +import org.elasticsearch.rest.RestStatus; + /** * Exception thrown by performing client (or user) code. * Typically it means the given action or query is incorrect and needs fixing. */ -public abstract class QlClientException extends QlException { +public class QlClientException extends QlException { protected QlClientException(String message, Object... args) { super(message, args); @@ -31,4 +33,9 @@ protected QlClientException(Throwable cause, String message, Object... 
args) { protected QlClientException(Throwable cause) { super(cause); } + + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java index 5e0aa654392e..0defe009c6f9 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -193,7 +194,8 @@ protected Object unwrapFieldsMultiValue(Object values) { } values = unwrappedValues; } else { - throw new QlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + // missing `field_multi_value_leniency` setting + throw new InvalidArgumentException("Arrays (returned by [{}]) are not supported", fieldName); } } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java index 9fb9ce733681..d2314384384b 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/MappingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.ql.index; -import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.xpack.ql.QlClientException; public class MappingException extends QlClientException { @@ -15,12 +14,4 @@ public MappingException(String message, Object... args) { super(message, args); } - public MappingException(String message, Throwable ex) { - super(message, ex); - } - - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java index 02a9f0f05e25..bb7fa9cf8c03 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Booleans; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; @@ -363,28 +363,28 @@ private static DefaultConverter conversionToBoolean(DataType from) { public static byte safeToByte(long x) { if (x > Byte.MAX_VALUE || x < Byte.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [byte] range"); + throw new InvalidArgumentException("[{}] out of [byte] range", x); } return (byte) x; } public static short safeToShort(long x) { if (x > Short.MAX_VALUE || x < Short.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [short] range"); + throw new InvalidArgumentException("[{}] out of [short] range", x); } return (short) x; } public static int safeToInt(long x) { if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [integer] range"); + throw new InvalidArgumentException("[{}] out of [integer] range", 
x); } return (int) x; } public static long safeDoubleToLong(double x) { if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + x + "] out of [long] range"); + throw new InvalidArgumentException("[{}] out of [long] range", x); } return Math.round(x); } @@ -400,20 +400,20 @@ public static Long safeToLong(Number x) { } return x.longValue(); } catch (ArithmeticException ae) { - throw new QlIllegalArgumentException("[" + x + "] out of [long] range", ae); + throw new InvalidArgumentException(ae, "[{}] out of [long] range", x); } } public static BigInteger safeToUnsignedLong(Double x) { if (inUnsignedLongRange(x) == false) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return BigDecimal.valueOf(x).toBigInteger(); } public static BigInteger safeToUnsignedLong(Long x) { if (x < 0) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return BigInteger.valueOf(x); } @@ -421,7 +421,7 @@ public static BigInteger safeToUnsignedLong(Long x) { public static BigInteger safeToUnsignedLong(String x) { BigInteger bi = new BigDecimal(x).toBigInteger(); if (isUnsignedLong(bi) == false) { - throw new QlIllegalArgumentException("[" + x + "] out of [unsigned_long] range"); + throw new InvalidArgumentException("[{}] out of [unsigned_long] range", x); } return bi; } @@ -451,7 +451,7 @@ public static Number toInteger(double x, DataType dataType) { public static boolean convertToBoolean(String val) { String lowVal = val.toLowerCase(Locale.ROOT); if (Booleans.isBoolean(lowVal) == false) { - throw new QlIllegalArgumentException("cannot cast [" + val + "] to [boolean]"); + throw new InvalidArgumentException("cannot cast [{}] to [boolean]", val); } return Booleans.parseBoolean(lowVal); } @@ -459,7 +459,7 @@ public static boolean 
convertToBoolean(String val) { /** * Converts arbitrary object to the desired data type. *

- * Throws QlIllegalArgumentException if such conversion is not possible + * Throws InvalidArgumentException if such conversion is not possible */ public static Object convert(Object value, DataType dataType) { DataType detectedType = DataTypes.fromJava(value); @@ -469,7 +469,7 @@ public static Object convert(Object value, DataType dataType) { Converter converter = converterFor(detectedType, dataType); if (converter == null) { - throw new QlIllegalArgumentException( + throw new InvalidArgumentException( "cannot convert from [{}], type [{}] to [{}]", value, detectedType.typeName(), @@ -546,7 +546,7 @@ public enum DefaultConverter implements Converter { STRING_TO_IP(o -> { if (InetAddresses.isInetAddress(o.toString()) == false) { - throw new QlIllegalArgumentException("[" + o + "] is not a valid IPv4 or IPv6 address"); + throw new InvalidArgumentException("[{}] is not a valid IPv4 or IPv6 address", o); } return o; }), @@ -573,9 +573,9 @@ public static Function fromString(Function conve try { return converter.apply(value.toString()); } catch (NumberFormatException e) { - throw new QlIllegalArgumentException(e, "cannot cast [{}] to [{}]", value, to); + throw new InvalidArgumentException(e, "cannot cast [{}] to [{}]", value, to); } catch (DateTimeParseException | IllegalArgumentException e) { - throw new QlIllegalArgumentException(e, "cannot cast [{}] to [{}]: {}", value, to, e.getMessage()); + throw new InvalidArgumentException(e, "cannot cast [{}] to [{}]: {}", value, to, e.getMessage()); } }; } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java index beebf0d58144..83c731ce4e7a 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java @@ -17,7 +17,7 @@ import org.elasticsearch.xcontent.ToXContent; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import java.io.IOException; import java.math.BigInteger; @@ -42,6 +42,8 @@ private StringUtils() {} private static final String[] INTEGER_ORDINALS = new String[] { "th", "st", "nd", "rd", "th", "th", "th", "th", "th", "th" }; + private static final String INVALID_REGEX_SEQUENCE = "Invalid sequence - escape character is not followed by special wildcard char"; + // CamelCase to camel_case public static String camelCaseToUnderscore(String string) { if (Strings.hasText(string) == false) { @@ -109,7 +111,7 @@ public static String likeToJavaPattern(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { escaped = true; if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } } else { switch (curr) { @@ -117,9 +119,7 @@ public static String likeToJavaPattern(String pattern, char escape) { case '_' -> regex.append(escaped ? 
"_" : "."); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -151,7 +151,7 @@ public static String wildcardToJavaPattern(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { escaped = true; if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } } else { switch (curr) { @@ -159,9 +159,7 @@ public static String wildcardToJavaPattern(String pattern, char escape) { case '?' -> regex.append(escaped ? "\\?" : "."); default -> { if (escaped && escape != curr) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -197,7 +195,7 @@ public static String likeToLuceneWildcard(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } escaped = true; } else { @@ -206,9 +204,7 @@ public static String likeToLuceneWildcard(String pattern, char escape) { case '_' -> wildcard.append(escaped ? 
"_" : "?"); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // escape special regex characters switch (curr) { @@ -238,7 +234,7 @@ public static String likeToIndexWildcard(String pattern, char escape) { if (escaped == false && (curr == escape) && escape != 0) { if (i + 1 == pattern.length()) { - throw new QlIllegalArgumentException("Invalid sequence - escape character is not followed by special wildcard char"); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } escaped = true; } else { @@ -247,9 +243,7 @@ public static String likeToIndexWildcard(String pattern, char escape) { case '_' -> wildcard.append(escaped ? "_" : "*"); default -> { if (escaped) { - throw new QlIllegalArgumentException( - "Invalid sequence - escape character is not followed by special wildcard char" - ); + throw new InvalidArgumentException(INVALID_REGEX_SEQUENCE); } // the resolver doesn't support escaping... 
wildcard.append(curr); @@ -311,24 +305,24 @@ public static List findSimilar(String match, Iterable potentialM return scoredMatches.stream().map(a -> a.v2()).collect(toList()); } - public static double parseDouble(String string) throws QlIllegalArgumentException { + public static double parseDouble(String string) throws InvalidArgumentException { double value; try { value = Double.parseDouble(string); } catch (NumberFormatException nfe) { - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException(nfe, "Cannot parse number [{}]", string); } if (Double.isInfinite(value)) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } if (Double.isNaN(value)) { - throw new QlIllegalArgumentException("[{}] cannot be parsed as a number (NaN)", string); + throw new InvalidArgumentException("[{}] cannot be parsed as a number (NaN)", string); } return value; } - public static long parseLong(String string) throws QlIllegalArgumentException { + public static long parseLong(String string) throws InvalidArgumentException { try { return Long.parseLong(string); } catch (NumberFormatException nfe) { @@ -337,25 +331,25 @@ public static long parseLong(String string) throws QlIllegalArgumentException { try { bi.longValueExact(); } catch (ArithmeticException ae) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } } catch (NumberFormatException ex) { // parsing fails, go through } - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException("Cannot parse number [{}]", string); } } - public static Number parseIntegral(String string) throws QlIllegalArgumentException { + public static Number parseIntegral(String string) throws InvalidArgumentException { BigInteger bi; try { bi = new 
BigInteger(string); } catch (NumberFormatException ex) { - throw new QlIllegalArgumentException("Cannot parse number [{}]", string); + throw new InvalidArgumentException(ex, "Cannot parse number [{}]", string); } if (bi.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { if (isUnsignedLong(bi) == false) { - throw new QlIllegalArgumentException("Number [{}] is too large", string); + throw new InvalidArgumentException("Number [{}] is too large", string); } return bi; } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java index c71f34307a6b..13436e983af0 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/LiteralTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ql.expression; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.ql.tree.SourceTests; import org.elasticsearch.xpack.ql.type.Converter; @@ -133,7 +133,7 @@ private List validReplacementDataTypes(Object value, DataType type) { Converter c = DataTypeConverter.converterFor(type, candidate); c.convert(value); validDataTypes.add(candidate); - } catch (QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { // invalid conversion then.... 
} } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java index 3d439acc0477..c0cb0f6667b5 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ql.type; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; @@ -35,7 +35,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.UNSUPPORTED; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; -import static org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX; public class DataTypeConversionTests extends ESTestCase { @@ -72,7 +71,7 @@ public void testConversionToLong() { assertEquals(10L, conversion.convert(10.0)); assertEquals(10L, conversion.convert(10.1)); assertEquals(11L, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -82,7 +81,7 @@ public void testConversionToLong() { assertEquals(bi.longValue(), conversion.convert(bi)); BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(longPlus)); + Exception e = expectThrows(InvalidArgumentException.class, () -> 
conversion.convert(longPlus)); assertEquals("[" + longPlus + "] out of [long] range", e.getMessage()); } { @@ -110,7 +109,7 @@ public void testConversionToLong() { assertNull(conversion.convert(null)); assertEquals(1L, conversion.convert("1")); assertEquals(0L, conversion.convert("-0")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [long]", e.getMessage()); } } @@ -123,7 +122,7 @@ public void testConversionToDateTime() { assertEquals(asDateTime(10L), conversion.convert(10.0)); assertEquals(asDateTime(10L), conversion.convert(10.1)); assertEquals(asDateTime(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -133,7 +132,7 @@ public void testConversionToDateTime() { assertEquals(asDateTime(bi.longValue()), conversion.convert(bi)); BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(longPlus)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(longPlus)); assertEquals("[" + longPlus + "] out of [long] range", e.getMessage()); } { @@ -175,7 +174,7 @@ public void testConversionToDateTime() { Converter forward = converterFor(DATETIME, KEYWORD); Converter back = converterFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to 
[datetime]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -222,7 +221,7 @@ public void testConversionToFloat() { assertEquals(1.0f, (float) conversion.convert("1"), 0); assertEquals(0.0f, (float) conversion.convert("-0"), 0); assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [float]", e.getMessage()); } } @@ -269,7 +268,7 @@ public void testConversionToDouble() { assertEquals(1.0, (double) conversion.convert("1"), 0); assertEquals(0.0, (double) conversion.convert("-0"), 0); assertEquals(12.776, (double) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [double]", e.getMessage()); } } @@ -326,17 +325,17 @@ public void testConversionToBoolean() { assertEquals(true, conversion.convert("True")); assertEquals(false, conversion.convert("fAlSe")); // Everything else should fail - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10")); assertEquals("cannot cast [10] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-1")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-1")); assertEquals("cannot cast [-1] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0")); assertEquals("cannot cast [0] to [boolean]", e.getMessage()); - e = 
expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("blah")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("blah")); assertEquals("cannot cast [blah] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("Yes")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("Yes")); assertEquals("cannot cast [Yes] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("nO")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("nO")); assertEquals("cannot cast [nO] to [boolean]", e.getMessage()); } } @@ -350,11 +349,11 @@ public void testConversionToUnsignedLong() { assertEquals(BigDecimal.valueOf(d).toBigInteger(), conversion.convert(d)); Double ulmAsDouble = UNSIGNED_LONG_MAX.doubleValue(); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(ulmAsDouble)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(ulmAsDouble)); assertEquals("[" + ulmAsDouble + "] out of [unsigned_long] range", e.getMessage()); Double nd = -Math.abs(randomDouble()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(nd)); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(nd)); assertEquals("[" + nd + "] out of [unsigned_long] range", e.getMessage()); } { @@ -364,7 +363,7 @@ public void testConversionToUnsignedLong() { BigInteger bi = BigInteger.valueOf(randomNonNegativeLong()); assertEquals(bi, conversion.convert(bi.longValue())); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bi.negate())); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bi.negate())); assertEquals("[" + bi.negate() + "] out of [unsigned_long] range", e.getMessage()); } { @@ -374,7 +373,7 @@ public void 
testConversionToUnsignedLong() { long l = randomNonNegativeLong(); assertEquals(BigInteger.valueOf(l), conversion.convert(asDateTime(l))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(-l))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(-l))); assertEquals("[" + -l + "] out of [unsigned_long] range", e.getMessage()); } { @@ -395,9 +394,9 @@ public void testConversionToUnsignedLong() { assertEquals(bi, conversion.convert(bi.toString() + "." + randomNonNegativeLong())); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(BigInteger.ONE.negate().toString())); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(BigInteger.ONE.negate().toString())); assertEquals("[-1] out of [unsigned_long] range", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString())); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString())); assertEquals("[" + UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString() + "] out of [unsigned_long] range", e.getMessage()); } } @@ -410,7 +409,7 @@ public void testConversionToInt() { assertEquals(10, conversion.convert(10.0)); assertEquals(10, conversion.convert(10.1)); assertEquals(11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } { @@ -420,7 +419,7 @@ public void testConversionToInt() { assertEquals(bi.intValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Integer.MAX_VALUE, Long.MAX_VALUE)); - Exception e = 
expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [integer] range", e.getMessage()); } { @@ -431,7 +430,7 @@ public void testConversionToInt() { assertEquals(-123456789, conversion.convert(asDateTime(-123456789L))); // Nanos are ignored, only millis are used assertEquals(62123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:01:02.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE))); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } } @@ -444,7 +443,7 @@ public void testConversionToShort() { assertEquals((short) 10, conversion.convert(10.0)); assertEquals((short) 10, conversion.convert(10.1)); assertEquals((short) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } { @@ -454,7 +453,7 @@ public void testConversionToShort() { assertEquals(bi.shortValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Short.MAX_VALUE, Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [short] range", e.getMessage()); } { @@ -464,7 +463,7 @@ public void testConversionToShort() { assertEquals((short) -12345, conversion.convert(asDateTime(-12345L))); // Nanos are ignored, only millis are used 
assertEquals((short) 1123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:01.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } } @@ -477,7 +476,7 @@ public void testConversionToByte() { assertEquals((byte) 10, conversion.convert(10.0)); assertEquals((byte) 10, conversion.convert(10.1)); assertEquals((byte) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); assertEquals("[" + Short.MAX_VALUE + "] out of [byte] range", e.getMessage()); } { @@ -487,7 +486,7 @@ public void testConversionToByte() { assertEquals(bi.byteValueExact(), conversion.convert(bi)); BigInteger bip = BigInteger.valueOf(randomLongBetween(Byte.MAX_VALUE, Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(bip)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip)); assertEquals("[" + bip + "] out of [byte] range", e.getMessage()); } { @@ -497,7 +496,7 @@ public void testConversionToByte() { assertEquals((byte) -123, conversion.convert(asDateTime(-123L))); // Nanos are ignored, only millis are used assertEquals((byte) 123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:00.123456789Z"))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage()); } } @@ 
-546,7 +545,7 @@ public void testEsDataTypes() { } public void testConversionToUnsupported() { - Exception e = expectThrows(QlIllegalArgumentException.class, () -> DataTypeConverter.convert(Integer.valueOf(1), UNSUPPORTED)); + Exception e = expectThrows(InvalidArgumentException.class, () -> DataTypeConverter.convert(Integer.valueOf(1), UNSUPPORTED)); assertEquals("cannot convert from [1], type [integer] to [unsupported]", e.getMessage()); } @@ -554,7 +553,7 @@ public void testStringToIp() { Converter conversion = converterFor(KEYWORD, IP); assertNull(conversion.convert(null)); assertEquals("192.168.1.1", conversion.convert("192.168.1.1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10.1.1.300")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10.1.1.300")); assertEquals("[10.1.1.300] is not a valid IPv4 or IPv6 address", e.getMessage()); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java index 76f84541e5bb..ab63913760fe 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java @@ -27,10 +27,8 @@ public void testDefaultNoLenient() throws IOException { index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); assertThat( command("SELECT * FROM test"), - containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + containsString("[?1l>[?1000l[?2004l[31;1mBad request [[3;33;22mArrays (returned by [tags]) are not supported[23;31;1m][0m") ); - while ("][23;31;1m][0m".equals(readLine()) == false) - ; // clean console to avoid failures on shutdown } public void testExplicitNoLenient() throws IOException { 
@@ -38,9 +36,7 @@ public void testExplicitNoLenient() throws IOException { assertEquals("[?1l>[?1000l[?2004llenient set to [90mfalse[0m", command("lenient = false")); assertThat( command("SELECT * FROM test"), - containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + containsString("[?1l>[?1000l[?2004l[31;1mBad request [[3;33;22mArrays (returned by [tags]) are not supported[23;31;1m][0m") ); - while ("][23;31;1m][0m".equals(readLine()) == false) - ; // clean console to avoid failures on shutdown } } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java index b41c5864a28b..0fe7d15d19b5 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java @@ -345,6 +345,7 @@ public static SqlExceptionType fromRemoteFailureType(String type) { case "analysis_exception": case "resource_not_found_exception": case "verification_exception": + case "invalid_argument_exception": return DATA; case "planning_exception": case "mapping_exception": diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java index 22d6118a5d48..7d105c4b4132 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlClientException.java @@ -8,16 +8,12 @@ import org.elasticsearch.xpack.ql.QlClientException; -public abstract class SqlClientException extends QlClientException { +public class SqlClientException extends QlClientException { protected SqlClientException(String message, Object... 
args) { super(message, args); } - protected SqlClientException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } - protected SqlClientException(String message, Throwable cause) { super(message, cause); } @@ -26,7 +22,4 @@ protected SqlClientException(Throwable cause, String message, Object... args) { super(cause, message, args); } - protected SqlClientException(Throwable cause) { - super(cause); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java index 41dd787d5e98..6979b66378b7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.analysis.analyzer; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.sql.SqlClientException; @@ -18,8 +17,4 @@ protected VerificationException(Collection sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java index 54bdbeb0b244..abe374bc3f2a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessor.java @@ -7,6 +7,7 @@ package 
org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateAdd.Part; @@ -52,9 +53,9 @@ public static Object process(Object unit, Object numberOfUnits, Object timestamp if (datePartField == null) { List similar = Part.findSimilar((String) unit); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part to add; " + "did you mean {}?", unit, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java index 2ceca156af45..ee6063ba466f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiff.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -14,7 +15,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import 
org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -114,7 +114,7 @@ private static long diffInSeconds(ZonedDateTime start, ZonedDateTime end) { private static int safeInt(long diff) { if (diff > Integer.MAX_VALUE || diff < Integer.MIN_VALUE) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "The DATE_DIFF function resulted in an overflow; the number of units " + "separating two date/datetime instances is too large. Try to use DATE_DIFF with a less precise unit." ); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java index 225bcecacd18..f41c06d13cdf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateDiff.Part; @@ -52,9 +53,9 @@ public static Object process(Object unit, Object startTimestamp, Object endTimes if (datePartField == null) { List similar = Part.findSimilar((String) unit); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), unit); + throw new InvalidArgumentException("A value of {} or their aliases is required; 
received [{}]", Part.values(), unit); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part to add; " + "did you mean {}?", unit, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java index c9ef6431369e..c37e69c132d3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePart.Part; @@ -52,9 +53,9 @@ public static Object process(Object part, Object timestamp, ZoneId zoneId) { if (datePartField == null) { List similar = Part.findSimilar((String) part); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), part); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), part); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Received value [{}] is not valid date part for extraction; " + "did you mean {}?", part, similar diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java index 72125968c274..91a96678189f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessor.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -224,7 +225,7 @@ public Object format(Object timestamp, Object pattern, ZoneId zoneId) { try { return formatterFor(patternString).apply(ta); } catch (IllegalArgumentException | DateTimeException e) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Invalid pattern [{}] is received for formatting date/time [{}]; {}", pattern, timestamp, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java index 2b9801e51388..08fc1d621b50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessor.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.type.DataType; import 
org.elasticsearch.xpack.ql.type.DataTypes; @@ -72,7 +73,7 @@ public Object parse(Object timestamp, Object pattern, ZoneId zoneId) { if (msg.contains("Unable to convert parsed text using any of the specified queries")) { msg = format(null, "Unable to convert parsed text into [{}]", this.parseType); } - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "Invalid {} string [{}] or pattern [{}] is received; {}", this.parseType, timestamp, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java index 03c0e8885296..3ea22f182e35 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.interval.IntervalDayTime; @@ -55,13 +56,9 @@ public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) if (truncateDateField == null) { List similar = Part.findSimilar((String) truncateTo); if (similar.isEmpty()) { - throw new SqlIllegalArgumentException( - "A value of {} or their aliases is required; received [{}]", - Part.values(), - truncateTo - ); + throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), truncateTo); } else { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( 
"Received value [{}] is not valid date part for truncation; " + "did you mean {}?", truncateTo, similar @@ -72,10 +69,10 @@ public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) if (timestamp instanceof ZonedDateTime == false && timestamp instanceof IntervalYearMonth == false && timestamp instanceof IntervalDayTime == false) { - throw new SqlIllegalArgumentException("A date/datetime/interval is required; received [{}]", timestamp); + throw new SqlIllegalArgumentException("A date/datetime/interval is required; received [{}]", timestamp); // verifier checked } if (truncateDateField == Part.WEEK && (timestamp instanceof IntervalDayTime || timestamp instanceof IntervalYearMonth)) { - throw new SqlIllegalArgumentException("Truncating intervals is not supported for {} units", truncateTo); + throw new InvalidArgumentException("Truncating intervals is not supported for {} units", truncateTo); } if (timestamp instanceof ZonedDateTime) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java index c56feacdb9d0..37a801d7ec3b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; @@ -42,7 +43,7 @@ public static GeoShape apply(Object 
input) { try { return new GeoShape(input); } catch (IOException | IllegalArgumentException | ElasticsearchParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [{}] as a geo_shape value", input); + throw new InvalidArgumentException("Cannot parse [{}] as a geo_shape value", input); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java index dee8cdc870e0..5bec38a459c2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathProcessor.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.Processor; import org.elasticsearch.xpack.ql.type.DataTypeConverter; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -38,14 +38,14 @@ public enum MathOperation { long lo = ((Number) l).longValue(); if (lo == Long.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); + throw new InvalidArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); } lo = lo < 0 ? 
-lo : lo; if (l instanceof Integer) { if ((int) lo == Integer.MIN_VALUE) { - throw new QlIllegalArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); + throw new InvalidArgumentException("[" + lo + "] cannot be negated since the result is outside the range"); } return DataTypeConverter.safeToInt(lo); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java index 386686f4f17d..6e5070a40903 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/geo/GeoShape.java @@ -30,6 +30,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantNamedWriteable; @@ -60,7 +61,7 @@ public GeoShape(Object value) throws IOException { try { shape = parse(value); } catch (ParseException ex) { - throw new QlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); + throw new InvalidArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } @@ -69,7 +70,7 @@ public GeoShape(StreamInput in) throws IOException { try { shape = parse(value); } catch (ParseException ex) { - throw new QlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); + throw new InvalidArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 848939662a4f..67756d4951d6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Tuple; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; @@ -706,7 +707,7 @@ public Literal visitDecimalLiteral(DecimalLiteralContext ctx) { try { return new Literal(tuple.v1(), Double.valueOf(StringUtils.parseDouble(tuple.v2())), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { throw new ParsingException(tuple.v1(), siae.getMessage()); } } @@ -717,7 +718,7 @@ public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { try { Number value = StringUtils.parseIntegral(tuple.v2()); return new Literal(tuple.v1(), value, DataTypes.fromJava(value)); - } catch (QlIllegalArgumentException siae) { + } catch (InvalidArgumentException siae) { throw new ParsingException(tuple.v1(), siae.getMessage()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java index 3462a0ed411c..a553d3f64f27 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.parser; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.tree.Source; import 
org.elasticsearch.xpack.sql.SqlClientException; @@ -50,11 +49,6 @@ public String getErrorMessage() { return super.getMessage(); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java index 94861339759c..c7e05d65a037 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.planner; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Node; import org.elasticsearch.xpack.sql.SqlClientException; @@ -48,11 +47,6 @@ public int getColumnNumber() { return column; } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } - @Override public String getMessage() { return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), super.getMessage()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java index 2c70711e1683..bd635e7454d5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.sql.planner; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.sql.SqlClientException; @@ -21,8 +20,4 @@ public PlanningException(Collection 
sources) { super(Failure.failMessage(sources)); } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java index 38f13dbe974f..2e5b2d6a657c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/Check.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.sql.util; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; /** @@ -26,12 +27,6 @@ public static void isTrue(boolean expression, String message) { } } - public static void notNull(Object object, String message) { - if (object == null) { - throw new SqlIllegalArgumentException(message); - } - } - public static void notNull(Object object, String message, Object... values) { if (object == null) { throw new SqlIllegalArgumentException(message, values); @@ -40,7 +35,7 @@ public static void notNull(Object object, String message, Object... 
values) { public static void isFixedNumberAndInRange(Object object, String objectName, Long from, Long to) { if ((object instanceof Number) == false || object instanceof Float || object instanceof Double) { - throw new SqlIllegalArgumentException( + throw new InvalidArgumentException( "A fixed point number is required for [{}]; received [{}]", objectName, object.getClass().getTypeName() @@ -48,7 +43,7 @@ public static void isFixedNumberAndInRange(Object object, String objectName, Lon } Long longValue = ((Number) object).longValue(); if (longValue < from || longValue > to) { - throw new SqlIllegalArgumentException("[{}] out of the allowed range [{}, {}], received [{}]", objectName, from, to, longValue); + throw new InvalidArgumentException("[{}] out of the allowed range [{}, {}], received [{}]", objectName, from, to, longValue); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index aca64a467934..5c3fc378d90c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -146,7 +146,7 @@ public void testMultiValuedDocValue() { DocumentField field = new DocumentField(fieldName, asList("a", "b")); SearchHit hit = new SearchHit(1, 
null); hit.setDocumentField(fieldName, field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); } @@ -165,7 +165,7 @@ public void testMultiValuedSource() { DocumentField field = new DocumentField("a", asList(value, value)); SearchHit hit = new SearchHit(1, null); hit.setDocumentField("a", field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); } @@ -207,7 +207,7 @@ public void testMultipleGeoShapeExtraction() { SearchHit hit = new SearchHit(1, null); hit.setDocumentField(fieldName, field); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, randomBoolean() ? 
GEO_SHAPE : SHAPE, UTC, LENIENT); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java index b8fa3e4def77..e8014903699c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/CastProcessorTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.type.DataTypeConverter.DefaultConverter; import org.elasticsearch.xpack.sql.type.SqlDataTypeConverter.SqlConverter; @@ -38,7 +38,7 @@ public void testApply() { CastProcessor proc = new CastProcessor(DefaultConverter.STRING_TO_INT); assertEquals(null, proc.process(null)); assertEquals(1, proc.process("1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> proc.process("1.2")); + Exception e = expectThrows(InvalidArgumentException.class, () -> proc.process("1.2")); assertEquals("cannot cast [1.2] to [integer]", e.getMessage()); } { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java index 98cfda78b0ff..98300eae130d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateAddProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -62,28 +63,28 @@ protected DateAddProcessor mutateInstance(DateAddProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l(5), l(10), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l("days"), l("foo"), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A number is required; received [foo]", siae.getMessage()); + assertEquals("A number is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateAdd(Source.EMPTY, l("days"), l(10), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateAdd(Source.EMPTY, l("invalid"), l(10), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) @@ -91,16 +92,16 @@ public void 
testInvalidInputs() { assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateAdd(Source.EMPTY, l("quertar"), l(10), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", siae.getMessage()); + assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java index c15d00250b23..b4d760d4cae7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateDiffProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -60,32 +61,32 @@ protected DateDiffProcessor mutateInstance(DateDiffProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l(5), 
randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l("days"), l("foo"), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateDiff(Source.EMPTY, l("days"), randomDatetimeLiteral(), l("foo"), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) @@ -93,16 +94,16 @@ public void testInvalidInputs() { assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("quertar"), randomDatetimeLiteral(), randomDatetimeLiteral(), randomZone()).makePipe() .asProcessor() .process(null) ); - assertEquals("Received value [quertar] is not valid date part to add; did you mean [quarter, quarters]?", siae.getMessage()); + assertEquals("Received value [quertar] is 
not valid date part to add; did you mean [quarter, quarters]?", e.getMessage()); } public void testWithNulls() { @@ -305,114 +306,114 @@ public void testOverflow() { Literal dt1 = l(dateTime(-99992022, 12, 31, 20, 22, 33, 123456789, ZoneId.of("Etc/GMT-5"))); Literal dt2 = l(dateTime(99992022, 4, 18, 8, 33, 22, 987654321, ZoneId.of("Etc/GMT+5"))); - SqlIllegalArgumentException siae = expectThrows( - SqlIllegalArgumentException.class, + Exception e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("month"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("dayofyear"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("day"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("week"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("weekday"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("hours"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("minute"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("second"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("milliseconds"), dt2, dt1, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("mcs"), dt1, dt2, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateDiff(Source.EMPTY, l("nanoseconds"), dt2, dt1, zoneId).makePipe().asProcessor().process(null) ); assertEquals( "The DATE_DIFF function resulted in an overflow; the number of units separating two date/datetime " + "instances is too large. 
Try to use DATE_DIFF with a less precise unit.", - siae.getMessage() + e.getMessage() ); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java index 436c49c589af..615275f60bec 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -57,33 +58,33 @@ protected DatePartProcessor mutateInstance(DatePartProcessor instance) { } public void testInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DatePart(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DatePart(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new 
DatePart(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, " + "MICROSECOND, NANOSECOND, TZOFFSET] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DatePart(Source.EMPTY, l("dayfyear"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?", siae.getMessage()); + assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java index 2c83f666d6f3..997447c525e4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFormatProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -70,61 +71,61 @@ protected DateTimeFormatProcessor mutateInstance(DateTimeFormatProcessor instanc } public void testDateTimeFormatInvalidInputs() 
{ - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l("foo"), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/time is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime/time is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, randomDatetimeLiteral(), l(5), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l(dateTime(2019, 9, 3, 18, 10, 37, 0)), l("invalid"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid pattern [invalid] is received for formatting date/time [2019-09-03T18:10:37Z]; Unknown pattern letter: i", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeFormat(Source.EMPTY, l(time(18, 10, 37, 123000000)), l("MM/dd"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid pattern [MM/dd] is received for formatting date/time [18:10:37.123Z]; Unsupported field: MonthOfYear", - siae.getMessage() + e.getMessage() ); } public void testFormatInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Format(Source.EMPTY, l("foo"), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/time is required; received [foo]", siae.getMessage()); + 
assertEquals("A date/datetime/time is required; received [foo]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Format(Source.EMPTY, randomDatetimeLiteral(), l(5), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Format(Source.EMPTY, l(time(18, 10, 37, 123000000)), l("MM/dd"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid pattern [MM/dd] is received for formatting date/time [18:10:37.123Z]; Unsupported field: MonthOfYear", - siae.getMessage() + e.getMessage() ); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java index ac93ee69fa8a..1bac217ef8ac 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeParseProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase; @@ -63,44 +64,44 @@ protected DateTimeParseProcessor mutateInstance(DateTimeParseProcessor instance) } public void testDateTimeInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception 
e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTimeParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid datetime string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid datetime string [2020-04-07] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("2020-04-07"), l("MM/dd"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid datetime string [2020-04-07] or pattern [MM/dd] is received; Text '2020-04-07' could not be parsed at index 2", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid datetime string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [datetime]", - siae.getMessage() + 
e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTimeParse(Source.EMPTY, l("10:20:30.123456789"), l("HH:mm:ss.SSSSSSSSS"), randomZone()).makePipe() .asProcessor() .process(null) @@ -108,96 +109,96 @@ public void testDateTimeInvalidInputs() { assertEquals( "Invalid datetime string [10:20:30.123456789] or pattern [HH:mm:ss.SSSSSSSSS] is received; " + "Unable to convert parsed text into [datetime]", - siae.getMessage() + e.getMessage() ); } public void testTimeInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new TimeParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new TimeParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("11:04:07"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid time string [11:04:07] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid time string [11:04:07] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("11:04:07"), l("HH:mm"), 
randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid time string [11:04:07] or pattern [HH:mm] is received; " + "Text '11:04:07' could not be parsed, unparsed text found at index 5", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new TimeParse(Source.EMPTY, l("07/05/2020"), l("dd/MM/uuuu"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid time string [07/05/2020] or pattern [dd/MM/uuuu] is received; Unable to convert parsed text into [time]", - siae.getMessage() + e.getMessage() ); } public void testDateInvalidInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateParse(Source.EMPTY, l(10), randomStringLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [10]", siae.getMessage()); + assertEquals("A string is required; received [10]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateParse(Source.EMPTY, randomStringLiteral(), l(20), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [20]", siae.getMessage()); + assertEquals("A string is required; received [20]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("07/05/2020"), l("invalid"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Invalid date string [07/05/2020] or pattern [invalid] is received; Unknown pattern letter: i", siae.getMessage()); + assertEquals("Invalid date string [07/05/2020] or pattern [invalid] is received; Unknown pattern letter: i", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + 
InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("07/05/2020"), l("dd/MM"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid date string [07/05/2020] or pattern [dd/MM] is received; " + "Text '07/05/2020' could not be parsed, unparsed text found at index 5", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("11:04:07"), l("HH:mm:ss"), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "Invalid date string [11:04:07] or pattern [HH:mm:ss] is received; Unable to convert parsed text into [date]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateParse(Source.EMPTY, l("05/2020 11:04:07"), l("MM/uuuu HH:mm:ss"), randomZone()).makePipe() .asProcessor() .process(null) ); assertEquals( "Invalid date string [05/2020 11:04:07] or pattern [MM/uuuu HH:mm:ss] is received; Unable to convert parsed text into [date]", - siae.getMessage() + e.getMessage() ); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java index 3ddddb4392d3..f49c7b76d0d8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import 
org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.ql.tree.Source; @@ -76,45 +77,45 @@ public void testInvalidInputs() { TemporalAmount duration = Duration.ofDays(42).plusHours(12).plusMinutes(23).plusSeconds(12).plusNanos(143000000); Literal dayToSecond = intervalLiteral(duration, INTERVAL_DAY_TO_SECOND); - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A string is required; received [5]", siae.getMessage()); + assertEquals("A string is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("A date/datetime/interval is required; received [foo]", siae.getMessage()); + assertEquals("A date/datetime/interval is required; received [foo]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); assertEquals( "A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", - siae.getMessage() + e.getMessage() ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null) ); - assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", siae.getMessage()); + assertEquals("Received 
value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("weeks"), yearToMonth, null).makePipe().asProcessor().process(null) ); - assertEquals("Truncating intervals is not supported for weeks units", siae.getMessage()); + assertEquals("Truncating intervals is not supported for weeks units", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("week"), dayToSecond, null).makePipe().asProcessor().process(null) ); - assertEquals("Truncating intervals is not supported for week units", siae.getMessage()); + assertEquals("Truncating intervals is not supported for week units", e.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java index 5eb87ae736e8..d54580098fa3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.geo; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.geo.GeoShape; @@ -28,17 +29,17 @@ public void testApply() { public void testTypeCheck() { StWkttosqlProcessor procPoint = new StWkttosqlProcessor(); - QlIllegalArgumentException siae = 
expectThrows(QlIllegalArgumentException.class, () -> procPoint.process(42)); - assertEquals("A string is required; received [42]", siae.getMessage()); + Exception e = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process(42)); + assertEquals("A string is required; received [42]", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("some random string")); - assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("some random string")); + assertEquals("Cannot parse [some random string] as a geo_shape value", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); - assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("point (foo bar)")); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", e.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); - assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> procPoint.process("point (10 10")); + assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", e.getMessage()); } public void testCoerce() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java index 3049567d6690..e1e236413d58 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathOperationTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.math; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import java.util.Arrays; @@ -17,17 +17,17 @@ public class MathOperationTests extends ESTestCase { public void testAbsLongMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Long.MIN_VALUE)); + InvalidArgumentException ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Long.MIN_VALUE)); assertTrue(ex.getMessage().contains("cannot be negated")); } public void testAbsIntegerMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Integer.MIN_VALUE)); + InvalidArgumentException ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Integer.MIN_VALUE)); assertTrue(ex.getMessage().contains("cannot be negated")); } public void testAbsShortMax() { - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> MathOperation.ABS.apply(Short.MIN_VALUE)); + Exception ex = expectThrows(InvalidArgumentException.class, () -> MathOperation.ABS.apply(Short.MIN_VALUE)); assertTrue(ex.getMessage().contains("out of")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java index c0e436cc4890..e4fa7dab1db9 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -62,35 +63,32 @@ public void testLeftFunctionWithEdgeCases() { } public void testLeftFunctionInputValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Left(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + 
assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Left(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Left(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } public void testRightFunctionWithValidInput() { @@ -111,35 +109,32 @@ public void testRightFunctionWithEdgeCases() { } public void testRightFunctionInputValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Right(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", 
e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Right(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Right(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } public void testRepeatFunctionWithValidInput() { @@ -158,34 +153,31 @@ public void testRepeatFunctionWithEdgeCases() { } public void testRepeatFunctionInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Repeat(EMPTY, l(5), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - 
SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo bar"), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [count]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [count]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l((long) Integer.MIN_VALUE - 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [-2147483649]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[count] out of the allowed range [-2147483648, 2147483647], received [2147483648]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, - () -> new Repeat(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null) - ); - assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", siae.getMessage()); + e = expectThrows(InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); + assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java index b938876fc981..46beb99eb9a7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -68,59 +69,59 @@ public void testInsertWithEdgeCases() { } public void testInsertInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Insert(EMPTY, l(5), l(1), l(3), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l(3), l(66)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [66]", siae.getMessage()); + assertEquals("A string/char is required; received [66]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l("c"), l(3), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received 
[java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l('z'), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [length]; received [java.lang.Character]", siae.getMessage()); + assertEquals("A fixed point number is required for [length]; received [java.lang.Character]", e.getMessage()); assertEquals( "baroobar", new Insert(EMPTY, l("foobar"), l(Integer.MIN_VALUE + 1), l(1), l("bar")).makePipe().asProcessor().process(null) ); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE), l(1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals("foobar", new Insert(EMPTY, l("foobar"), l(Integer.MAX_VALUE), l(1), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l((long) Integer.MAX_VALUE + 1), l(1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); assertEquals("barfoobar", new Insert(EMPTY, l("foobar"), l(1), l(0), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - 
SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l(-1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", e.getMessage()); assertEquals("bar", new Insert(EMPTY, l("foobar"), l(1), l(Integer.MAX_VALUE), l("bar")).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Insert(EMPTY, l("foobar"), l(1), l((long) Integer.MAX_VALUE + 1), l("bar")).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java index 3b8098978f01..66124d2b1dcc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import 
org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -69,36 +70,36 @@ public void testLocateFunctionWithEdgeCasesInputs() { } public void testLocateFunctionValidatingInputs() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Locate(EMPTY, l(5), l("foobarbar"), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( + e = expectThrows( SqlIllegalArgumentException.class, () -> new Locate(EMPTY, l("foo"), l(1), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [1]", siae.getMessage()); + assertEquals("A string/char is required; received [1]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("foobarbar"), l("bar"), l('c')).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received [java.lang.Character]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.Character]", e.getMessage()); assertEquals(4, new Locate(EMPTY, l("bar"), l("foobarbar"), l(Integer.MIN_VALUE + 1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("bar"), l("foobarbar"), l(Integer.MIN_VALUE)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals(0, new Locate(EMPTY, l("bar"), l("foobarbar"), 
l(Integer.MAX_VALUE)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Locate(EMPTY, l("bar"), l("foobarbar"), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java index 2ce90e121b0e..3e98afb36fd9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -64,50 +65,50 @@ public void testSubstringFunctionWithEdgeCases() { } public void testSubstringFunctionInputsValidation() { - SqlIllegalArgumentException siae = expectThrows( + Exception e = expectThrows( SqlIllegalArgumentException.class, () -> new Substring(EMPTY, l(5), l(1), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A string/char is required; received [5]", siae.getMessage()); + 
assertEquals("A string/char is required; received [5]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l("baz")).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [length]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [length]; received [java.lang.String]", e.getMessage()); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l("bar"), l(3)).makePipe().asProcessor().process(null) ); - assertEquals("A fixed point number is required for [start]; received [java.lang.String]", siae.getMessage()); + assertEquals("A fixed point number is required for [start]; received [java.lang.String]", e.getMessage()); assertEquals("f", new Substring(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE + 1), l(1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(Integer.MIN_VALUE), l(1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [-2147483648]", e.getMessage()); assertEquals("", new Substring(EMPTY, l("foobarbar"), l(Integer.MAX_VALUE), l(1)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l((long) Integer.MAX_VALUE + 1), l(1)).makePipe().asProcessor().process(null) ); - assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received 
[2147483648]", siae.getMessage()); + assertEquals("[start] out of the allowed range [-2147483647, 2147483647], received [2147483648]", e.getMessage()); assertEquals("", new Substring(EMPTY, l("foobarbar"), l(1), l(0)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l(-1)).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [-1]", e.getMessage()); assertEquals("foobarbar", new Substring(EMPTY, l("foobarbar"), l(1), l(Integer.MAX_VALUE)).makePipe().asProcessor().process(null)); - siae = expectThrows( - SqlIllegalArgumentException.class, + e = expectThrows( + InvalidArgumentException.class, () -> new Substring(EMPTY, l("foobarbar"), l(1), l((long) Integer.MAX_VALUE + 1)).makePipe().asProcessor().process(null) ); - assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", siae.getMessage()); + assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java index 09f814da8e5e..00c13bbdd801 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/SqlBinaryArithmeticTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import org.elasticsearch.test.ESTestCase; 
+import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.type.DataType; @@ -24,7 +25,6 @@ import static org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Arithmetics.mod; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; -import static org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_DAY_TO_HOUR; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.INTERVAL_HOUR; @@ -252,13 +252,13 @@ public void testMulIntegerIntervalYearMonthOverflow() { public void testMulLongIntervalYearMonthOverflow() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(Long.MAX_VALUE))); + Exception expect = expectThrows(InvalidArgumentException.class, () -> mul(l, L(Long.MAX_VALUE))); assertEquals("[9223372036854775807] out of [integer] range", expect.getMessage()); } public void testMulUnsignedLongIntervalYearMonthOverflow() { Literal l = interval(Period.ofYears(1), INTERVAL_YEAR); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); + Exception expect = expectThrows(InvalidArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); assertEquals("[18446744073709551615] out of [long] range", expect.getMessage()); } @@ -270,7 +270,7 @@ public void testMulLongIntervalDayTimeOverflow() { public void testMulUnsignedLongIntervalDayTimeOverflow() { Literal l = interval(Duration.ofDays(1), INTERVAL_DAY); - QlIllegalArgumentException expect = expectThrows(QlIllegalArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); + Exception expect = 
expectThrows(InvalidArgumentException.class, () -> mul(l, L(UNSIGNED_LONG_MAX))); assertEquals("[18446744073709551615] out of [long] range", expect.getMessage()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 32cfdc158d24..7bfb9b2e7a9f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expression.TypeResolution; @@ -349,11 +349,8 @@ public void testNullFoldingIsNullWithCast() { final IsNull isNullOpt = (IsNull) foldNull.rule(isNull); assertEquals(isNull, isNullOpt); - QlIllegalArgumentException sqlIAE = expectThrows( - QlIllegalArgumentException.class, - () -> isNullOpt.asPipe().asProcessor().process(null) - ); - assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", sqlIAE.getMessage()); + Exception e = expectThrows(InvalidArgumentException.class, () -> isNullOpt.asPipe().asProcessor().process(null)); + assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", e.getMessage()); isNull = new IsNull(EMPTY, new Cast(EMPTY, NULL, randomFrom(DataTypes.types()))); assertTrue((Boolean) ((IsNull) foldNull.rule(isNull)).asPipe().asProcessor().process(null)); @@ -377,11 +374,8 @@ public void testNullFoldingIsNotNullWithCast() { final IsNotNull isNotNullOpt = (IsNotNull) foldNull.rule(isNotNull); assertEquals(isNotNull, isNotNullOpt); - 
QlIllegalArgumentException sqlIAE = expectThrows( - QlIllegalArgumentException.class, - () -> isNotNullOpt.asPipe().asProcessor().process(null) - ); - assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", sqlIAE.getMessage()); + Exception e = expectThrows(InvalidArgumentException.class, () -> isNotNullOpt.asPipe().asProcessor().process(null)); + assertEquals("cannot cast [foo] to [date]: Text 'foo' could not be parsed at index 0", e.getMessage()); isNotNull = new IsNotNull(EMPTY, new Cast(EMPTY, NULL, randomFrom(DataTypes.types()))); assertFalse((Boolean) ((IsNotNull) foldNull.rule(isNotNull)).asPipe().asProcessor().process(null)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java index 405ddbf4779f..80907230e828 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/SqlDataTypeConverterTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.type; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; @@ -97,7 +97,7 @@ public void testConversionToLong() { assertEquals(10L, conversion.convert(10.0)); assertEquals(10L, conversion.convert(10.1)); assertEquals(11L, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -139,7 
+139,7 @@ public void testConversionToLong() { assertNull(conversion.convert(null)); assertEquals(1L, conversion.convert("1")); assertEquals(0L, conversion.convert("-0")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [long]", e.getMessage()); } } @@ -152,7 +152,7 @@ public void testConversionToDate() { assertEquals(date(10L), conversion.convert(10.0)); assertEquals(date(10L), conversion.convert(10.1)); assertEquals(date(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -162,7 +162,7 @@ public void testConversionToDate() { assertEquals(date(bi.longValue()), conversion.convert(bi)); BigInteger tooLarge = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(tooLarge)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(tooLarge)); assertEquals("[" + tooLarge + "] out of [long] range", e.getMessage()); } { @@ -213,15 +213,15 @@ public void testConversionToDate() { Converter forward = converterFor(DATE, KEYWORD); Converter back = converterFor(KEYWORD, DATE); assertEquals(asDateOnly(zdt), back.convert(forward.convert(zdt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [date]: Text '0xff' could not be parsed at index 0", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert("2020-02-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("2020-02-")); assertEquals("cannot cast [2020-02-] to [date]: Text '2020-02-' could not be parsed at index 8", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("2020-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("2020-")); assertEquals("cannot cast [2020-] to [date]: Text '2020-' could not be parsed at index 5", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-2020-02-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-2020-02-")); assertEquals("cannot cast [-2020-02-] to [date]: Text '-2020-02-' could not be parsed at index 9", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-2020-")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-2020-")); assertEquals("cannot cast [-2020-] to [date]: Text '-2020-' could not be parsed at index 6", e.getMessage()); } } @@ -234,7 +234,7 @@ public void testConversionToTime() { assertEquals(time(10L), conversion.convert(10.0)); assertEquals(time(10L), conversion.convert(10.1)); assertEquals(time(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -244,7 +244,7 @@ public void testConversionToTime() { assertEquals(time(bi.longValue()), conversion.convert(bi)); BigInteger tooLarge = bi.add(BigInteger.valueOf(Long.MAX_VALUE)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(tooLarge)); + Exception e = expectThrows(InvalidArgumentException.class, () -> 
conversion.convert(tooLarge)); assertEquals("[" + tooLarge + "] out of [long] range", e.getMessage()); } { @@ -285,7 +285,7 @@ public void testConversionToTime() { Converter forward = converterFor(TIME, KEYWORD); Converter back = converterFor(KEYWORD, TIME); assertEquals(ot, back.convert(forward.convert(ot))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [time]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -298,7 +298,7 @@ public void testConversionToDateTime() { assertEquals(dateTime(10L), conversion.convert(10.0)); assertEquals(dateTime(10L), conversion.convert(10.1)); assertEquals(dateTime(11L), conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); } { @@ -349,7 +349,7 @@ public void testConversionToDateTime() { Converter forward = converterFor(DATETIME, KEYWORD); Converter back = converterFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [datetime]: Text '0xff' could not be parsed at index 0", e.getMessage()); } } @@ -403,7 +403,7 @@ public void testConversionToFloat() { assertEquals(1.0f, (float) conversion.convert("1"), 0); assertEquals(0.0f, (float) conversion.convert("-0"), 0); assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [float]", e.getMessage()); } } @@ -457,7 +457,7 @@ public void testConversionToDouble() { assertEquals(1.0, (double) conversion.convert("1"), 0); assertEquals(0.0, (double) conversion.convert("-0"), 0); assertEquals(12.776, (double) conversion.convert("12.776"), 0.00001); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0xff")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [double]", e.getMessage()); } } @@ -522,17 +522,17 @@ public void testConversionToBoolean() { assertEquals(true, conversion.convert("True")); assertEquals(false, conversion.convert("fAlSe")); // Everything else should fail - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10")); assertEquals("cannot cast [10] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("-1")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-1")); assertEquals("cannot cast [-1] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("0")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0")); assertEquals("cannot cast [0] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("blah")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("blah")); assertEquals("cannot cast [blah] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("Yes")); + e = expectThrows(InvalidArgumentException.class, () -> 
conversion.convert("Yes")); assertEquals("cannot cast [Yes] to [boolean]", e.getMessage()); - e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("nO")); + e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("nO")); assertEquals("cannot cast [nO] to [boolean]", e.getMessage()); } } @@ -548,7 +548,7 @@ public void testConversionToUnsignedLong() { assertEquals(BigInteger.valueOf(zdt.toEpochSecond() * 1000), conversion.convert(zdt)); ZonedDateTime zdtn = asDateOnly(-l); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(zdtn)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(zdtn)); assertEquals("[" + zdtn.toEpochSecond() * 1000 + "] out of [unsigned_long] range", e.getMessage()); } { @@ -569,7 +569,7 @@ public void testConversionToInt() { assertEquals(10, conversion.convert(10.0)); assertEquals(10, conversion.convert(10.1)); assertEquals(11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Long.MAX_VALUE)); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } { @@ -579,7 +579,7 @@ public void testConversionToInt() { assertEquals(86400000, conversion.convert(asDateOnly(123456789L))); assertEquals(172800000, conversion.convert(asDateOnly(223456789L))); assertEquals(-172800000, conversion.convert(asDateOnly(-123456789L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(Long.MAX_VALUE))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(Long.MAX_VALUE))); assertEquals("[9223372036828800000] out of [integer] range", e.getMessage()); } { @@ -600,7 +600,7 @@ public void testConversionToInt() { // Nanos are ignored, only millis are used assertEquals(62123, 
conversion.convert(asDateTimeWithNanos("1970-01-01T00:01:02.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Long.MAX_VALUE)) ); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); @@ -615,23 +615,23 @@ public void testConversionToShort() { assertEquals((short) 10, conversion.convert(10.0)); assertEquals((short) 10, conversion.convert(10.1)); assertEquals((short) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE)); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } { Converter conversion = converterFor(DATE, to); assertNull(conversion.convert(null)); assertEquals((short) 0, conversion.convert(asDateOnly(12345678L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [short] range", e.getMessage()); } { Converter conversion = converterFor(TIME, to); assertNull(conversion.convert(null)); assertEquals((short) 12345, conversion.convert(asTimeOnly(12345L))); - Exception e1 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123456789L))); + Exception e1 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(-123456789L))); assertEquals("[49343211] out of [short] range", e1.getMessage()); - Exception e2 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); + Exception e2 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); assertEquals("[37056789] 
out of [short] range", e2.getMessage()); } { @@ -642,7 +642,7 @@ public void testConversionToShort() { // Nanos are ignored, only millis are used assertEquals((short) 1123, conversion.convert(asDateTimeWithNanos("1970-01-01T00:00:01.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Integer.MAX_VALUE)) ); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); @@ -657,23 +657,23 @@ public void testConversionToByte() { assertEquals((byte) 10, conversion.convert(10.0)); assertEquals((byte) 10, conversion.convert(10.1)); assertEquals((byte) 11, conversion.convert(10.6)); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Short.MAX_VALUE)); assertEquals("[" + Short.MAX_VALUE + "] out of [byte] range", e.getMessage()); } { Converter conversion = converterFor(DATE, to); assertNull(conversion.convert(null)); assertEquals((byte) 0, conversion.convert(asDateOnly(12345678L))); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [byte] range", e.getMessage()); } { Converter conversion = converterFor(TIME, to); assertNull(conversion.convert(null)); assertEquals((byte) 123, conversion.convert(asTimeOnly(123L))); - Exception e1 = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123L))); + Exception e1 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(-123L))); assertEquals("[86399877] out of [byte] range", e1.getMessage()); - Exception e2 = expectThrows(QlIllegalArgumentException.class, () -> 
conversion.convert(asTimeOnly(123456789L))); + Exception e2 = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); assertEquals("[37056789] out of [byte] range", e2.getMessage()); } { @@ -684,7 +684,7 @@ public void testConversionToByte() { // Nanos are ignored, only millis are used assertEquals((byte) 123, conversion.convert(asDateTimeWithNanos("1970-01-01T00:00:00.123456789Z"))); Exception e = expectThrows( - QlIllegalArgumentException.class, + InvalidArgumentException.class, () -> conversion.convert(DateUtils.asDateTimeWithMillis(Integer.MAX_VALUE)) ); assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage()); @@ -764,7 +764,7 @@ public void testStringToIp() { Converter conversion = converterFor(KEYWORD, IP); assertNull(conversion.convert(null)); assertEquals("192.168.1.1", conversion.convert("192.168.1.1")); - Exception e = expectThrows(QlIllegalArgumentException.class, () -> conversion.convert("10.1.1.300")); + Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10.1.1.300")); assertEquals("[10.1.1.300] is not a valid IPv4 or IPv6 address", e.getMessage()); } From b10d0d10c53584191433e13632ed8a15b00abc1c Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 2 Nov 2023 10:56:59 -0400 Subject: [PATCH 40/47] Disable weight_matches when kNN query is present (#101713) 8.10 added a new flag called `weight_matches` and we use it by default when highlighting. However, every hybrid search with kNN will fail with cryptic errors. This PR disables weight_matches mode when kNN queries are present. Supporting weight_matches & kNN will take more work. 
closes: https://github.com/elastic/elasticsearch/issues/101667 --- docs/changelog/101713.yaml | 5 ++ .../test/search.highlight/10_unified.yml | 47 +++++++++++++++++++ .../uhighlight/CustomUnifiedHighlighter.java | 8 ++++ 3 files changed, 60 insertions(+) create mode 100644 docs/changelog/101713.yaml diff --git a/docs/changelog/101713.yaml b/docs/changelog/101713.yaml new file mode 100644 index 000000000000..c3addf929658 --- /dev/null +++ b/docs/changelog/101713.yaml @@ -0,0 +1,5 @@ +pr: 101713 +summary: Disable `weight_matches` when kNN query is present +area: Highlighting +type: bug +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 1a03896f6d08..4607ae758b91 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -93,3 +93,50 @@ teardown: - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} +--- +"Test hybrid search with knn where automatically disables weighted mode": - skip: + version: ' - 8.11.99' + reason: 'kNN was not correctly skipped until 8.12' + + - do: + indices.create: + index: test-highlighting-knn + body: + mappings: + "properties": + "vectors": + "type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + "text": + "type": "text" + "fields": + "fvh": + "type": "text" + "term_vector": "with_positions_offsets" + "postings": + "type": "text" + "index_options": "offsets" + - do: + index: + index: test-highlighting-knn + id: "1" + body: + "text" : "The quick brown fox is brown." 
+ "vectors": [1, 2] + - do: + indices.refresh: {} + + - do: + search: + index: test-highlighting-knn + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index d5c3bdbbc65c..5c1381f73001 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; import org.elasticsearch.search.runtime.AbstractScriptFieldQuery; +import org.elasticsearch.search.vectors.KnnScoreDocQuery; import java.io.IOException; import java.text.BreakIterator; @@ -249,6 +250,13 @@ public void visitLeaf(Query leafQuery) { if (leafQuery.getClass().getSimpleName().equals("LateParsingQuery")) { hasUnknownLeaf[0] = true; } + /** + * KnnScoreDocQuery requires the same reader that built the docs + * When using {@link HighlightFlag#WEIGHT_MATCHES} different readers are used and isn't supported by this query + */ + if (leafQuery instanceof KnnScoreDocQuery) { + hasUnknownLeaf[0] = true; + } super.visitLeaf(query); } From 5c4ff982b1c267d1d6e60c1aaca006975728ca32 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 2 Nov 2023 16:14:40 +0100 Subject: [PATCH 41/47] Remove explicit 
SearchResponse references from test framework module (#101279) Remove explicit SearchResponse references from test module --- .../bucket/AbstractTermsTestCase.java | 61 +-- .../metrics/AbstractGeoTestCase.java | 43 +- .../geo/BasePointShapeQueryTestCase.java | 429 +++++++++--------- .../geo/DatelinePointShapeQueryTestCase.java | 39 +- .../search/geo/GeoShapeIntegTestCase.java | 49 +- .../search/geo/GeoShapeQueryTestCase.java | 12 +- 6 files changed, 314 insertions(+), 319 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java index ea94f342a953..edadf15af5ca 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -16,6 +15,7 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; public abstract class AbstractTermsTestCase extends ESIntegTestCase { @@ -33,34 +33,43 @@ private static long sumOfDocCounts(Terms terms) { public void testOtherDocCount(String... 
fieldNames) { for (String fieldName : fieldNames) { - SearchResponse allTerms = prepareSearch("idx").addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(fieldName) - .size(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ).get(); - assertNoFailures(allTerms); + assertResponse( + prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(fieldName) + .size(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ), + allTerms -> { + assertNoFailures(allTerms); - Terms terms = allTerms.getAggregations().get("terms"); - assertEquals(0, terms.getSumOfOtherDocCounts()); // size is 0 - final long sumOfDocCounts = sumOfDocCounts(terms); - final int totalNumTerms = terms.getBuckets().size(); + Terms terms = allTerms.getAggregations().get("terms"); + assertEquals(0, terms.getSumOfOtherDocCounts()); // size is 0 + final long sumOfDocCounts = sumOfDocCounts(terms); + final int totalNumTerms = terms.getBuckets().size(); - for (int size = 1; size < totalNumTerms + 2; size += randomIntBetween(1, 5)) { - for (int shardSize = size; shardSize <= totalNumTerms + 2; shardSize += randomIntBetween(1, 5)) { - SearchResponse resp = prepareSearch("idx").addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(fieldName) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ).get(); - assertNoFailures(resp); - terms = resp.getAggregations().get("terms"); - assertEquals(Math.min(size, totalNumTerms), terms.getBuckets().size()); - assertEquals(sumOfDocCounts, sumOfDocCounts(terms)); + for (int size = 1; size < totalNumTerms + 2; size += randomIntBetween(1, 5)) { + for (int shardSize = size; shardSize <= totalNumTerms + 2; shardSize += randomIntBetween(1, 5)) { + final int finalSize = size; + assertResponse( + prepareSearch("idx").addAggregation( + new 
TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(fieldName) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ), + response -> { + assertNoFailures(response); + Terms innerTerms = response.getAggregations().get("terms"); + assertEquals(Math.min(finalSize, totalNumTerms), innerTerms.getBuckets().size()); + assertEquals(sumOfDocCounts, sumOfDocCounts(innerTerms)); + } + ); + } + } } - } + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 7b4e591051e6..2138c0f750ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.geo.SpatialPoint; @@ -30,6 +29,7 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertCheckedResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -229,25 +229,28 @@ public void setupSuiteScopeCluster() throws Exception { // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for // NUMBER_FIELD_NAME and it is the correct value. 
Following this initial change its seems that this call was getting // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type. - SearchResponse response = prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) - .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) - .setSize(5000) - .get(); - assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; - XContentBuilder builder = XContentFactory.jsonBuilder(); - ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); - logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); - for (int i = 0; i < totalHits; i++) { - SearchHit searchHit = response.getHits().getAt(i); - assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); - DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME); - - assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); - Long value = hitField.getValue(); - assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i)); - } - assertThat(totalHits, equalTo(2000L)); + assertCheckedResponse( + prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) + .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) + .setSize(5000), + response -> { + assertNoFailures(response); + long totalHits = response.getHits().getTotalHits().value; + XContentBuilder builder = XContentFactory.jsonBuilder(); + ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); + logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); + for (int i = 0; i < totalHits; i++) { + SearchHit searchHit = response.getHits().getAt(i); + assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); + DocumentField 
hitField = searchHit.field(NUMBER_FIELD_NAME); + + assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); + Long value = hitField.getValue(); + assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i)); + } + assertThat(totalHits, equalTo(2000L)); + } + ); } private SpatialPoint computeCentroid(SpatialPoint[] points) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 6abce0455631..ed6f0e1c87f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.GeometryNormalizer; @@ -47,7 +46,8 @@ import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -111,22 +111,24 @@ public void testIndexPointsFilterRectangle() throws Exception { .get(); Geometry geometry = new Rectangle(-45, 45, 45, -45); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) 
- .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); // default query, without specifying relation (expect intersects) - searchResponse = client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)).get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsCircle() throws Exception { @@ -177,14 +179,15 @@ public void testIndexPointsPolygon() throws Exception { Polygon polygon = new Polygon(new LinearRing(new double[] { -35, -35, -25, -25, -35 }, new double[] { -35, -25, -25, -35, -35 })); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, 
polygon).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); - assertThat(searchHits.getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), + response -> { + SearchHits searchHits = response.getHits(); + assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsMultiPolygon() throws Exception { @@ -218,47 +221,44 @@ public void testIndexPointsMultiPolygon() throws Exception { ); MultiPolygon multiPolygon = new MultiPolygon(List.of(encloseDocument1Cb, encloseDocument2Cb)); - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); - assertThat(searchResponse.getHits().getAt(1).getId(), not(equalTo("2"))); - } - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); - assertThat(searchResponse.getHits().getAt(1).getId(), not(equalTo("2"))); - } - { - SearchResponse searchResponse = 
client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); - } - { - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(0)); - } + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); + assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); + } + ); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); + assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); + } + ); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); 
+ assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getHits().length, equalTo(0)); + } + ); } public void testIndexPointsRectangle() throws Exception { @@ -279,14 +279,15 @@ public void testIndexPointsRectangle() throws Exception { Rectangle rectangle = new Rectangle(-50, -40, -45, -55); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); } public void testIndexPointsIndexedRectangle() throws Exception { @@ -332,30 +333,31 @@ public void testIndexPointsIndexedRectangle() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - queryBuilder().shapeQuery(defaultFieldName, "shape1") - .relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); + 
assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery( + queryBuilder().shapeQuery(defaultFieldName, "shape1") + .relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); + } + ); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("point2")); - - searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - queryBuilder().shapeQuery(defaultFieldName, "shape2") - .relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery( + queryBuilder().shapeQuery(defaultFieldName, "shape2") + .relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + 0L + ); } public void testWithInQueryLine() throws Exception { @@ -421,34 +423,29 @@ public void testQueryPoint() throws Exception { .get(); Point point = new Point(-35, -25); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits 
searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + 0L + ); } public void testQueryMultiPoint() throws Exception { @@ -463,34 +460,28 @@ public void testQueryMultiPoint() throws Exception { MultiPoint multiPoint = new MultiPoint(List.of(new Point(-35, -25), new Point(-15, -5))); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.WITHIN)) - 
.get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.CONTAINS)), + 0L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPoint).relation(ShapeRelation.DISJOINT)), + 0L + ); } public void testQueryPointFromGeoJSON() throws Exception { @@ -507,34 +498,30 @@ public void testQueryPointFromGeoJSON() throws Exception { client().index(new IndexRequest(defaultIndexName).id("1").source(doc1, XContentType.JSON).setRefreshPolicy(IMMEDIATE)).actionGet(); Point point = new Point(-35, -25); - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = 
client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(1, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals(0, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + 1L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + 0L + ); + } /** @@ -570,34 +557,34 @@ public void testQueryPointFromMultiPoint() throws Exception { for (Point point : new Point[] { pointA, pointB, pointC, pointD, pointInvalid }) { int expectedDocs = point.equals(pointInvalid) ? 0 : 1; int disjointDocs = point.equals(pointInvalid) ? 
1 : 0; - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc matches %s" + point, expectedDocs, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc WITHIN %s" + point, 0, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc CONTAINS %s" + point, expectedDocs, searchHits.getTotalHits().value); - } - { - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)) - .get(); - SearchHits searchHits = response.getHits(); - assertEquals("Doc DISJOINT with %s" + point, disjointDocs, searchHits.getTotalHits().value); - } + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point)), + expectedDocs + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.WITHIN)), + 0L + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.CONTAINS)), + expectedDocs + ); + + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + 
.setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.DISJOINT)), + disjointDocs + ); } } @@ -617,13 +604,12 @@ public void testIndexPointsFromLine() throws Exception { } client().admin().indices().prepareRefresh(defaultIndexName).get(); // all points from a line intersect with the line - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.INTERSECTS)) - .get(); - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo((long) line.length())); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.INTERSECTS)), + line.length() + ); } public void testIndexPointsFromPolygon() throws Exception { @@ -643,13 +629,12 @@ public void testIndexPointsFromPolygon() throws Exception { } client().admin().indices().prepareRefresh(defaultIndexName).get(); // all points from a polygon intersect with the polygon - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)) - .get(); - assertNoFailures(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo((long) linearRing.length())); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), + linearRing.length() + ); } /** Only LegacyGeoShape has limited support, so other tests will ignore nothing */ diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index fc8510330fd2..751a4d835610 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.geometry.LinearRing; import org.elasticsearch.geometry.MultiPolygon; @@ -21,6 +20,7 @@ import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; @@ -65,11 +65,12 @@ public void testRectangleSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits = response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("1", searchHits.getAt(0).getId()); + assertNotEquals("1", searchHits.getAt(1).getId()); + }); } public void testPolygonSpanningDateline(BasePointShapeQueryTestCase tests) throws Exception { @@ -108,13 +109,14 @@ public void testPolygonSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits = response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("1", searchHits.getAt(0).getId()); + assertNotEquals("4", searchHits.getAt(0).getId()); + assertNotEquals("1", searchHits.getAt(1).getId()); + assertNotEquals("4", searchHits.getAt(1).getId()); + }); } public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase tests) throws Exception { @@ -150,10 +152,11 @@ public void testMultiPolygonSpanningDateline(BasePointShapeQueryTestCase { + SearchHits searchHits = 
response.getHits(); + assertEquals(2, searchHits.getTotalHits().value); + assertNotEquals("3", searchHits.getAt(0).getId()); + assertNotEquals("3", searchHits.getAt(1).getId()); + }); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java index 29307f7f63ce..8397dece4f53 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; import org.apache.lucene.util.SloppyMath; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.query.GeoShapeQueryBuilder; @@ -22,6 +21,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -57,17 +57,11 @@ public void testIndexPolygonDateLine() throws Exception { indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))), 1L); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))), 0L); + 
assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))), 1L); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))), 1L); - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } /** The testBulk method uses this only for Geo-specific tests */ @@ -77,23 +71,24 @@ protected void doDistanceAndBoundingBoxTest(String key) { 53 ); - SearchResponse distance = prepareSearch().addStoredField("pin") - .setQuery(geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)) - .get(); - - assertHitCount(distance, 5); - GeoPoint point = new GeoPoint(); - for (SearchHit hit : distance.getHits()) { - String name = hit.getId(); - point.resetFromString(hit.getFields().get("pin").getValue()); - double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); - - assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d)); - assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU"))); - if (key.equals(name)) { - assertThat(dist, closeTo(0d, 0.1d)); + assertResponse( + prepareSearch().addStoredField("pin").setQuery(geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)), + response -> { + assertHitCount(response, 5L); + GeoPoint point = new GeoPoint(); + for (SearchHit hit : response.getHits()) { + String name = hit.getId(); + point.resetFromString(hit.getFields().get("pin").getValue()); + double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); + + assertThat("distance to 
'" + name + "'", dist, lessThanOrEqualTo(425000d)); + assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU"))); + if (key.equals(name)) { + assertThat(dist, closeTo(0d, 0.1d)); + } + } } - } + ); } private static double distance(double lat1, double lon1, double lat2, double lon2) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index b8f5cdf00da3..5bd3a3ba69f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.tests.geo.GeoTestUtil; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.GeometryNormalizer; @@ -37,6 +36,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public abstract class GeoShapeQueryTestCase extends BaseShapeQueryTestCase { @@ -141,11 +141,11 @@ public void testEnvelopeSpanningDateline() throws Exception { } } ); - - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertNotEquals("1", response.getHits().getAt(0).getId()); - assertNotEquals("1", response.getHits().getAt(1).getId()); + assertResponse(client().prepareSearch(defaultIndexName).setQuery(querySupplier.get()), response -> { + assertEquals(2, 
response.getHits().getTotalHits().value); + assertNotEquals("1", response.getHits().getAt(0).getId()); + assertNotEquals("1", response.getHits().getAt(1).getId()); + }); } public void testIndexRectangleSpanningDateLine() throws Exception { From 476dc2cb1a7901e868d42a30bb7f610427e0fb12 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 2 Nov 2023 15:23:35 +0000 Subject: [PATCH 42/47] Process all snapshots in syncShardStatsOnNewMaster (#101702) Fixes bug introduced in #101665 --- .../java/org/elasticsearch/snapshots/SnapshotShardsService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 411205c5261e..134e76c57ed4 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -510,7 +510,7 @@ private void syncShardStatsOnNewMaster(List entries) synchronized (shardSnapshots) { final var currentLocalShards = shardSnapshots.get(snapshot.snapshot()); if (currentLocalShards == null) { - return; + continue; } localShards = Map.copyOf(currentLocalShards); } From d9054072c9ee04dec542d41e6e9e9c0abfc3fe32 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Thu, 2 Nov 2023 10:24:57 -0500 Subject: [PATCH 43/47] Metrics: Reject names longer than 63 characters (#101680) Open Telemetry supports instrument names lengths 63 characters or less for versions before 1.30. Reject those names early to avoid runtime failures that hit the log. 
Refs: #101679 --- .../telemetry/apm/APMMeterRegistry.java | 5 +-- .../telemetry/apm/AbstractInstrument.java | 6 ++++ .../telemetry/apm/APMMeterRegistryTests.java | 31 +++++++++++++------ 3 files changed, 31 insertions(+), 11 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java index 57649f7e3dfa..07bbc5c55f7c 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java @@ -52,7 +52,7 @@ public class APMMeterRegistry implements MeterRegistry { private final Registrar longGauges = new Registrar<>(); private final Registrar longHistograms = new Registrar<>(); - private final Meter meter; + private Meter meter; public APMMeterRegistry(Meter meter) { this.meter = meter; @@ -170,8 +170,9 @@ public LongHistogram getLongHistogram(String name) { public void setProvider(Meter meter) { try (ReleasableLock lock = registerLock.acquire()) { + this.meter = meter; for (Registrar registrar : registrars) { - registrar.setProvider(meter); + registrar.setProvider(this.meter); } } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java index 61b53f2087f6..2a806ca19a4e 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java @@ -25,6 +25,7 @@ * @param delegated instrument */ public abstract class AbstractInstrument implements Instrument { + private static final int MAX_NAME_LENGTH = 63; // TODO(stu): change to 255 when we upgrade to otel 1.30+, see #101679 private final AtomicReference delegate; private final String name; private final String description; @@ -33,6 +34,11 @@ public abstract 
class AbstractInstrument implements Instrument { @SuppressWarnings("this-escape") public AbstractInstrument(Meter meter, String name, String description, String unit) { this.name = Objects.requireNonNull(name); + if (name.length() > MAX_NAME_LENGTH) { + throw new IllegalArgumentException( + "Instrument name [" + name + "] with length [" + name.length() + "] exceeds maximum length [" + MAX_NAME_LENGTH + "]" + ); + } this.description = Objects.requireNonNull(description); this.unit = Objects.requireNonNull(unit); this.delegate = new AtomicReference<>(doBuildInstrument(meter)); diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java index 38fb0f0e0a8a..b393edd6e58e 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java @@ -16,15 +16,20 @@ import org.elasticsearch.telemetry.apm.internal.APMMeterService; import org.elasticsearch.telemetry.apm.internal.TestAPMMeterService; import org.elasticsearch.telemetry.metric.DoubleCounter; +import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; public class APMMeterRegistryTests extends ESTestCase { - Meter testOtel = OpenTelemetry.noop().getMeter("test"); + Meter testOtel = new RecordingOtelMeter(); Meter noopOtel = OpenTelemetry.noop().getMeter("noop"); + private Settings TELEMETRY_ENABLED = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + public void testMeterIsSetUponConstruction() { // test default APMMeterService apmMeter = new APMMeterService(Settings.EMPTY, () -> testOtel, () -> noopOtel); @@ -33,14 +38,13 @@ public void 
testMeterIsSetUponConstruction() { assertThat(meter, sameInstance(noopOtel)); // test explicitly enabled - var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); + apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); meter = apmMeter.getMeterRegistry().getMeter(); assertThat(meter, sameInstance(testOtel)); // test explicitly disabled - settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); + var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); meter = apmMeter.getMeterRegistry().getMeter(); @@ -60,9 +64,7 @@ public void testMeterIsOverridden() { } public void testLookupByName() { - var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - - var apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel).getMeterRegistry(); + var apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel).getMeterRegistry(); DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit"); DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name"); @@ -71,8 +73,7 @@ public void testLookupByName() { } public void testNoopIsSetOnStop() { - var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); - APMMeterService apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel); + APMMeterService apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); apmMeter.start(); Meter meter = apmMeter.getMeterRegistry().getMeter(); @@ -84,4 +85,16 @@ public void testNoopIsSetOnStop() { assertThat(meter, sameInstance(noopOtel)); } + 
public void testMaxNameLength() { + APMMeterService apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); + apmMeter.start(); + int max_length = 63; + var counter = apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length), "desc", "count"); + assertThat(counter, instanceOf(LongCounter.class)); + IllegalArgumentException iae = expectThrows( + IllegalArgumentException.class, + () -> apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length + 1), "desc", "count") + ); + assertThat(iae.getMessage(), containsString("exceeds maximum length [63]")); + } } From b750f6e9805024668029e718d27909b6c509d827 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 2 Nov 2023 11:39:59 -0400 Subject: [PATCH 44/47] Remove HLRC from EQL tests (#101697) --- .../client/RestHighLevelClient.java | 11 ---- .../EqlCcsRollingUpgradeIT.java | 38 +++++++------ .../test/eql/BaseEqlSpecTestCase.java | 2 +- .../elasticsearch/test/eql/DataLoader.java | 57 +++++++++---------- .../RemoteClusterAwareEqlRestTestCase.java | 8 --- .../test/eql/stats/EqlUsageRestTestCase.java | 14 +---- .../xpack/eql/EsEQLCorrectnessIT.java | 12 ---- 7 files changed, 49 insertions(+), 93 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 85803ec68de1..b0998957910a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -301,17 +301,6 @@ public final void close() throws IOException { doClose.accept(client); } - /** - * Executes a bulk request using the Bulk API. - * See Bulk API on elastic.co - * @param bulkRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - */ - public final BulkResponse bulk(BulkRequest bulkRequest, RequestOptions options) throws IOException { - return performRequestAndParseEntity(bulkRequest, RequestConverters::bulk, options, BulkResponse::fromXContent, emptySet()); - } - /** * Asynchronously executes a bulk request using the Bulk API. * See Bulk API on elastic.co diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java index 444ee72cf8aa..6efe56fbf222 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/src/test/java/org/elasticsearch/xpack/eql/qa/ccs_rolling_upgrade/EqlCcsRollingUpgradeIT.java @@ -12,13 +12,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; @@ -84,7 +82,7 @@ static List parseHosts(String props) { public static void configureRemoteClusters(List remoteNodes) throws Exception { assertThat(remoteNodes, hasSize(3)); final String remoteClusterSettingPrefix = "cluster.remote." 
+ CLUSTER_ALIAS + "."; - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { final Settings remoteConnectionSettings; if (randomBoolean()) { final List seeds = remoteNodes.stream() @@ -118,28 +116,32 @@ public static void configureRemoteClusters(List remoteNodes) throws Except } } - static RestHighLevelClient newLocalClient() { + static RestClient newLocalClient() { final List hosts = parseHosts("tests.rest.cluster"); final int index = random().nextInt(hosts.size()); LOGGER.info("Using client node {}", index); - return new RestHighLevelClient(RestClient.builder(hosts.get(index))); + return RestClient.builder(hosts.get(index)).build(); } - static RestHighLevelClient newRemoteClient() { - return new RestHighLevelClient(RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster")))); + static RestClient newRemoteClient() { + return RestClient.builder(randomFrom(parseHosts("tests.rest.remote_cluster"))).build(); } - static int indexDocs(RestHighLevelClient client, String index, int numDocs) throws IOException { + static int indexDocs(RestClient client, String index, int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { - client.index(new IndexRequest(index).id("id_" + i).source("f", i, "@timestamp", i), RequestOptions.DEFAULT); + Request createDoc = new Request("POST", "/" + index + "/_doc/id_" + i); + createDoc.setJsonEntity(Strings.format(""" + { "f": %s, "@timestamp": %s } + """, i, i)); + assertOK(client.performRequest(createDoc)); } - refresh(client.getLowLevelClient(), index); + refresh(client, index); return numDocs; } void verify(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs) { - try (RestClient localClient = newLocalClient().getLowLevelClient()) { + try (RestClient localClient = newLocalClient()) { Request request = new Request("POST", "/" + randomFrom(remoteIndex, localIndex + "," + remoteIndex) + "/_eql/search"); int size = between(1, 
100); @@ -161,9 +163,9 @@ void verify(String localIndex, int localNumDocs, String remoteIndex, int remoteN public void testSequences() throws Exception { String localIndex = "test_bwc_search_states_index"; String remoteIndex = "test_bwc_search_states_remote_index"; - try (RestHighLevelClient localClient = newLocalClient(); RestHighLevelClient remoteClient = newRemoteClient()) { + try (RestClient localClient = newLocalClient(); RestClient remoteClient = newRemoteClient()) { createIndex( - localClient.getLowLevelClient(), + localClient, localIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build(), "{\"properties\": {\"@timestamp\": {\"type\": \"date\"}}}", @@ -171,7 +173,7 @@ public void testSequences() throws Exception { ); int localNumDocs = indexDocs(localClient, localIndex, between(10, 100)); createIndex( - remoteClient.getLowLevelClient(), + remoteClient, remoteIndex, Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5)).build(), "{\"properties\": {\"@timestamp\": {\"type\": \"date\"}}}", @@ -179,13 +181,13 @@ public void testSequences() throws Exception { ); int remoteNumDocs = indexDocs(remoteClient, remoteIndex, between(10, 100)); - configureRemoteClusters(getNodes(remoteClient.getLowLevelClient())); + configureRemoteClusters(getNodes(remoteClient)); int iterations = between(1, 20); for (int i = 0; i < iterations; i++) { verify(localIndex, localNumDocs, CLUSTER_ALIAS + ":" + remoteIndex, remoteNumDocs); } - deleteIndex(localClient.getLowLevelClient(), localIndex); - deleteIndex(remoteClient.getLowLevelClient(), remoteIndex); + deleteIndex(localClient, localIndex); + deleteIndex(remoteClient, remoteIndex); } } } diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java index e11d1cab8eaa..90244d9b2c01 100644 --- 
a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/BaseEqlSpecTestCase.java @@ -66,7 +66,7 @@ public void setup() throws Exception { ); if (dataLoaded == false) { - DataLoader.loadDatasetIntoEs(highLevelClient(provisioningClient), this::createParser); + DataLoader.loadDatasetIntoEs(provisioningClient, this::createParser); } } diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java index 588c2d87f743..1d51af574c81 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/DataLoader.java @@ -8,23 +8,20 @@ import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.common.CheckedBiFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.ql.TestUtils; @@ -53,7 +50,6 @@ * * While the loader could be made generic, the queries are bound to each index and generalizing that would make things way too complicated. */ -@SuppressWarnings("removal") public class DataLoader { public static final String TEST_INDEX = "endgame-140"; public static final String TEST_EXTRA_INDEX = "extra"; @@ -79,15 +75,12 @@ private static Map getReplacementPatterns() { public static void main(String[] args) throws IOException { main = true; try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - loadDatasetIntoEs(new RestHighLevelClient(client, ignore -> {}, List.of()) { - }, DataLoader::createParser); + loadDatasetIntoEs(client, DataLoader::createParser); } } - public static void loadDatasetIntoEs( - RestHighLevelClient client, - CheckedBiFunction p - ) throws IOException { + public static void loadDatasetIntoEs(RestClient client, CheckedBiFunction p) + throws IOException { // // Main Index @@ -113,7 +106,7 @@ public static void loadDatasetIntoEs( } private static void load( - RestHighLevelClient client, + RestClient client, String indexNames, String dataName, Consumer> datasetTransform, @@ -136,14 +129,8 @@ private static void load( } } - private static void createTestIndex(RestHighLevelClient client, String indexName, String mapping) throws IOException { - ESRestTestCase.createIndex( - client.getLowLevelClient(), - indexName, - Settings.builder().put("number_of_shards", 1).build(), - mapping, - null - ); + private static void createTestIndex(RestClient client, String indexName, String mapping) throws IOException { + ESRestTestCase.createIndex(client, indexName, Settings.builder().put("number_of_shards", 1).build(), mapping, null); } /** @@ -171,30 +158,40 @@ private static CharSequence randomOf(String... 
values) { @SuppressWarnings("unchecked") private static void loadData( - RestHighLevelClient client, + RestClient client, String indexName, Consumer> datasetTransform, URL resource, CheckedBiFunction p ) throws IOException { - BulkRequest bulk = new BulkRequest(); - bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + StringBuilder bulkRequestBody = new StringBuilder(); + String actionMetadata = Strings.format("{ \"index\" : { \"_index\" : \"%s\" } }%n", indexName); + int bulkDocuments; try (XContentParser parser = p.apply(JsonXContent.jsonXContent, TestUtils.inputStream(resource))) { List list = parser.list(); + bulkDocuments = list.size(); for (Object item : list) { assertThat(item, instanceOf(Map.class)); Map entry = (Map) item; if (datasetTransform != null) { datasetTransform.accept(entry); } - bulk.add(new IndexRequest(indexName).source(entry, XContentType.JSON)); + bulkRequestBody.append(actionMetadata); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.map(entry); + bulkRequestBody.append(Strings.toString(builder)); + } + bulkRequestBody.append("\n"); } } - if (bulk.numberOfActions() > 0) { - BulkResponse bulkResponse = client.bulk(bulk, RequestOptions.DEFAULT); - if (bulkResponse.hasFailures()) { + if (bulkDocuments > 0) { + Request request = new Request("POST", "_bulk?refresh=true"); + request.setJsonEntity(bulkRequestBody.toString()); + ObjectPath response = ObjectPath.createFromResponse(client.performRequest(request)); + boolean errors = response.evaluate("errors"); + if (errors) { LogManager.getLogger(DataLoader.class).info("Data loading FAILED"); } else { LogManager.getLogger(DataLoader.class).info("Data loading OK"); diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java index 739f3fc83cd1..089232bfa438 100644 --- 
a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/RemoteClusterAwareEqlRestTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -24,11 +23,9 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.util.Collections; import static org.elasticsearch.common.Strings.hasText; -@SuppressWarnings("removal") public abstract class RemoteClusterAwareEqlRestTestCase extends ESRestTestCase { private static final long CLIENT_TIMEOUT = 40L; // upped from 10s to accomodate for max measured throughput decline @@ -61,11 +58,6 @@ public static void closeRemoteClients() throws IOException { } } - protected static RestHighLevelClient highLevelClient(RestClient client) { - return new RestHighLevelClient(client, ignore -> {}, Collections.emptyList()) { - }; - } - protected static RestClient clientBuilder(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); doConfigureClient(builder, settings); diff --git a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java index 2626fcfda1ef..38820056db81 100644 --- a/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java +++ b/x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/stats/EqlUsageRestTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.test.eql.stats; import org.elasticsearch.client.Request; -import 
org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -20,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -29,10 +27,8 @@ /** * Tests a random number of queries that increase various (most of the times, one query will "touch" multiple metrics values) metrics. */ -@SuppressWarnings("removal") public abstract class EqlUsageRestTestCase extends ESRestTestCase { - private RestHighLevelClient highLevelClient; private Map baseMetrics = new HashMap<>(); private Integer baseAllTotalQueries = 0; private Integer baseAllFailedQueries = 0; @@ -117,7 +113,7 @@ public void testEqlRestUsage() throws IOException { // it doesn't matter if the index is already there (probably created by another test); _if_ its mapping is the expected one // it should be enough if (client().performRequest(new Request("HEAD", "/" + DataLoader.TEST_INDEX)).getStatusLine().getStatusCode() == 404) { - DataLoader.loadDatasetIntoEs(highLevelClient(), this::createParser); + DataLoader.loadDatasetIntoEs(client(), this::createParser); } String defaultPipe = "pipe_tail"; @@ -382,14 +378,6 @@ private void assertFeaturesMetricsExcept(Map responseAsMap, Set< } } - private RestHighLevelClient highLevelClient() { - if (highLevelClient == null) { - highLevelClient = new RestHighLevelClient(client(), ignore -> {}, Collections.emptyList()) { - }; - } - return highLevelClient; - } - @Override protected Settings restClientSettings() { String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); diff --git a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java 
b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java index 6f9c8c937335..1d5ed1ffdcba 100644 --- a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java +++ b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java @@ -19,7 +19,6 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -38,7 +37,6 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; @@ -47,14 +45,12 @@ @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) @TestLogging(value = "org.elasticsearch.xpack.eql.EsEQLCorrectnessIT:INFO", reason = "Log query execution time") -@SuppressWarnings("removal") public class EsEQLCorrectnessIT extends ESRestTestCase { private static final String PARAM_FORMATTING = "%1$s"; private static final String QUERIES_FILENAME = "queries.toml"; private static Properties CFG; - private static RestHighLevelClient highLevelClient; private static RequestOptions COMMON_REQUEST_OPTIONS; private static long totalTime = 0; @@ -117,14 +113,6 @@ public EsEQLCorrectnessIT(EqlSpec spec) { this.spec = spec; } - private RestHighLevelClient highLevelClient() { - if (highLevelClient == null) { - highLevelClient = new RestHighLevelClient(client(), ignore -> {}, Collections.emptyList()) { - }; - } - return highLevelClient; - } - @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING) public static Iterable parameters() throws Exception { Collection specs; From 8334b883a8d8e7a85f9ef4a886b4606fb0e20695 
Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 2 Nov 2023 08:48:36 -0700 Subject: [PATCH 45/47] Abort drivers on node shutting down (#101701) The TransportResponseHandler can be notified while the Driver is still running during node shutdown or the Driver hasn't started when the parent task is canceled. In such cases, we should abort the Driver and wait for it to finish; otherwise, multiple threads can access a Driver at the same time Closes #101595 --- .../compute/operator/Driver.java | 31 ++++++++++++++++--- .../compute/operator/DriverContext.java | 8 ++++- .../compute/operator/DriverRunner.java | 8 ----- .../compute/operator/DriverTaskRunner.java | 8 ++++- 4 files changed, 40 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index be3ee5ff4079..bd0629630988 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -24,6 +24,7 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -56,6 +57,10 @@ public class Driver implements Releasable, Describable { private final AtomicReference cancelReason = new AtomicReference<>(); private final AtomicReference> blocked = new AtomicReference<>(); + + private final AtomicBoolean started = new AtomicBoolean(); + private final SubscribableListener completionListener = new SubscribableListener<>(); + /** * Status reported to the tasks API. 
We write the status at most once every * {@link #statusNanos}, as soon as loop has finished and after {@link #statusNanos} @@ -149,7 +154,7 @@ private SubscribableListener run(TimeValue maxTime, int maxIterations) { if (isFinished()) { status.set(updateStatus(DriverStatus.Status.DONE)); driverContext.finish(); - releasable.close(); + Releasables.close(releasable, driverContext.getSnapshot()); } else { status.set(updateStatus(DriverStatus.Status.WAITING)); } @@ -159,7 +164,7 @@ private SubscribableListener run(TimeValue maxTime, int maxIterations) { /** * Whether the driver has run the chain of operators to completion. */ - public boolean isFinished() { + private boolean isFinished() { return activeOperators.isEmpty(); } @@ -168,6 +173,19 @@ public void close() { drainAndCloseOperators(null); } + /** + * Abort the driver and wait for it to finish + */ + public void abort(Exception reason, ActionListener listener) { + completionListener.addListener(listener); + if (started.compareAndSet(false, true)) { + drainAndCloseOperators(reason); + completionListener.onFailure(reason); + } else { + cancel(reason.getMessage()); + } + } + private SubscribableListener runSingleLoopIteration() { ensureNotCancelled(); boolean movedPage = false; @@ -261,8 +279,11 @@ public static void start( int maxIterations, ActionListener listener ) { - driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); - schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, threadContext, executor, driver, listener); + driver.completionListener.addListener(listener); + if (driver.started.compareAndSet(false, true)) { + driver.status.set(driver.updateStatus(DriverStatus.Status.STARTING)); + schedule(DEFAULT_TIME_BEFORE_YIELDING, maxIterations, threadContext, executor, driver, driver.completionListener); + } } // Drains all active operators and closes them. 
@@ -279,7 +300,7 @@ private void drainAndCloseOperators(@Nullable Exception e) { itr.remove(); } driverContext.finish(); - Releasables.closeWhileHandlingException(releasable); + Releasables.closeWhileHandlingException(releasable, driverContext.getSnapshot()); } private static void schedule( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index b21671cd3051..85860cf8766f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import java.util.Collections; import java.util.IdentityHashMap; @@ -69,7 +70,12 @@ public BlockFactory blockFactory() { } /** A snapshot of the driver context. */ - public record Snapshot(Set releasables) {} + public record Snapshot(Set releasables) implements Releasable { + @Override + public void close() { + Releasables.close(releasables); + } + } /** * Adds a releasable to this context. Releasables are identified by Object identity. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java index 788fc2887ebd..4f16a615572b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.TaskCancelledException; import java.util.HashMap; @@ -84,13 +83,6 @@ private void done() { responseHeaders.setOnce(driverIndex, threadContext.getResponseHeaders()); if (counter.countDown()) { mergeResponseHeaders(responseHeaders); - for (Driver d : drivers) { - if (d.status().status() == DriverStatus.Status.QUEUED) { - d.close(); - } else { - Releasables.close(d.driverContext().getSnapshot().releasables()); - } - } Exception error = failure.get(); if (error != null) { listener.onFailure(error); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java index b486318f8540..38d879f8f7ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java @@ -52,7 +52,13 @@ protected void start(Driver driver, ActionListener driverListener) { new DriverRequest(driver, executor), parentTask, TransportRequestOptions.EMPTY, - TransportResponseHandler.empty(executor, driverListener) + TransportResponseHandler.empty( + executor, + // The TransportResponseHandler can be 
notified while the Driver is still running during node shutdown + // or the Driver hasn't started when the parent task is canceled. In such cases, we should abort + // the Driver and wait for it to finish. + ActionListener.wrap(driverListener::onResponse, e -> driver.abort(e, driverListener)) + ) ); } }; From 1f77422529ed60fe625d315a7583d793d23588cb Mon Sep 17 00:00:00 2001 From: William Brafford Date: Thu, 2 Nov 2023 12:04:30 -0400 Subject: [PATCH 46/47] Unfinalize class with serverless implementation (#101712) In #101699, I overlooked a class that's subclassed in the serverless project. GatewayMetaState.LucenePersistedState can't be final, and some of its methods need to be protected instead of private. --- .../java/org/elasticsearch/gateway/GatewayMetaState.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 1a81f3053012..a7cf7299a850 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -495,7 +495,7 @@ boolean allPendingAsyncStatesWritten() { /** * Encapsulates the incremental writing of metadata to a {@link PersistedClusterStateService.Writer}. 
*/ - public static final class LucenePersistedState implements PersistedState { + public static class LucenePersistedState implements PersistedState { private long currentTerm; private ClusterState lastAcceptedState; @@ -505,6 +505,7 @@ public static final class LucenePersistedState implements PersistedState { private final AtomicReference persistenceWriter = new AtomicReference<>(); private boolean writeNextStateFully; + @SuppressWarnings("this-escape") public LucenePersistedState( PersistedClusterStateService persistedClusterStateService, long currentTerm, @@ -525,7 +526,7 @@ public LucenePersistedState( persistenceWriter.set(writer); } - private void maybeWriteInitialState(long currentTerm, ClusterState lastAcceptedState, PersistedClusterStateService.Writer writer) + protected void maybeWriteInitialState(long currentTerm, ClusterState lastAcceptedState, PersistedClusterStateService.Writer writer) throws IOException { try { writer.writeFullStateAndCommit(currentTerm, lastAcceptedState); @@ -555,7 +556,7 @@ public void setCurrentTerm(long currentTerm) { this.currentTerm = currentTerm; } - private void writeCurrentTermToDisk(long currentTerm) { + protected void writeCurrentTermToDisk(long currentTerm) { try { if (writeNextStateFully) { getWriterSafe().writeFullStateAndCommit(currentTerm, lastAcceptedState); @@ -583,7 +584,7 @@ public void setLastAcceptedState(ClusterState clusterState) { lastAcceptedState = clusterState; } - private void writeClusterStateToDisk(ClusterState clusterState) { + protected void writeClusterStateToDisk(ClusterState clusterState) { try { if (writeNextStateFully) { getWriterSafe().writeFullStateAndCommit(currentTerm, clusterState); From 8485cd7e838f842437a99c3b3f6c2513a8c0a024 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Thu, 2 Nov 2023 16:33:36 +0000 Subject: [PATCH 47/47] Health report infrastructure doesn't trip the circuit breakers (#101629) --- docs/changelog/101629.yaml | 5 +++++ .../health/RestGetHealthAction.java | 5 +++++ 
.../action/TransportHealthNodeAction.java | 2 +- .../health/RestGetHealthActionTests.java | 20 +++++++++++++++++++ .../TransportHealthNodeActionTests.java | 6 ++++++ 5 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/101629.yaml create mode 100644 server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java diff --git a/docs/changelog/101629.yaml b/docs/changelog/101629.yaml new file mode 100644 index 000000000000..1b8691c9798f --- /dev/null +++ b/docs/changelog/101629.yaml @@ -0,0 +1,5 @@ +pr: 101629 +summary: Health report infrastructure doesn't trip the circuit breakers +area: Health +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java b/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java index dd3176cf912a..8dcea1bb0e7e 100644 --- a/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/RestGetHealthAction.java @@ -51,4 +51,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli new RestChunkedToXContentListener<>(channel) ); } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java b/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java index 18c009deb884..f25168bd4e98 100644 --- a/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/action/TransportHealthNodeAction.java @@ -74,7 +74,7 @@ protected TransportHealthNodeAction( Writeable.Reader response, Executor executor ) { - super(actionName, true, transportService, actionFilters, request, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, false, transportService, actionFilters, request, EsExecutors.DIRECT_EXECUTOR_SERVICE); 
this.transportService = transportService; this.clusterService = clusterService; this.threadPool = threadPool; diff --git a/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java b/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java new file mode 100644 index 000000000000..0eeb1811849e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class RestGetHealthActionTests extends ESTestCase { + + public void testHealthReportAPIDoesNotTripCircuitBreakers() { + assertThat(new RestGetHealthAction().canTripCircuitBreaker(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java b/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java index 0781cf6614da..a228f0e4792a 100644 --- a/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/action/TransportHealthNodeActionTests.java @@ -52,6 +52,7 @@ import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; public class TransportHealthNodeActionTests extends ESTestCase { private static ThreadPool threadPool; @@ -250,6 +251,7 @@ protected void healthOperation(Task task, Request request, ClusterState 
state, A } }, null, request, listener); assertTrue(listener.isDone()); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); if (healthOperationFailure) { try { @@ -283,6 +285,7 @@ public void testDelegateToHealthNodeWithoutParentTask() throws ExecutionExceptio PlainActionFuture listener = new PlainActionFuture<>(); ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), null, request, listener); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; @@ -303,6 +306,7 @@ public void testDelegateToHealthNodeWithParentTask() throws ExecutionException, PlainActionFuture listener = new PlainActionFuture<>(); final CancellableTask task = (CancellableTask) taskManager.register("type", "internal:testAction", request); ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), task, request, listener); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; @@ -327,6 +331,8 @@ public void testHealthNodeOperationWithException() throws InterruptedException { listener ); assertTrue(listener.isDone()); + assertThat(transportService.getRequestHandler("internal:testAction").canTripCircuitBreaker(), is(false)); + try { listener.get(); fail("A simulated RuntimeException should be thrown");