diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java index 60de1981f9827..7a2504efdd0fc 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java @@ -43,7 +43,7 @@ private void scanForJDKJarHell(Path root) throws IOException { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { String entry = root.relativize(file).toString().replace('\\', '/'); - if (entry.endsWith(".class")) { + if (entry.endsWith(".class") && entry.endsWith("module-info.class") == false) { if (ext.getResource(entry) != null) { detected.add( entry diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java index 7e4766ada6541..bffa011cb7be2 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java @@ -20,12 +20,12 @@ import org.apache.commons.io.output.NullOutputStream; import org.elasticsearch.gradle.JdkJarHellCheck; -import org.elasticsearch.test.NamingConventionsCheck; import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import org.gradle.api.JavaVersion; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; +import org.gradle.api.file.FileTree; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFile; import org.gradle.api.tasks.InputFiles; @@ -47,6 +47,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; public class ThirdPartyAuditTask extends DefaultTask { @@ -171,19 +172,38 @@ private void extractJars(FileCollection jars) { File jarExpandDir = getJarExpandDir(); 
// We need to clean up to make sure old dependencies don't linger getProject().delete(jarExpandDir); - jars.forEach(jar -> + + jars.forEach(jar -> { + FileTree jarFiles = getProject().zipTree(jar); getProject().copy(spec -> { + spec.from(jarFiles); + spec.into(jarExpandDir); + // exclude classes from multi release jars + spec.exclude("META-INF/versions/**"); + }); + // Deal with multi release jars: + // The order is important, we iterate here so we don't depend on the order in which Gradle executes the spec + // We extract multi release jar classes ( if these exist ) going from 9 - the first to support them, to the + // current `targetCompatibility` version. + // Each extract will overwrite the top level classes that existed before it, the result is that we end up + // with a single version of the class in `jarExpandDir`. + // This will be the closest version to `targetCompatibility`, the same class that would be loaded in a JVM + // that has `targetCompatibility` version. + // This means we only scan classes that would be loaded into `targetCompatibility`, and don't look at any + // other version specific implementation of said classes. 
+ IntStream.rangeClosed( + Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()), + Integer.parseInt(targetCompatibility.getMajorVersion()) + ).forEach(majorVersion -> getProject().copy(spec -> { spec.from(getProject().zipTree(jar)); spec.into(jarExpandDir); - // Exclude classes for multi release jars above target - for (int i = Integer.parseInt(targetCompatibility.getMajorVersion()) + 1; - i <= Integer.parseInt(JavaVersion.VERSION_HIGHER.getMajorVersion()); - i++ - ) { - spec.exclude("META-INF/versions/" + i + "/**"); - } - }) - ); + String metaInfPrefix = "META-INF/versions/" + majorVersion; + spec.include(metaInfPrefix + "/**"); + // Drop the version specific prefix + spec.eachFile(details -> details.setPath(details.getPath().replace(metaInfPrefix, ""))); + spec.setIncludeEmptyDirs(false); + })); + }); } private void assertNoJarHell(Set jdkJarHellClasses) { @@ -276,9 +296,9 @@ private String formatClassList(Set classList) { private Set runJdkJarHellCheck() throws IOException { ByteArrayOutputStream standardOut = new ByteArrayOutputStream(); ExecResult execResult = getProject().javaexec(spec -> { - URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation(); + URL location = JdkJarHellCheck.class.getProtectionDomain().getCodeSource().getLocation(); if (location.getProtocol().equals("file") == false) { - throw new GradleException("Unexpected location for NamingConventionCheck class: " + location); + throw new GradleException("Unexpected location for JdkJarHellCheck class: " + location); } try { spec.classpath( diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java similarity index 96% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java rename to 
buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java index 913d88e9fa11b..a200c75880e6c 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchConfiguration.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchConfiguration.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.gradle.Distribution; import org.elasticsearch.gradle.Version; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java similarity index 98% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 8b78fc2b627cb..a196cb09e97e3 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.GradleServicesAdapter; import org.elasticsearch.gradle.Distribution; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java similarity index 95% rename from buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 779e7b61ed9ce..3a137906bec86 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/clusterformation/ClusterformationPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import groovy.lang.Closure; import org.elasticsearch.GradleServicesAdapter; @@ -37,12 +37,12 @@ import java.util.List; import java.util.Map; -public class ClusterformationPlugin implements Plugin { +public class TestClustersPlugin implements Plugin { public static final String LIST_TASK_NAME = "listElasticSearchClusters"; public static final String EXTENSION_NAME = "elasticSearchClusters"; - private final Logger logger = Logging.getLogger(ClusterformationPlugin.class); + private final Logger logger = Logging.getLogger(TestClustersPlugin.class); @Override public void apply(Project project) { diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties deleted file mode 100644 index dfd6cd9956a58..0000000000000 --- 
a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.clusterformation.properties +++ /dev/null @@ -1 +0,0 @@ -implementation-class=org.elasticsearch.gradle.clusterformation.ClusterformationPlugin diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties new file mode 100644 index 0000000000000..8d81f05fc69f5 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.testclusters.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.testclusters.TestClustersPlugin diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java similarity index 89% rename from buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java rename to buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java index c690557537dfb..021bd9bb15169 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/clusterformation/ClusterformationPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.gradle.clusterformation; +package org.elasticsearch.gradle.testclusters; import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; @@ -26,11 +26,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; -public class ClusterformationPluginIT extends GradleIntegrationTestCase { +public class TestClustersPluginIT extends GradleIntegrationTestCase { public void testListClusters() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("listElasticSearchClusters", "-s") .withPluginClasspath() .build(); @@ -45,7 +45,7 @@ public void testListClusters() { public void testUseClusterByOne() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "-s") .withPluginClasspath() .build(); @@ -60,7 +60,7 @@ public void testUseClusterByOne() { public void testUseClusterByOneWithDryRun() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "-s", "--dry-run") .withPluginClasspath() .build(); @@ -75,7 +75,7 @@ public void testUseClusterByOneWithDryRun() { public void testUseClusterByTwo() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("user1", "user2", "-s") .withPluginClasspath() .build(); @@ -92,7 +92,7 @@ public void testUseClusterByTwo() { public void testUseClusterByUpToDateTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("upToDate1", "upToDate2", "-s") .withPluginClasspath() .build(); @@ -109,7 
+109,7 @@ public void testUseClusterByUpToDateTask() { public void testUseClusterBySkippedTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("skipped1", "skipped2", "-s") .withPluginClasspath() .build(); @@ -126,7 +126,7 @@ public void testUseClusterBySkippedTask() { public void tetUseClusterBySkippedAndWorkingTask() { BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("clusterformation")) + .withProjectDir(getProjectDir("testclusters")) .withArguments("skipped1", "user1", "-s") .withPluginClasspath() .build(); diff --git a/buildSrc/src/testKit/clusterformation/build.gradle b/buildSrc/src/testKit/testclusters/build.gradle similarity index 94% rename from buildSrc/src/testKit/clusterformation/build.gradle rename to buildSrc/src/testKit/testclusters/build.gradle index ae9dd8a2c335c..083ce97b963bb 100644 --- a/buildSrc/src/testKit/clusterformation/build.gradle +++ b/buildSrc/src/testKit/testclusters/build.gradle @@ -1,5 +1,5 @@ plugins { - id 'elasticsearch.clusterformation' + id 'elasticsearch.testclusters' } elasticSearchClusters { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 1db26087ae88c..e3570a2a837a6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -148,7 +148,12 @@ static Request deleteJob(DeleteJobRequest deleteJobRequest) { Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(request); - params.putParam("force", Boolean.toString(deleteJobRequest.isForce())); + if (deleteJobRequest.getForce() != null) { + params.putParam("force", 
Boolean.toString(deleteJobRequest.getForce())); + } + if (deleteJobRequest.getWaitForCompletion() != null) { + params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); + } return request; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 29250e5d440bd..8c442d8ffa646 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -211,14 +212,15 @@ public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, * * @param request The request to delete the job * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement + * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for + * completion * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { + public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, MLRequestConverters::deleteJob, options, - AcknowledgedResponse::fromXContent, + DeleteJobResponse::fromXContent, Collections.emptySet()); } @@ -232,11 +234,11 @@ public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions o * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ - public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { + public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::deleteJob, options, - AcknowledgedResponse::fromXContent, + DeleteJobResponse::fromXContent, listener, Collections.emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java index a355f7ec659bb..44e3668059c47 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java @@ -29,7 +29,8 @@ public class DeleteJobRequest extends 
ActionRequest { private String jobId; - private boolean force; + private Boolean force; + private Boolean waitForCompletion; public DeleteJobRequest(String jobId) { this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); @@ -47,7 +48,7 @@ public void setJobId(String jobId) { this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); } - public boolean isForce() { + public Boolean getForce() { return force; } @@ -57,10 +58,24 @@ public boolean isForce() { * * @param force When {@code true} forcefully delete an opened job. Defaults to {@code false} */ - public void setForce(boolean force) { + public void setForce(Boolean force) { this.force = force; } + public Boolean getWaitForCompletion() { + return waitForCompletion; + } + + /** + * Set whether this request should wait until the operation has completed before returning + * @param waitForCompletion When {@code true} the call will wait for the job deletion to complete. + * Otherwise, the deletion will be executed asynchronously and the response + * will contain the task id. + */ + public void setWaitForCompletion(Boolean waitForCompletion) { + this.waitForCompletion = waitForCompletion; + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java new file mode 100644 index 0000000000000..f1487c8c2765b --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response object that contains the acknowledgement or the task id + * depending on whether the delete job action was requested to wait for completion. 
+ */ +public class DeleteJobResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); + private static final ParseField TASK = new ParseField("task"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("delete_job_response", + true, a-> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1])); + + static { + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING); + } + + public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final Boolean acknowledged; + private final TaskId task; + + DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) { + assert acknowledged != null || task != null; + this.acknowledged = acknowledged; + this.task = task; + } + + /** + * Get the action acknowledgement + * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or + * otherwise a {@code boolean} that indicates whether the job was deleted successfully. + */ + public Boolean getAcknowledged() { + return acknowledged; + } + + /** + * Get the task id + * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or + * otherwise the id of the job deletion task. 
+ */ + public TaskId getTask() { + return task; + } + + @Override + public int hashCode() { + return Objects.hash(acknowledged, task); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DeleteJobResponse that = (DeleteJobResponse) other; + return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (acknowledged != null) { + builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); + } + if (task != null) { + builder.field(TASK.getPreferredName(), task.toString()); + } + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java index f30a003e02a7d..13b4dcb955a05 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java @@ -65,6 +65,7 @@ public class Job implements ToXContentObject { public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); + public static final ParseField DELETING = new ParseField("deleting"); public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); @@ -94,6 +95,7 @@ public class Job implements ToXContentObject { PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); 
PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); + PARSER.declareBoolean(Builder::setDeleting, DELETING); } private final String jobId; @@ -115,13 +117,14 @@ public class Job implements ToXContentObject { private final Map customSettings; private final String modelSnapshotId; private final String resultsIndexName; + private final Boolean deleting; private Job(String jobId, String jobType, List groups, String description, Date createTime, Date finishedTime, Long establishedModelMemory, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, - String modelSnapshotId, String resultsIndexName) { + String modelSnapshotId, String resultsIndexName, Boolean deleting) { this.jobId = jobId; this.jobType = jobType; @@ -141,6 +144,7 @@ private Job(String jobId, String jobType, List groups, String descriptio this.customSettings = customSettings == null ? 
null : Collections.unmodifiableMap(customSettings); this.modelSnapshotId = modelSnapshotId; this.resultsIndexName = resultsIndexName; + this.deleting = deleting; } /** @@ -275,6 +279,10 @@ public String getModelSnapshotId() { return modelSnapshotId; } + public Boolean getDeleting() { + return deleting; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -330,6 +338,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (resultsIndexName != null) { builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); } + if (deleting != null) { + builder.field(DELETING.getPreferredName(), deleting); + } builder.endObject(); return builder; } @@ -362,7 +373,8 @@ public boolean equals(Object other) { && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) && Objects.equals(this.customSettings, that.customSettings) && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName); + && Objects.equals(this.resultsIndexName, that.resultsIndexName) + && Objects.equals(this.deleting, that.deleting); } @Override @@ -370,7 +382,7 @@ public int hashCode() { return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName); + modelSnapshotId, resultsIndexName, deleting); } @Override @@ -402,6 +414,7 @@ public static class Builder { private Map customSettings; private String modelSnapshotId; private String resultsIndexName; + private Boolean deleting; private Builder() { } @@ -429,6 +442,7 @@ public Builder(Job job) { this.customSettings = job.getCustomSettings(); this.modelSnapshotId = job.getModelSnapshotId(); 
this.resultsIndexName = job.getResultsIndexNameNoPrefix(); + this.deleting = job.getDeleting(); } public Builder setId(String id) { @@ -525,6 +539,11 @@ public Builder setResultsIndexName(String resultsIndexName) { return this; } + Builder setDeleting(Boolean deleting) { + this.deleting = deleting; + return this; + } + /** * Builds a job. * @@ -537,7 +556,7 @@ public Job build() { id, jobType, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName); + modelSnapshotId, resultsIndexName, deleting); } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index b07f78cab1b81..8c5f49c943f33 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -164,11 +164,18 @@ public void testDeleteJob() { Request request = MLRequestConverters.deleteJob(deleteJobRequest); assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint()); - assertEquals(Boolean.toString(false), request.getParameters().get("force")); + assertNull(request.getParameters().get("force")); + assertNull(request.getParameters().get("wait_for_completion")); + deleteJobRequest = new DeleteJobRequest(jobId); deleteJobRequest.setForce(true); request = MLRequestConverters.deleteJob(deleteJobRequest); assertEquals(Boolean.toString(true), request.getParameters().get("force")); + + deleteJobRequest = new DeleteJobRequest(jobId); + deleteJobRequest.setWaitForCompletion(false); + request = 
MLRequestConverters.deleteJob(deleteJobRequest); + assertEquals(Boolean.toString(false), request.getParameters().get("wait_for_completion")); } public void testFlushJob() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 5d3fc82a4bb1b..cac9f533501b5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -151,17 +152,33 @@ public void testGetJob() throws Exception { assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2)); } - public void testDeleteJob() throws Exception { + public void testDeleteJob_GivenWaitForCompletionIsTrue() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); - AcknowledgedResponse response = execute(new DeleteJobRequest(jobId), + DeleteJobResponse response = execute(new DeleteJobRequest(jobId), machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync); - assertTrue(response.isAcknowledged()); + assertTrue(response.getAcknowledged()); + assertNull(response.getTask()); + } + + public void testDeleteJob_GivenWaitForCompletionIsFalse() throws Exception { + String jobId = randomValidJobId(); + Job job = 
buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId); + deleteJobRequest.setWaitForCompletion(false); + + DeleteJobResponse response = execute(deleteJobRequest, machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync); + + assertNull(response.getAcknowledged()); + assertNotNull(response.getTask()); } public void testOpenJob() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index eb1d65a380565..0c0efb241f9ab 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -108,6 +109,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.TaskId; import org.junit.After; import java.io.IOException; @@ -281,20 +283,34 @@ public void testDeleteJob() throws Exception { { //tag::x-pack-delete-ml-job-request - DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); - deleteJobRequest.setForce(false); // <1> - 
AcknowledgedResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT); + DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1> //end::x-pack-delete-ml-job-request + //tag::x-pack-delete-ml-job-request-force + deleteJobRequest.setForce(false); // <1> + //end::x-pack-delete-ml-job-request-force + + //tag::x-pack-delete-ml-job-request-wait-for-completion + deleteJobRequest.setWaitForCompletion(true); // <1> + //end::x-pack-delete-ml-job-request-wait-for-completion + + //tag::x-pack-delete-ml-job-execute + DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT); + //end::x-pack-delete-ml-job-execute + //tag::x-pack-delete-ml-job-response - boolean isAcknowledged = deleteJobResponse.isAcknowledged(); // <1> + Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1> + TaskId task = deleteJobResponse.getTask(); // <2> //end::x-pack-delete-ml-job-response + + assertTrue(isAcknowledged); + assertNull(task); } { //tag::x-pack-delete-ml-job-request-listener - ActionListener listener = new ActionListener() { + ActionListener listener = new ActionListener() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { + public void onResponse(DeleteJobResponse deleteJobResponse) { // <1> } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java index d3ccb98eeb68a..d9f96fd0f288c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobRequestTests.java @@ -34,12 +34,4 @@ public void test_WithNullJobId() { ex = expectThrows(NullPointerException.class, () -> createTestInstance().setJobId(null)); assertEquals("[job_id] must not be 
null", ex.getMessage()); } - - public void test_WithForce() { - DeleteJobRequest deleteJobRequest = createTestInstance(); - assertFalse(deleteJobRequest.isForce()); - - deleteJobRequest.setForce(true); - assertTrue(deleteJobRequest.isForce()); - } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java new file mode 100644 index 0000000000000..97a8c5b892c69 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteJobResponseTests extends AbstractXContentTestCase { + + @Override + protected DeleteJobResponse createTestInstance() { + if (randomBoolean()) { + return new DeleteJobResponse(randomBoolean(), null); + } + return new DeleteJobResponse(null, new TaskId(randomAlphaOfLength(20) + ":" + randomIntBetween(1, 100))); + } + + @Override + protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteJobResponse.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java index b678dce6cffc9..667932d591231 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobTests.java @@ -34,9 +34,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.HashMap; import java.util.List; -import java.util.Map; public class JobTests extends AbstractXContentTestCase { @@ -77,93 +75,6 @@ public void testFutureMetadataParse() throws IOException { assertNotNull(Job.PARSER.apply(parser, null).build()); } - public void testEquals_GivenDifferentClass() { - Job job = buildJobBuilder("foo").build(); - assertFalse(job.equals("a string")); - } - - public void testEquals_GivenDifferentIds() { - Date createTime = new Date(); - Job.Builder builder = buildJobBuilder("foo"); - builder.setCreateTime(createTime); - Job job1 = builder.build(); - builder.setId("bar"); - Job job2 = builder.build(); - 
assertFalse(job1.equals(job2)); - } - - public void testEquals_GivenDifferentRenormalizationWindowDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setRenormalizationWindowDays(3L); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setRenormalizationWindowDays(4L); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentBackgroundPersistInterval() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L)); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L)); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentModelSnapshotRetentionDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setModelSnapshotRetentionDays(10L); - jobDetails1.setCreateTime(date); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setModelSnapshotRetentionDays(8L); - 
jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentResultsRetentionDays() { - Date date = new Date(); - Job.Builder jobDetails1 = new Job.Builder("foo"); - jobDetails1.setDataDescription(new DataDescription.Builder()); - jobDetails1.setAnalysisConfig(createAnalysisConfig()); - jobDetails1.setCreateTime(date); - jobDetails1.setResultsRetentionDays(30L); - Job.Builder jobDetails2 = new Job.Builder("foo"); - jobDetails2.setDataDescription(new DataDescription.Builder()); - jobDetails2.setResultsRetentionDays(4L); - jobDetails2.setAnalysisConfig(createAnalysisConfig()); - jobDetails2.setCreateTime(date); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - - public void testEquals_GivenDifferentCustomSettings() { - Job.Builder jobDetails1 = buildJobBuilder("foo"); - Map customSettings1 = new HashMap<>(); - customSettings1.put("key1", "value1"); - jobDetails1.setCustomSettings(customSettings1); - Job.Builder jobDetails2 = buildJobBuilder("foo"); - Map customSettings2 = new HashMap<>(); - customSettings2.put("key2", "value2"); - jobDetails2.setCustomSettings(customSettings2); - assertFalse(jobDetails1.build().equals(jobDetails2.build())); - } - public void testCopyConstructor() { for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { Job job = createTestInstance(); @@ -184,20 +95,6 @@ public void testBuilder_WithNullJobType() { assertEquals("[job_type] must not be null", ex.getMessage()); } - public static Job.Builder buildJobBuilder(String id, Date date) { - Job.Builder builder = new Job.Builder(id); - builder.setCreateTime(date); - AnalysisConfig.Builder ac = createAnalysisConfig(); - DataDescription.Builder dc = new DataDescription.Builder(); - builder.setAnalysisConfig(ac); - builder.setDataDescription(dc); - return builder; - } - - public static Job.Builder buildJobBuilder(String id) { - return 
buildJobBuilder(id, new Date()); - } - public static String randomValidJobId() { CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); return generator.ofCodePointsLength(random(), 10, 10); @@ -262,6 +159,9 @@ public static Job.Builder createRandomizedJobBuilder() { if (randomBoolean()) { builder.setResultsIndexName(randomValidJobId()); } + if (randomBoolean()) { + builder.setDeleting(randomBoolean()); + } return builder; } diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 1eed8b41c0ebc..f878878e6b606 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -149,21 +149,30 @@ subprojects { task buildBwcVersion(type: Exec) { dependsOn checkoutBwcBranch, writeBuildMetadata - // send RUNTIME_JAVA_HOME so the build doesn't fails on newer version the branch doesn't know about - environment('RUNTIME_JAVA_HOME', getJavaHome(it, rootProject.ext.minimumRuntimeVersion.getMajorVersion() as int)) workingDir = checkoutDir - // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds - if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 8)) - } else if ("6.2".equals(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 9)) - } else if (["6.3", "6.4"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 10)) - } else if (["6.x"].contains(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 11)) - } else { - environment('JAVA_HOME', project.compilerJavaHome) + doFirst { + // Execution time so that the checkouts are available + List lines = file("$checkoutDir/.ci/java-versions.properties").readLines() + environment( + 'JAVA_HOME', + getJavaHome(it, Integer.parseInt( + lines + .findAll({ it.startsWith("ES_BUILD_JAVA=java") }) + .collect({ it.replace("ES_BUILD_JAVA=java", "").trim() }) + .join("!!") + )) + ) + environment( + 'RUNTIME_JAVA_HOME', + getJavaHome(it, 
Integer.parseInt( + lines + .findAll({ it.startsWith("ES_RUNTIME_JAVA=java") }) + .collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() }) + .join("!!") + )) + ) } + if (Os.isFamily(Os.FAMILY_WINDOWS)) { executable 'cmd' args '/C', 'call', new File(checkoutDir, 'gradlew').toString() @@ -237,4 +246,4 @@ class IndentingOutputStream extends OutputStream { } } } -} \ No newline at end of file +} diff --git a/docs/java-rest/high-level/ml/delete-job.asciidoc b/docs/java-rest/high-level/ml/delete-job.asciidoc index 43f1e2fb02bbf..7cdc4149b231c 100644 --- a/docs/java-rest/high-level/ml/delete-job.asciidoc +++ b/docs/java-rest/high-level/ml/delete-job.asciidoc @@ -4,26 +4,57 @@ [[java-rest-high-x-pack-machine-learning-delete-job-request]] ==== Delete Job Request -A `DeleteJobRequest` object requires a non-null `jobId` and can optionally set `force`. -Can be executed as follows: +A `DeleteJobRequest` object requires a non-null `jobId`. ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request] --------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` + +==== Optional Arguments + +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-force] +--------------------------------------------------- <1> Use to forcefully delete an opened job; this method is quicker than closing and deleting the job. -Defaults to `false` +Defaults to `false`. 
+ +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-request-wait-for-completion] +--------------------------------------------------- +<1> Use to set whether the request should wait until the operation has completed before returning. +Defaults to `true`. + +[[java-rest-high-x-pack-machine-learning-delete-job-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-execute] +-------------------------------------------------- [[java-rest-high-x-pack-machine-learning-delete-job-response]] ==== Delete Job Response -The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: +The returned `DeleteJobResponse` object contains the acknowledgement of the +job deletion or the deletion task depending on whether the request was set +to wait for completion: + ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response] --------------------------------------------------- -<1> `isAcknowledged` was the deletion request acknowledged or not +<1> whether was job deletion was acknowledged or not; will be `null` when set not to wait for completion +<2> the id of the job deletion task; will be `null` when set to wait for completion [[java-rest-high-x-pack-machine-learning-delete-job-async]] ==== Delete Job Asynchronously diff --git a/docs/reference/ml/apis/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc index d5ef120ad040b..b9dbe9e3cd6bd 100644 --- 
a/docs/reference/ml/apis/delete-job.asciidoc +++ b/docs/reference/ml/apis/delete-job.asciidoc @@ -41,6 +41,9 @@ separated list. (boolean) Use to forcefully delete an opened job; this method is quicker than closing and deleting the job. +`wait_for_completion`:: + (boolean) Specifies whether the request should return immediately or wait + until the job deletion completes. Defaults to `true`. ==== Authorization @@ -66,4 +69,23 @@ When the job is deleted, you receive the following results: "acknowledged": true } ---- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE + +In the next example we delete the `total-requests` job asynchronously: + +[source,js] +-------------------------------------------------- +DELETE _xpack/ml/anomaly_detectors/total-requests?wait_for_completion=false +-------------------------------------------------- +// CONSOLE +// TEST[skip:setup:server_metrics_job] + +When `wait_for_completion` is set to `false`, the response contains the id +of the job deletion task: +[source,js] +---- +{ + "task": "oTUltX4IQMOUUVeiohTt8A:39" +} +---- +// TESTRESPONSE[s/"task": "oTUltX4IQMOUUVeiohTt8A:39"/"task": $body.task/] \ No newline at end of file diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 544bea86b0dae..1875c402adab4 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -127,6 +127,9 @@ field support has a `nested` sort option with the following properties: should match with in order for its field values to be taken into account by sorting. Common case is to repeat the query / filter inside the nested filter or query. By default no `nested_filter` is active. +`max_children`:: + The maximum number of children to consider per root document + when picking the sort value. Defaults to unlimited. `nested`:: Same as top-level `nested` but applies to another nested path within the current nested object. 
diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index cda5ac4977b56..8d5c832adcc86 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -55,6 +55,7 @@ Enables fips mode of operation. Set this to `true` if you run this {es} instance `xpack.security.authc.accept_default_password`:: In `elasticsearch.yml`, set this to `false` to disable support for the default "changeme" password. +[float] [[password-hashing-settings]] ==== Password hashing settings `xpack.security.authc.password_hashing.algorithm`:: @@ -82,6 +83,33 @@ resource. When set to `false`, an HTTP 401 response is returned and the user can provide credentials with the appropriate permissions to gain access. Defaults to `true`. +[float] +[[security-automata-settings]] +==== Automata Settings +In places where {security} accepts wildcard patterns (e.g. index patterns in +roles, group matches in the role mapping API), each pattern is compiled into +an Automaton. The follow settings are available to control this behaviour. + +`xpack.security.automata.max_determinized_states`:: +The upper limit on how many automaton states may be created by a single pattern. +This protects against too-difficult (e.g. exponentially hard) patterns. +Defaults to `100,000`. + +`xpack.security.automata.cache.enabled`:: +Whether to cache the compiled automata. Compiling automata can be CPU intensive +and may slowdown some operations. The cache reduces the frequency with which +automata need to be compiled. +Defaults to `true`. + +`xpack.security.automata.cache.size`:: +The maximum number of items to retain in the automata cache. +Defaults to `10,000`. + +`xpack.security.automata.cache.ttl`:: +The length of time to retain in an item in the automata cache (based on most +recent usage). +Defaults to `48h` (48 hours). 
+ [float] [[field-document-security-settings]] ==== Document and field level security settings @@ -176,6 +204,11 @@ cache at any given time. Defaults to 100,000. in-memory cached user credentials. For possible values, see <>. Defaults to `ssha256`. +`authentication.enabled`:: If set to `false`, disables authentication support in +this realm, so that it only supports user lookups. +(See the {xpack-ref}/run-as-privilege.html[run as] and +{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features). +Defaults to `true`. [[ref-users-settings]] @@ -200,6 +233,12 @@ Defaults to 100,000. (Expert Setting) The hashing algorithm that is used for the in-memory cached user credentials. See <>. Defaults to `ssha256`. +`authentication.enabled`:: If set to `false`, disables authentication support in +this realm, so that it only supports user lookups. +(See the {xpack-ref}/run-as-privilege.html[run as] and +{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features). +Defaults to `true`. + [[ref-ldap-settings]] [float] ===== LDAP realm settings @@ -462,6 +501,12 @@ Defaults to `100000`. (Expert Setting) Specifies the hashing algorithm that is used for the in-memory cached user credentials. See <>. Defaults to `ssha256`. +`authentication.enabled`:: If set to `false`, disables authentication support in +this realm, so that it only supports user lookups. +(See the {xpack-ref}/run-as-privilege.html[run as] and +{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features). +Defaults to `true`. + [[ref-ad-settings]] [float] ===== Active Directory realm settings @@ -703,6 +748,12 @@ Defaults to `100000`. (Expert Setting) Specifies the hashing algorithm that is used for the in-memory cached user credentials. See <>. Defaults to `ssha256`. +`authentication.enabled`:: If set to `false`, disables authentication support in +this realm, so that it only supports user lookups. 
+(See the {xpack-ref}/run-as-privilege.html[run as] and +{stack-ov}/realm-chains.html#authorization_realms[authorization realms] features). +Defaults to `true`. + `follow_referrals`:: If set to `true` {security} follows referrals returned by the LDAP server. Referrals are URLs returned by the server that are to be used to continue the @@ -1320,6 +1371,7 @@ a PKCS#12 container includes trusted certificate ("anchor") entries look for `openssl pkcs12 -info` output, or `trustedCertEntry` in the `keytool -list` output. +[float] ===== PKCS#11 tokens When using a PKCS#11 cryptographic token, which contains the @@ -1340,7 +1392,7 @@ a keystore or a truststore for Elasticsearch, the PIN for the token can be configured by setting the appropriate value to `xpack.ssl.truststore.password` or `xpack.ssl.truststore.secure_password`. In the absence of the above, {es} will fallback to use he appropriate JVM setting (`-Djavax.net.ssl.trustStorePassword`) -if that s set. +if that is set. Since there can only be one PKCS#11 token configured, only one keystore and truststore will be usable for configuration in {es}. 
This in turn means that only one certificate can be used for TLS both in the transport and the diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java index 5c769774f6197..2d50f160812dc 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java @@ -31,12 +31,12 @@ final class DateField { // no instance private DateField() {} - + // supported variables static final String VALUE_VARIABLE = "value"; static final String EMPTY_VARIABLE = "empty"; static final String LENGTH_VARIABLE = "length"; - + // supported methods static final String GETVALUE_METHOD = "getValue"; static final String ISEMPTY_METHOD = "isEmpty"; @@ -47,7 +47,7 @@ private DateField() {} static final String MEDIAN_METHOD = "median"; static final String SUM_METHOD = "sum"; static final String COUNT_METHOD = "count"; - + // date-specific static final String GET_YEAR_METHOD = "getYear"; static final String GET_MONTH_METHOD = "getMonth"; @@ -55,7 +55,7 @@ private DateField() {} static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; static final String GET_MINUTES_METHOD = "getMinutes"; static final String GET_SECONDS_METHOD = "getSeconds"; - + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { case VALUE_VARIABLE: @@ -68,7 +68,7 @@ static ValueSource getVariable(IndexFieldData fieldData, String fieldName, St throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "]."); } } - + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { switch (method) { case GETVALUE_METHOD: diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java 
b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java index 7cd918cf914b7..06875632134d7 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java @@ -29,12 +29,12 @@ final class NumericField { // no instance private NumericField() {} - + // supported variables static final String VALUE_VARIABLE = "value"; static final String EMPTY_VARIABLE = "empty"; static final String LENGTH_VARIABLE = "length"; - + // supported methods static final String GETVALUE_METHOD = "getValue"; static final String ISEMPTY_METHOD = "isEmpty"; @@ -45,7 +45,7 @@ private NumericField() {} static final String MEDIAN_METHOD = "median"; static final String SUM_METHOD = "sum"; static final String COUNT_METHOD = "count"; - + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { case VALUE_VARIABLE: @@ -55,11 +55,11 @@ static ValueSource getVariable(IndexFieldData fieldData, String fieldName, St case LENGTH_VARIABLE: return new CountMethodValueSource(fieldData); default: - throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + + throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."); } } - + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { switch (method) { case GETVALUE_METHOD: diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java index 4989cd35f7b7e..16513c57bb337 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java +++ 
b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java @@ -49,19 +49,6 @@ public static Netty4CorsConfigBuilder forAnyOrigin() { return new Netty4CorsConfigBuilder(); } - /** - * Creates a {@link Netty4CorsConfigBuilder} instance with the specified origin. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public static Netty4CorsConfigBuilder forOrigin(final String origin) { - if ("*".equals(origin)) { - return new Netty4CorsConfigBuilder(); - } - return new Netty4CorsConfigBuilder(origin); - } - - /** * Create a {@link Netty4CorsConfigBuilder} instance with the specified pattern origin. * @@ -94,7 +81,6 @@ public static Netty4CorsConfigBuilder forOrigins(final String... origins) { final Set requestMethods = new HashSet<>(); final Set requestHeaders = new HashSet<>(); final Map> preflightHeaders = new HashMap<>(); - private boolean noPreflightHeaders; boolean shortCircuit; /** @@ -130,18 +116,6 @@ public static Netty4CorsConfigBuilder forOrigins(final String... origins) { anyOrigin = false; } - /** - * Web browsers may set the 'Origin' request header to 'null' if a resource is loaded - * from the local file system. Calling this method will enable a successful CORS response - * with a wildcard for the CORS response header 'Access-Control-Allow-Origin'. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - Netty4CorsConfigBuilder allowNullOrigin() { - allowNullOrigin = true; - return this; - } - /** * Disables CORS support. * @@ -219,71 +193,6 @@ public Netty4CorsConfigBuilder allowedRequestHeaders(final String... headers) { return this; } - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * @param name the name of the HTTP header. 
- * @param values the values for the HTTP header. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) { - if (values.length == 1) { - preflightHeaders.put(name, new ConstantValueGenerator(values[0])); - } else { - preflightResponseHeader(name, Arrays.asList(values)); - } - return this; - } - - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * @param name the name of the HTTP header. - * @param value the values for the HTTP header. - * @param the type of values that the Iterable contains. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable value) { - preflightHeaders.put(name, new ConstantValueGenerator(value)); - return this; - } - - /** - * Returns HTTP response headers that should be added to a CORS preflight response. - * - * An intermediary like a load balancer might require that a CORS preflight request - * have certain headers set. This enables such headers to be added. - * - * Some values must be dynamically created when the HTTP response is created, for - * example the 'Date' response header. This can be accomplished by using a Callable - * which will have its 'call' method invoked when the HTTP response is created. - * - * @param name the name of the HTTP header. - * @param valueGenerator a Callable which will be invoked at HTTP response creation. - * @param the type of the value that the Callable can return. - * @return {@link Netty4CorsConfigBuilder} to support method chaining. 
- */ - public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable valueGenerator) { - preflightHeaders.put(name, valueGenerator); - return this; - } - - /** - * Specifies that no preflight response headers should be added to a preflight response. - * - * @return {@link Netty4CorsConfigBuilder} to support method chaining. - */ - public Netty4CorsConfigBuilder noPreflightResponseHeaders() { - noPreflightHeaders = true; - return this; - } - /** * Specifies that a CORS request should be rejected if it's invalid before being * further processing. @@ -305,7 +214,7 @@ public Netty4CorsConfigBuilder shortCircuit() { * @return {@link Netty4CorsConfig} the configured CorsConfig instance. */ public Netty4CorsConfig build() { - if (preflightHeaders.isEmpty() && !noPreflightHeaders) { + if (preflightHeaders.isEmpty()) { preflightHeaders.put("date", DateValueGenerator.INSTANCE); preflightHeaders.put("content-length", new ConstantValueGenerator("0")); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 76d7864c71692..ad216f8ff2cf7 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -22,8 +22,6 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.Unpooled; -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; import io.netty.util.NettyRuntime; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; @@ -34,7 +32,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; @@ -133,27 +130,4 @@ static 
BytesReference toBytesReference(final ByteBuf buffer, final int size) { return new ByteBufBytesReference(buffer, size); } - public static void closeChannels(final Collection channels) throws IOException { - IOException closingExceptions = null; - final List futures = new ArrayList<>(); - for (final Channel channel : channels) { - try { - if (channel != null && channel.isOpen()) { - futures.add(channel.close()); - } - } catch (Exception e) { - if (closingExceptions == null) { - closingExceptions = new IOException("failed to close channels"); - } - closingExceptions.addSuppressed(e); - } - } - for (final ChannelFuture future : futures) { - future.awaitUninterruptibly(); - } - - if (closingExceptions != null) { - throw closingExceptions; - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index 6896432bcdd55..642270113cf76 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.NestedSortBuilder; import java.io.IOException; @@ -129,10 +130,12 @@ public static class Nested { private final BitSetProducer rootFilter; private final Query innerQuery; + private final NestedSortBuilder nestedSort; - public Nested(BitSetProducer rootFilter, Query innerQuery) { + public Nested(BitSetProducer rootFilter, Query innerQuery, NestedSortBuilder nestedSort) { this.rootFilter = rootFilter; this.innerQuery = innerQuery; + this.nestedSort = nestedSort; } public Query getInnerQuery() { @@ -143,6 +146,8 @@ public BitSetProducer getRootFilter() { return rootFilter; } + public NestedSortBuilder getNestedSort() { return nestedSort; } + /** * 
Get a {@link BitDocIdSet} that matches the root documents. */ diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 8e0a31859a132..859848df49ed1 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -91,7 +91,9 @@ protected SortedDocValues getSortedDocValues(LeafReaderContext context, String f } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, rootDocs, innerDocs); + final int maxChildren = nested.getNestedSort() != null ? + nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, rootDocs, innerDocs, maxChildren); } if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) { return selectedValues; @@ -119,7 +121,8 @@ protected BinaryDocValues getBinaryDocValues(LeafReaderContext context, String f } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues; } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 1ae3fb692ec61..0a273d88380eb 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -76,7 +76,8 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues.getRawDoubleValues(); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index b271dd54bd7fd..beb27644a1b95 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -68,7 +68,8 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? 
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues.getRawFloatValues(); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 362dde6099680..f323709e8f5ee 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -67,7 +67,8 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); + final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; + selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren); } return selectedValues; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index eaaa5f74fa4d5..249a110b01dc7 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -48,7 +48,6 @@ * Defines what values to pick in the case a document contains multiple values for a particular field. */ public enum MultiValueMode implements Writeable { - /** * Pick the sum of all the values. 
*/ @@ -64,16 +63,21 @@ protected long pick(SortedNumericDocValues values) throws IOException { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; long totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } return totalCount > 0 ? totalValue : missingValue; @@ -90,18 +94,23 @@ protected double pick(SortedNumericDoubleValues values) throws IOException { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; double totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } + return totalCount > 0 ? 
totalValue : missingValue; } }, @@ -117,26 +126,30 @@ protected long pick(SortedNumericDocValues values) throws IOException { for (int index = 0; index < count; ++index) { total += values.nextValue(); } - return count > 1 ? Math.round((double)total/(double)count) : total; + return count > 1 ? Math.round((double) total / (double) count) : total; } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; long totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } if (totalCount < 1) { return missingValue; } - return totalCount > 1 ? Math.round((double)totalValue/(double)totalCount) : totalValue; + return totalCount > 1 ? 
Math.round((double) totalValue / (double) totalCount) : totalValue; } @Override @@ -146,26 +159,30 @@ protected double pick(SortedNumericDoubleValues values) throws IOException { for (int index = 0; index < count; ++index) { total += values.nextValue(); } - return total/count; + return total / count; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int totalCount = 0; double totalValue = 0; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int index = 0; index < count; ++index) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int index = 0; index < docCount; ++index) { totalValue += values.nextValue(); } - totalCount += count; + totalCount += docCount; } } if (totalCount < 1) { return missingValue; } - return totalValue/totalCount; + return totalValue / totalCount; } }, @@ -210,11 +227,15 @@ protected long pick(SortedNumericDocValues values) throws IOException { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; long minValue = Long.MAX_VALUE; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } minValue = Math.min(minValue, values.nextValue()); hasValue = true; } @@ -228,11 +249,15 @@ protected double pick(SortedNumericDoubleValues 
values) throws IOException { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; double minValue = Double.POSITIVE_INFINITY; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } minValue = Math.min(minValue, values.nextValue()); hasValue = true; } @@ -246,23 +271,27 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException { } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { - BytesRefBuilder value = null; + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { + BytesRefBuilder bytesRefBuilder = null; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final BytesRef innerValue = values.binaryValue(); - if (value == null) { + if (bytesRefBuilder == null) { builder.copyBytes(innerValue); - value = builder; + bytesRefBuilder = builder; } else { - final BytesRef min = value.get().compareTo(innerValue) <= 0 ? value.get() : innerValue; + final BytesRef min = bytesRefBuilder.get().compareTo(innerValue) <= 0 ? bytesRefBuilder.get() : innerValue; if (min == innerValue) { - value.copyBytes(min); + bytesRefBuilder.copyBytes(min); } } } } - return value == null ? null : value.get(); + return bytesRefBuilder == null ? 
null : bytesRefBuilder.get(); } @Override @@ -271,16 +300,21 @@ protected int pick(SortedSetDocValues values) throws IOException { } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int ord = Integer.MAX_VALUE; boolean hasValue = false; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final int innerOrd = values.ordValue(); ord = Math.min(ord, innerOrd); hasValue = true; } } + return hasValue ? ord : -1; } }, @@ -299,13 +333,17 @@ protected long pick(SortedNumericDocValues values) throws IOException { } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { boolean hasValue = false; long maxValue = Long.MIN_VALUE; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int i = 0; i < count - 1; ++i) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int i = 0; i < docCount - 1; ++i) { values.nextValue(); } maxValue = Math.max(maxValue, values.nextValue()); @@ -325,13 +363,17 @@ protected double pick(SortedNumericDoubleValues values) throws IOException { } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, 
int maxChildren) throws IOException { boolean hasValue = false; double maxValue = Double.NEGATIVE_INFINITY; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { - final int count = values.docValueCount(); - for (int i = 0; i < count - 1; ++i) { + if (++count > maxChildren) { + break; + } + final int docCount = values.docValueCount(); + for (int i = 0; i < docCount - 1; ++i) { values.nextValue(); } maxValue = Math.max(maxValue, values.nextValue()); @@ -351,23 +393,27 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException { } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { - BytesRefBuilder value = null; + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { + BytesRefBuilder bytesRefBuilder = null; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } final BytesRef innerValue = values.binaryValue(); - if (value == null) { + if (bytesRefBuilder == null) { builder.copyBytes(innerValue); - value = builder; + bytesRefBuilder = builder; } else { - final BytesRef max = value.get().compareTo(innerValue) > 0 ? value.get() : innerValue; + final BytesRef max = bytesRefBuilder.get().compareTo(innerValue) > 0 ? bytesRefBuilder.get() : innerValue; if (max == innerValue) { - value.copyBytes(max); + bytesRefBuilder.copyBytes(max); } } } } - return value == null ? null : value.get(); + return bytesRefBuilder == null ? 
null : bytesRefBuilder.get(); } @Override @@ -380,10 +426,14 @@ protected int pick(SortedSetDocValues values) throws IOException { } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { int ord = -1; + int count = 0; for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { if (values.advanceExact(doc)) { + if (++count > maxChildren) { + break; + } ord = Math.max(ord, values.ordValue()); } } @@ -458,7 +508,7 @@ protected long pick(SortedNumericDocValues values) throws IOException { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return FieldData.replaceMissing(DocValues.emptyNumeric(), missingValue); } @@ -486,7 +536,7 @@ public boolean advanceExact(int parentDoc) throws IOException { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc, maxChildren); return true; } @@ -502,7 +552,7 @@ public long longValue() { }; } - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int 
endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -555,7 +605,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return FieldData.replaceMissing(FieldData.emptyNumericDouble(), missingValue); } @@ -580,7 +630,7 @@ public boolean advanceExact(int parentDoc) throws IOException { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(values, missingValue, childDocs, firstChildDoc, parentDoc, maxChildren); return true; } @@ -591,7 +641,7 @@ public double doubleValue() throws IOException { }; } - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -663,7 +713,7 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public BinaryDocValues select(final 
SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc) throws IOException { + public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return select(FieldData.emptySortedBinary(), missingValue); } @@ -692,7 +742,7 @@ public boolean advanceExact(int parentDoc) throws IOException { } lastSeenParentDoc = parentDoc; - lastEmittedValue = pick(selectedValues, builder, childDocs, firstChildDoc, parentDoc); + lastEmittedValue = pick(selectedValues, builder, childDocs, firstChildDoc, parentDoc, maxChildren); if (lastEmittedValue == null) { lastEmittedValue = missingValue; } @@ -706,7 +756,7 @@ public BytesRef binaryValue() throws IOException { }; } - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -718,7 +768,7 @@ protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSe */ public SortedDocValues select(final SortedSetDocValues values) { if (values.getValueCount() >= Integer.MAX_VALUE) { - throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE-1) + " unique terms are unsupported"); + throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE - 1) + " unique terms are unsupported"); } final SortedDocValues singleton = DocValues.unwrapSingleton(values); @@ -779,7 +829,7 @@ protected int pick(SortedSetDocValues values) throws IOException { * NOTE: Calling the returned instance on docs that 
are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs) throws IOException { + public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxChildren) throws IOException { if (parentDocs == null || childDocs == null) { return select(DocValues.emptySortedSet()); } @@ -817,7 +867,7 @@ public boolean advanceExact(int parentDoc) throws IOException { } docID = lastSeenParentDoc = parentDoc; - lastEmittedOrd = pick(selectedValues, childDocs, firstChildDoc, parentDoc); + lastEmittedOrd = pick(selectedValues, childDocs, firstChildDoc, parentDoc, maxChildren); return lastEmittedOrd != -1; } @@ -833,7 +883,7 @@ public int ordValue() { }; } - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 6a64b1c0cc940..19a62d7444376 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -332,6 +332,14 @@ public SortFieldAndFormat build(QueryShardContext context) throws IOException { final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && 
nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 6adad6dabf0b5..2c8c4e234dbb8 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -633,6 +634,14 @@ public SortFieldAndFormat build(QueryShardContext context) throws IOException { final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { @@ -672,8 +681,10 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); + final int maxChildren = nested.getNestedSort() != null ? 
+ nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE; selectedValues = finalSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs, - context.reader().maxDoc()); + context.reader().maxDoc(), maxChildren); } return selectedValues.getRawDoubleValues(); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java index a6ad028403453..03cb64da5a9d4 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/NestedSortBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,9 +39,11 @@ public class NestedSortBuilder implements Writeable, ToXContentObject { public static final ParseField NESTED_FIELD = new ParseField("nested"); public static final ParseField PATH_FIELD = new ParseField("path"); public static final ParseField FILTER_FIELD = new ParseField("filter"); + public static final ParseField MAX_CHILDREN_FIELD = new ParseField("max_children"); private final String path; private QueryBuilder filter; + private int maxChildren = Integer.MAX_VALUE; private NestedSortBuilder nestedSort; public NestedSortBuilder(String path) { @@ -51,6 +54,11 @@ public NestedSortBuilder(StreamInput in) throws IOException { path = in.readOptionalString(); filter = in.readOptionalNamedWriteable(QueryBuilder.class); nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + maxChildren = in.readVInt(); + } else { + maxChildren = Integer.MAX_VALUE; + } } public String getPath() { @@ -61,11 +69,18 @@ public QueryBuilder getFilter() { return filter; } + public int getMaxChildren() { return maxChildren; } + 
public NestedSortBuilder setFilter(final QueryBuilder filter) { this.filter = filter; return this; } + public NestedSortBuilder setMaxChildren(final int maxChildren) { + this.maxChildren = maxChildren; + return this; + } + public NestedSortBuilder getNestedSort() { return nestedSort; } @@ -83,6 +98,11 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeOptionalString(path); out.writeOptionalNamedWriteable(filter); out.writeOptionalWriteable(nestedSort); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeVInt(maxChildren); + } else { + out.writeVInt(Integer.MAX_VALUE); + } } @Override @@ -94,6 +114,11 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa if (filter != null) { builder.field(FILTER_FIELD.getPreferredName(), filter); } + + if (maxChildren != Integer.MAX_VALUE) { + builder.field(MAX_CHILDREN_FIELD.getPreferredName(), maxChildren); + } + if (nestedSort != null) { builder.field(NESTED_FIELD.getPreferredName(), nestedSort); } @@ -104,6 +129,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa public static NestedSortBuilder fromXContent(XContentParser parser) throws IOException { String path = null; QueryBuilder filter = null; + int maxChildren = Integer.MAX_VALUE; NestedSortBuilder nestedSort = null; XContentParser.Token token = parser.currentToken(); @@ -116,6 +142,8 @@ public static NestedSortBuilder fromXContent(XContentParser parser) throws IOExc path = parser.text(); } else if (currentName.equals(FILTER_FIELD.getPreferredName())) { filter = parseNestedFilter(parser); + } else if (currentName.equals(MAX_CHILDREN_FIELD.getPreferredName())) { + maxChildren = parser.intValue(); } else if (currentName.equals(NESTED_FIELD.getPreferredName())) { nestedSort = NestedSortBuilder.fromXContent(parser); } else { @@ -129,7 +157,7 @@ public static NestedSortBuilder fromXContent(XContentParser parser) throws IOExc throw new IllegalArgumentException("malformed 
nested sort format, must start with an object"); } - return new NestedSortBuilder(path).setFilter(filter).setNestedSort(nestedSort); + return new NestedSortBuilder(path).setFilter(filter).setMaxChildren(maxChildren).setNestedSort(nestedSort); } @Override @@ -143,12 +171,13 @@ public boolean equals(final Object obj) { NestedSortBuilder that = (NestedSortBuilder) obj; return Objects.equals(path, that.path) && Objects.equals(filter, that.filter) + && Objects.equals(maxChildren, that.maxChildren) && Objects.equals(nestedSort, that.nestedSort); } @Override public int hashCode() { - return Objects.hash(path, filter, nestedSort); + return Objects.hash(path, filter, nestedSort, maxChildren); } public NestedSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { @@ -164,7 +193,7 @@ public NestedSortBuilder rewrite(QueryRewriteContext ctx) throws IOException { rewriteNested = nestedSort.rewrite(ctx); } if (rewriteFilter != this.filter || rewriteNested != this.nestedSort) { - return new NestedSortBuilder(this.path).setFilter(rewriteFilter).setNestedSort(rewriteNested); + return new NestedSortBuilder(this.path).setFilter(rewriteFilter).setMaxChildren(this.maxChildren).setNestedSort(rewriteNested); } else { return this; } diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 1b71c51d4162b..e425755a55edd 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -319,6 +319,14 @@ public SortFieldAndFormat build(QueryShardContext context) throws IOException { final Nested nested; if (nestedSort != null) { + if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on v6.5.0 or higher"); + } + if 
(nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on last level of nested sort"); + } // new nested sorts takes priority nested = resolveNested(context, nestedSort); } else { diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 9d2a5c9f1e256..a7861dee9bba0 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -195,7 +195,7 @@ protected static Nested resolveNested(QueryShardContext context, NestedSortBuild } else { parentQuery = objectMapper.nestedTypeFilter(); } - return new Nested(context.bitsetFilter(parentQuery), childQuery); + return new Nested(context.bitsetFilter(parentQuery), childQuery, nestedSort); } private static Query resolveNestedQuery(QueryShardContext context, NestedSortBuilder nestedSort, Query parentQuery) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskId.java b/server/src/main/java/org/elasticsearch/tasks/TaskId.java index 1aeceef247f47..f92997b047c13 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskId.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskId.java @@ -19,10 +19,13 @@ package org.elasticsearch.tasks; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -96,6 +99,15 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(id); } + public static ContextParser parser() { + return (p, c) -> { + if 
(p.currentToken() == XContentParser.Token.VALUE_STRING) { + return new TaskId(p.text()); + } + throw new ElasticsearchParseException("Expected a string but found [{}] instead", p.currentToken()); + }; + } + public String getNodeId() { return nodeId; } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java index a866ad9bb2dd1..46b68ce16028c 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResult.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResult.java @@ -76,7 +76,7 @@ public TaskResult(TaskInfo task, Exception error) throws IOException { * Construct a {@linkplain TaskResult} for a task that completed successfully. */ public TaskResult(TaskInfo task, ToXContent response) throws IOException { - this(true, task, null, toXContent(response)); + this(true, task, null, XContentHelper.toXContent(response, Requests.INDEX_CONTENT_TYPE, true)); } private TaskResult(boolean completed, TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) { @@ -222,16 +222,6 @@ public int hashCode() { return Objects.hash(completed, task, getErrorAsMap(), getResponseAsMap()); } - private static BytesReference toXContent(ToXContent result) throws IOException { - try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { - // Elasticsearch's Response object never emit starting or ending objects. Most other implementers of ToXContent do.... 
- builder.startObject(); - result.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - return BytesReference.bytes(builder); - } - } - private static BytesReference toXContent(Exception error) throws IOException { try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { builder.startObject(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index ee8f18aa11e6b..7decbe9024fdf 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -161,7 +161,7 @@ public void tearDown() throws Exception { protected Nested createNested(IndexSearcher searcher, Query parentFilter, Query childFilter) throws IOException { BitsetFilterCache s = indexService.cache().bitsetFilterCache(); - return new Nested(s.getBitSetProducer(parentFilter), childFilter); + return new Nested(s.getBitSetProducer(parentFilter), childFilter, null); } public void testEmpty() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index b64f6ee0ee3d1..5cfee0a021358 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -109,7 +109,8 @@ public long longValue() { verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public 
void testMultiValuedLongs() throws Exception { @@ -147,7 +148,8 @@ public int docValueCount() { verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedNumeric(Supplier supplier, int maxDoc) throws IOException { @@ -210,11 +212,11 @@ private void verifyLongValueCanCalledMoreThanOnce(NumericDocValues values, long } } - private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { SortedNumericDocValues values = supplier.get(); - final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -228,8 +230,12 @@ private void verifySortedNumeric(Supplier supplier, int expected = Long.MAX_VALUE; } int numValues = 0; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { expected += values.nextValue(); @@ -285,7 +291,8 @@ public double doubleValue() { verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedDoubles() throws Exception { @@ -323,7 +330,8 @@ public int docValueCount() { verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedNumericDouble(Supplier supplier, int maxDoc) throws IOException { @@ -385,11 +393,11 @@ private void verifyDoubleValueCanCalledMoreThanOnce(NumericDoubleValues values, } } - private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet 
rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { SortedNumericDoubleValues values = supplier.get(); - final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -403,8 +411,12 @@ private void verifySortedNumericDouble(Supplier suppl expected = Long.MAX_VALUE; } int numValues = 0; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { expected += values.nextValue(); @@ -463,7 +475,8 @@ public BytesRef binaryValue() { verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedStrings() throws Exception { @@ -501,7 +514,8 @@ public int docValueCount() { verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedBinary(multiValues, numDocs, 
rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException { @@ -548,11 +562,11 @@ private void verifyBinaryValueCanCalledMoreThanOnce(BinaryDocValues values, Byte } } - private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedBinaryDocValues values = supplier.get(); - final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); + final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc, maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? 
rootDocs.nextSetBit(root + 1) : -1) { assertTrue(selected.advanceExact(root)); @@ -560,8 +574,12 @@ private void verifySortedBinary(Supplier supplier, int ma verifyBinaryValueCanCalledMoreThanOnce(selected, actual); BytesRef expected = null; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (int j = 0; j < values.docValueCount(); ++j) { if (expected == null) { expected = BytesRef.deepCopyOf(values.nextValue()); @@ -630,7 +648,8 @@ public int getValueCount() { verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } public void testMultiValuedOrds() throws Exception { @@ -676,7 +695,8 @@ public long getValueCount() { verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, Integer.MAX_VALUE); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs, randomIntBetween(1, numDocs)); } private void verifySortedSet(Supplier supplier, int maxDoc) throws IOException { @@ -715,10 +735,10 @@ private void verifyOrdValueCanCalledMoreThanOnce(SortedDocValues values, long ex } } - private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs, int maxChildren) throws IOException { for (MultiValueMode 
mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { SortedSetDocValues values = supplier.get(); - final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L)); + final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L), maxChildren); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { int actual = -1; @@ -727,8 +747,12 @@ private void verifySortedSet(Supplier supplier, int maxDoc, verifyOrdValueCanCalledMoreThanOnce(selected, actual); } int expected = -1; + int count = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { if (values.advanceExact(child)) { + if (++count > maxChildren) { + break; + } for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { if (expected == -1) { expected = (int) ord; diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 40d6b26b4f930..352e27e4f4f44 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -1423,9 +1423,16 @@ public void testNestedSort() throws IOException, InterruptedException, Execution ensureGreen(); client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject() - .startObject("nested") - .field("foo", "bar bar") - .endObject() + .startArray("nested") + .startObject().field("foo", "bar bar").endObject() + .startObject().field("foo", "abc abc").endObject() + .endArray() + .endObject()).execute().actionGet(); + client().prepareIndex("test", "type", "2").setSource(jsonBuilder().startObject() + .startArray("nested") + .startObject().field("foo", "abc abc").endObject() + .startObject().field("foo", "cba 
bca").endObject() + .endArray() .endObject()).execute().actionGet(); refresh(); @@ -1436,11 +1443,27 @@ public void testNestedSort() throws IOException, InterruptedException, Execution .execute().actionGet(); assertNoFailures(searchResponse); SearchHit[] hits = searchResponse.getHits().getHits(); - for (int i = 0; i < hits.length; ++i) { - assertThat(hits[i].getSortValues().length, is(1)); - assertThat(hits[i].getSortValues()[0], is("bar")); - } + assertThat(hits.length, is(2)); + assertThat(hits[0].getSortValues().length, is(1)); + assertThat(hits[1].getSortValues().length, is(1)); + assertThat(hits[0].getSortValues()[0], is("cba")); + assertThat(hits[1].getSortValues()[0], is("bar")); + // We sort on nested fields with max_children limit + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addSort(SortBuilders + .fieldSort("nested.foo") + .setNestedSort(new NestedSortBuilder("nested").setMaxChildren(1)) + .order(SortOrder.DESC)) + .execute().actionGet(); + assertNoFailures(searchResponse); + hits = searchResponse.getHits().getHits(); + assertThat(hits.length, is(2)); + assertThat(hits[0].getSortValues().length, is(1)); + assertThat(hits[1].getSortValues().length, is(1)); + assertThat(hits[0].getSortValues()[0], is("bar")); + assertThat(hits[1].getSortValues()[0], is("abc")); // We sort on nested sub field searchResponse = client().prepareSearch() @@ -1449,10 +1472,11 @@ public void testNestedSort() throws IOException, InterruptedException, Execution .execute().actionGet(); assertNoFailures(searchResponse); hits = searchResponse.getHits().getHits(); - for (int i = 0; i < hits.length; ++i) { - assertThat(hits[i].getSortValues().length, is(1)); - assertThat(hits[i].getSortValues()[0], is("bar bar")); - } + assertThat(hits.length, is(2)); + assertThat(hits[0].getSortValues().length, is(1)); + assertThat(hits[1].getSortValues().length, is(1)); + assertThat(hits[0].getSortValues()[0], is("cba bca")); + assertThat(hits[1].getSortValues()[0], 
is("bar bar")); } public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 193695ac69362..8d3c6a3565f93 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -91,9 +91,9 @@ public Set expandJobIds(String expression, boolean allowNoJobs) { return groupOrJobLookup.expandJobIds(expression, allowNoJobs); } - public boolean isJobDeleted(String jobId) { + public boolean isJobDeleting(String jobId) { Job job = jobs.get(jobId); - return job == null || job.isDeleted(); + return job == null || job.isDeleting(); } public SortedMap getDatafeeds() { @@ -287,7 +287,7 @@ public Builder deleteJob(String jobId, PersistentTasksCustomMetaData tasks) { if (job == null) { throw new ResourceNotFoundException("job [" + jobId + "] does not exist"); } - if (job.isDeleted() == false) { + if (job.isDeleting() == false) { throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because it hasn't marked as deleted"); } return this; @@ -318,7 +318,7 @@ public Builder putDatafeed(DatafeedConfig datafeedConfig, Map he private void checkJobIsAvailableForDatafeed(String jobId) { Job job = jobs.get(jobId); - if (job == null || job.isDeleted()) { + if (job == null || job.isDeleting()) { throw ExceptionsHelper.missingJobException(jobId); } Optional existingDatafeed = getDatafeedByJobId(jobId); @@ -387,14 +387,14 @@ public MlMetadata build() { return new MlMetadata(jobs, datafeeds); } - public void markJobAsDeleted(String jobId, PersistentTasksCustomMetaData tasks, boolean allowDeleteOpenJob) { + public void markJobAsDeleting(String jobId, PersistentTasksCustomMetaData tasks, boolean allowDeleteOpenJob) { Job job = jobs.get(jobId); if (job == 
null) { throw ExceptionsHelper.missingJobException(jobId); } - if (job.isDeleted()) { + if (job.isDeleting()) { // Job still exists but is already being deleted - throw new JobAlreadyMarkedAsDeletedException(); + return; } checkJobHasNoDatafeed(jobId); @@ -408,7 +408,7 @@ public void markJobAsDeleted(String jobId, PersistentTasksCustomMetaData tasks, } } Job.Builder jobBuilder = new Job.Builder(job); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); putJob(jobBuilder.build(), true); } @@ -430,7 +430,4 @@ public static MlMetadata getMlMetadata(ClusterState state) { } return mlMetadata; } - - public static class JobAlreadyMarkedAsDeletedException extends RuntimeException { - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 9fbde4721cd6a..6b279e0852183 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -42,6 +42,11 @@ public static class Request extends AcknowledgedRequest { private String jobId; private boolean force; + /** + * Should this task store its result? + */ + private boolean shouldStoreResult; + public Request(String jobId) { this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } @@ -64,6 +69,18 @@ public void setForce(boolean force) { this.force = force; } + /** + * Should this task store its result after it has finished? 
+ */ + public void setShouldStoreResult(boolean shouldStoreResult) { + this.shouldStoreResult = shouldStoreResult; + } + + @Override + public boolean getShouldStoreResult() { + return shouldStoreResult; + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index a5293cdcbc75d..5a352ab26657c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -75,7 +75,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField MODEL_SNAPSHOT_MIN_VERSION = new ParseField("model_snapshot_min_version"); public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETED = new ParseField("deleted"); + public static final ParseField DELETING = new ParseField("deleting"); // Used for QueryPage public static final ParseField RESULTS_FIELD = new ParseField("jobs"); @@ -119,7 +119,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - parser.declareBoolean(Builder::setDeleted, DELETED); + parser.declareBoolean(Builder::setDeleting, DELETING); return parser; } @@ -152,14 +152,14 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final String modelSnapshotId; private final Version modelSnapshotMinVersion; private final String resultsIndexName; - private final boolean 
deleted; + private final boolean deleting; private Job(String jobId, String jobType, Version jobVersion, List groups, String description, Date createTime, Date finishedTime, Long establishedModelMemory, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, - String modelSnapshotId, Version modelSnapshotMinVersion, String resultsIndexName, boolean deleted) { + String modelSnapshotId, Version modelSnapshotMinVersion, String resultsIndexName, boolean deleting) { this.jobId = jobId; this.jobType = jobType; @@ -181,7 +181,7 @@ private Job(String jobId, String jobType, Version jobVersion, List group this.modelSnapshotId = modelSnapshotId; this.modelSnapshotMinVersion = modelSnapshotMinVersion; this.resultsIndexName = resultsIndexName; - this.deleted = deleted; + this.deleting = deleting; } public Job(StreamInput in) throws IOException { @@ -224,7 +224,7 @@ public Job(StreamInput in) throws IOException { modelSnapshotMinVersion = null; } resultsIndexName = in.readString(); - deleted = in.readBoolean(); + deleting = in.readBoolean(); } /** @@ -375,8 +375,8 @@ public Version getModelSnapshotMinVersion() { return modelSnapshotMinVersion; } - public boolean isDeleted() { - return deleted; + public boolean isDeleting() { + return deleting; } /** @@ -489,7 +489,7 @@ public void writeTo(StreamOutput out) throws IOException { } } out.writeString(resultsIndexName); - out.writeBoolean(deleted); + out.writeBoolean(deleting); } @Override @@ -554,8 +554,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.field(MODEL_SNAPSHOT_MIN_VERSION.getPreferredName(), modelSnapshotMinVersion); } builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); - if (params.paramAsBoolean("all", false)) { - 
builder.field(DELETED.getPreferredName(), deleted); + if (deleting) { + builder.field(DELETING.getPreferredName(), deleting); } return builder; } @@ -591,7 +591,7 @@ public boolean equals(Object other) { && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) && Objects.equals(this.modelSnapshotMinVersion, that.modelSnapshotMinVersion) && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleted, that.deleted); + && Objects.equals(this.deleting, that.deleting); } @Override @@ -599,7 +599,7 @@ public int hashCode() { return Objects.hash(jobId, jobType, jobVersion, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleting); } // Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString() @@ -647,7 +647,7 @@ public static class Builder implements Writeable, ToXContentObject { private String modelSnapshotId; private Version modelSnapshotMinVersion; private String resultsIndexName; - private boolean deleted; + private boolean deleting; public Builder() { } @@ -677,7 +677,7 @@ public Builder(Job job) { this.modelSnapshotId = job.getModelSnapshotId(); this.modelSnapshotMinVersion = job.getModelSnapshotMinVersion(); this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleted = job.isDeleted(); + this.deleting = job.isDeleting(); } public Builder(StreamInput in) throws IOException { @@ -717,7 +717,7 @@ public Builder(StreamInput in) throws IOException { modelSnapshotMinVersion = null; } resultsIndexName = in.readOptionalString(); - deleted = in.readBoolean(); + deleting = in.readBoolean(); } public Builder setId(String id) { @@ -834,8 +834,8 @@ public 
Builder setResultsIndexName(String resultsIndexName) { return this; } - public Builder setDeleted(boolean deleted) { - this.deleted = deleted; + public Builder setDeleting(boolean deleting) { + this.deleting = deleting; return this; } @@ -911,7 +911,7 @@ public void writeTo(StreamOutput out) throws IOException { } } out.writeOptionalString(resultsIndexName); - out.writeBoolean(deleted); + out.writeBoolean(deleting); } @Override @@ -972,8 +972,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (resultsIndexName != null) { builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); } - if (params.paramAsBoolean("all", false)) { - builder.field(DELETED.getPreferredName(), deleted); + if (deleting) { + builder.field(DELETING.getPreferredName(), deleting); } builder.endObject(); @@ -1006,7 +1006,7 @@ public boolean equals(Object o) { && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) && Objects.equals(this.modelSnapshotMinVersion, that.modelSnapshotMinVersion) && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleted, that.deleted); + && Objects.equals(this.deleting, that.deleting); } @Override @@ -1014,7 +1014,7 @@ public int hashCode() { return Objects.hash(id, jobType, jobVersion, groups, description, analysisConfig, analysisLimits, dataDescription, createTime, finishedTime, establishedModelMemory, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId, - modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotMinVersion, resultsIndexName, deleting); } /** @@ -1127,7 +1127,7 @@ public Job build() { id, jobType, jobVersion, groups, description, createTime, finishedTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, 
customSettings, - modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleted); + modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleting); } private void checkValidBackgroundPersistInterval() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 3c571c9d60509..b669e8f1edcfb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -67,6 +67,8 @@ public final class Messages { public static final String JOB_AUDIT_DATAFEED_STARTED_FROM_TO = "Datafeed started (from: {0} to: {1}) with frequency [{2}]"; public static final String JOB_AUDIT_DATAFEED_STARTED_REALTIME = "Datafeed started in real-time"; public static final String JOB_AUDIT_DATAFEED_STOPPED = "Datafeed stopped"; + public static final String JOB_AUDIT_DELETING = "Deleting job by task with id ''{0}''"; + public static final String JOB_AUDIT_DELETING_FAILED = "Error deleting job: {0}"; public static final String JOB_AUDIT_DELETED = "Job deleted"; public static final String JOB_AUDIT_KILLING = "Killing job"; public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "Deleted results prior to {1}"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java index 2f218cfb2dc4d..f3cd2abf461b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobDeletionTask.java @@ -12,7 +12,17 @@ public class JobDeletionTask extends Task { + private volatile boolean started; + public JobDeletionTask(long id, 
String type, String action, String description, TaskId parentTask, Map headers) { super(id, type, action, description, parentTask, headers); } + + public void start() { + started = true; + } + + public boolean isStarted() { + return started; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java index e41b14099813a..eebcb6db7af87 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/NativeRealmSettings.java @@ -19,6 +19,6 @@ private NativeRealmSettings() {} * @return The {@link Setting setting configuration} for this realm type */ public static Set> getSettings() { - return CachingUsernamePasswordRealmSettings.getCachingSettings(); + return CachingUsernamePasswordRealmSettings.getSettings(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java index 110b8af9d7bcb..ed81d07d4ccc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/file/FileRealmSettings.java @@ -19,6 +19,6 @@ private FileRealmSettings() {} * @return The {@link Setting setting configuration} for this realm type */ public static Set> getSettings() { - return CachingUsernamePasswordRealmSettings.getCachingSettings(); + return CachingUsernamePasswordRealmSettings.getSettings(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java index 3f79c722be3f0..272b4115b285e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/LdapRealmSettings.java @@ -29,7 +29,7 @@ private LdapRealmSettings() {} */ public static Set> getSettings(String type) { Set> settings = new HashSet<>(); - settings.addAll(CachingUsernamePasswordRealmSettings.getCachingSettings()); + settings.addAll(CachingUsernamePasswordRealmSettings.getSettings()); settings.addAll(CompositeRoleMapperSettings.getSettings()); settings.add(LdapRealmSettings.EXECUTION_TIMEOUT); if (AD_TYPE.equals(type)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java index 6d060b0febbd4..6b7867e421180 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/CachingUsernamePasswordRealmSettings.java @@ -21,12 +21,15 @@ public final class CachingUsernamePasswordRealmSettings { public static final Setting CACHE_MAX_USERS_SETTING = Setting.intSetting("cache.max_users", DEFAULT_MAX_USERS, Setting.Property.NodeScope); + public static final Setting AUTHC_ENABLED_SETTING = Setting.boolSetting("authentication.enabled", true, + Setting.Property.NodeScope); + private CachingUsernamePasswordRealmSettings() {} /** * Returns the {@link Setting setting configuration} that is common for all caching realms */ - public static Set> getCachingSettings() { - return new HashSet<>(Arrays.asList(CACHE_HASH_ALGO_SETTING, CACHE_TTL_SETTING, 
CACHE_MAX_USERS_SETTING)); + public static Set> getSettings() { + return new HashSet<>(Arrays.asList(CACHE_HASH_ALGO_SETTING, CACHE_TTL_SETTING, CACHE_MAX_USERS_SETTING, AUTHC_ENABLED_SETTING)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index b11867f836507..87a0099580b5f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,13 +9,18 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.function.Predicate; import static org.apache.lucene.util.automaton.MinimizationOperations.minimize; @@ -27,14 +32,23 @@ public final class Automatons { - public static final Setting MAX_DETERMINIZED_STATES_SETTING = + static final Setting MAX_DETERMINIZED_STATES_SETTING = Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_MAX_DETERMINIZED_STATES, Setting.Property.NodeScope); + + static final Setting CACHE_ENABLED = + Setting.boolSetting("xpack.security.automata.cache.enabled", true, Setting.Property.NodeScope); + static final Setting CACHE_SIZE = + Setting.intSetting("xpack.security.automata.cache.size", 10_000, Setting.Property.NodeScope); + static final 
Setting CACHE_TTL = + Setting.timeSetting("xpack.security.automata.cache.ttl", TimeValue.timeValueHours(48), Setting.Property.NodeScope); + public static final Automaton EMPTY = Automata.makeEmpty(); public static final Automaton MATCH_ALL = Automata.makeAnyString(); - // this value is not final since we allow it to be set at runtime + // these values are not final since we allow them to be set at runtime private static int maxDeterminizedStates = 100000; + private static Cache cache = buildCache(Settings.EMPTY); static final char WILDCARD_STRING = '*'; // String equality with support for wildcards static final char WILDCARD_CHAR = '?'; // Char equality with support for wildcards @@ -57,6 +71,18 @@ public static Automaton patterns(Collection patterns) { if (patterns.isEmpty()) { return EMPTY; } + if (cache == null) { + return buildAutomaton(patterns); + } else { + try { + return cache.computeIfAbsent(Sets.newHashSet(patterns), ignore -> buildAutomaton(patterns)); + } catch (ExecutionException e) { + throw unwrapCacheException(e); + } + } + } + + private static Automaton buildAutomaton(Collection patterns) { List automata = new ArrayList<>(patterns.size()); for (String pattern : patterns) { final Automaton patternAutomaton = pattern(pattern); @@ -69,11 +95,23 @@ public static Automaton patterns(Collection patterns) { * Builds and returns an automaton that represents the given pattern. */ static Automaton pattern(String pattern) { + if (cache == null) { + return buildAutomaton(pattern); + } else { + try { + return cache.computeIfAbsent(pattern, ignore -> buildAutomaton(pattern)); + } catch (ExecutionException e) { + throw unwrapCacheException(e); + } + } + } + + private static Automaton buildAutomaton(String pattern) { if (pattern.startsWith("/")) { // it's a lucene regexp if (pattern.length() == 1 || !pattern.endsWith("/")) { throw new IllegalArgumentException("invalid pattern [" + pattern + "]. 
patterns starting with '/' " + - "indicate regular expression pattern and therefore must also end with '/'." + - " other patterns (those that do not start with '/') will be treated as simple wildcard patterns"); + "indicate regular expression pattern and therefore must also end with '/'." + + " other patterns (those that do not start with '/') will be treated as simple wildcard patterns"); } String regex = pattern.substring(1, pattern.length() - 1); return new RegExp(regex).toAutomaton(); @@ -84,16 +122,25 @@ static Automaton pattern(String pattern) { } } + private static RuntimeException unwrapCacheException(ExecutionException e) { + final Throwable cause = e.getCause(); + if (cause instanceof RuntimeException) { + return (RuntimeException) cause; + } else { + return new RuntimeException(cause); + } + } + /** * Builds and returns an automaton that represents the given pattern. */ @SuppressWarnings("fallthrough") // explicit fallthrough at end of switch static Automaton wildcard(String text) { List automata = new ArrayList<>(); - for (int i = 0; i < text.length();) { + for (int i = 0; i < text.length(); ) { final char c = text.charAt(i); int length = 1; - switch(c) { + switch (c) { case WILDCARD_STRING: automata.add(Automata.makeAnyString()); break; @@ -138,8 +185,19 @@ public static Predicate predicate(Automaton automaton) { return predicate(automaton, "Predicate for " + automaton); } - public static void updateMaxDeterminizedStates(Settings settings) { + public static void updateConfiguration(Settings settings) { maxDeterminizedStates = MAX_DETERMINIZED_STATES_SETTING.get(settings); + cache = buildCache(settings); + } + + private static Cache buildCache(Settings settings) { + if (CACHE_ENABLED.get(settings) == false) { + return null; + } + return CacheBuilder.builder() + .setExpireAfterAccess(CACHE_TTL.get(settings)) + .setMaximumWeight(CACHE_SIZE.get(settings)) + .build(); } // accessor for testing @@ -161,4 +219,11 @@ public String toString() { } }; } + + 
public static void addSettings(List> settingsList) { + settingsList.add(MAX_DETERMINIZED_STATES_SETTING); + settingsList.add(CACHE_ENABLED); + settingsList.add(CACHE_SIZE); + settingsList.add(CACHE_TTL); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 72c988fc22710..bd9e8d8e0583c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -22,6 +22,8 @@ import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; import static org.elasticsearch.xpack.core.security.support.Automatons.wildcard; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class AutomatonsTests extends ESTestCase { public void testPatternsUnionOfMultiplePatterns() throws Exception { @@ -70,29 +72,29 @@ public void testPredicateToString() throws Exception { public void testPatternComplexity() { List patterns = Arrays.asList("*", "filebeat*de-tst-chatclassification*", - "metricbeat*de-tst-chatclassification*", - "packetbeat*de-tst-chatclassification*", - "heartbeat*de-tst-chatclassification*", - "filebeat*documentationdev*", - "metricbeat*documentationdev*", - "packetbeat*documentationdev*", - "heartbeat*documentationdev*", - "filebeat*devsupport-website*", - "metricbeat*devsupport-website*", - "packetbeat*devsupport-website*", - "heartbeat*devsupport-website*", - ".kibana-tcloud", - ".reporting-tcloud", - "filebeat-app-ingress-*", - "filebeat-app-tcloud-*", - "filebeat*documentationprod*", - "metricbeat*documentationprod*", - "packetbeat*documentationprod*", - "heartbeat*documentationprod*", - "filebeat*bender-minio-test-1*", - 
"metricbeat*bender-minio-test-1*", - "packetbeat*bender-minio-test-1*", - "heartbeat*bender-minio-test-1*"); + "metricbeat*de-tst-chatclassification*", + "packetbeat*de-tst-chatclassification*", + "heartbeat*de-tst-chatclassification*", + "filebeat*documentationdev*", + "metricbeat*documentationdev*", + "packetbeat*documentationdev*", + "heartbeat*documentationdev*", + "filebeat*devsupport-website*", + "metricbeat*devsupport-website*", + "packetbeat*devsupport-website*", + "heartbeat*devsupport-website*", + ".kibana-tcloud", + ".reporting-tcloud", + "filebeat-app-ingress-*", + "filebeat-app-tcloud-*", + "filebeat*documentationprod*", + "metricbeat*documentationprod*", + "packetbeat*documentationprod*", + "heartbeat*documentationprod*", + "filebeat*bender-minio-test-1*", + "metricbeat*bender-minio-test-1*", + "packetbeat*bender-minio-test-1*", + "heartbeat*bender-minio-test-1*"); final Automaton automaton = Automatons.patterns(patterns); assertTrue(Operations.isTotal(automaton)); assertTrue(automaton.isDeterministic()); @@ -137,7 +139,7 @@ public void testSettingMaxDeterminizedStates() { assertNotEquals(10000, Automatons.getMaxDeterminizedStates()); // set to the min value Settings settings = Settings.builder().put(Automatons.MAX_DETERMINIZED_STATES_SETTING.getKey(), 10000).build(); - Automatons.updateMaxDeterminizedStates(settings); + Automatons.updateConfiguration(settings); assertEquals(10000, Automatons.getMaxDeterminizedStates()); final List names = new ArrayList<>(1024); @@ -147,8 +149,63 @@ public void testSettingMaxDeterminizedStates() { TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class, () -> Automatons.patterns(names)); assertThat(e.getMaxDeterminizedStates(), equalTo(10000)); } finally { - Automatons.updateMaxDeterminizedStates(Settings.EMPTY); + Automatons.updateConfiguration(Settings.EMPTY); assertEquals(100000, Automatons.getMaxDeterminizedStates()); } } + + public void testCachingOfAutomatons() { + 
Automatons.updateConfiguration(Settings.EMPTY); + + String pattern1 = randomAlphaOfLengthBetween(3, 8) + "*"; + String pattern2 = "/" + randomAlphaOfLengthBetween(1, 2) + "*" + randomAlphaOfLengthBetween(2, 4) + "/"; + + final Automaton a1 = Automatons.pattern(pattern1); + final Automaton a2 = Automatons.pattern(pattern2); + + assertThat(Automatons.pattern(pattern1), sameInstance(a1)); + assertThat(Automatons.pattern(pattern2), sameInstance(a2)); + + final Automaton a3 = Automatons.patterns(pattern1, pattern2); + final Automaton a4 = Automatons.patterns(pattern2, pattern1); + assertThat(a3, sameInstance(a4)); + } + + public void testConfigurationOfCacheSize() { + final Settings settings = Settings.builder() + .put(Automatons.CACHE_SIZE.getKey(), 2) + .build(); + Automatons.updateConfiguration(settings); + + String pattern1 = "a"; + String pattern2 = "b"; + String pattern3 = "c"; + + final Automaton a1 = Automatons.pattern(pattern1); + final Automaton a2 = Automatons.pattern(pattern2); + + assertThat(Automatons.pattern(pattern1), sameInstance(a1)); + assertThat(Automatons.pattern(pattern2), sameInstance(a2)); + + final Automaton a3 = Automatons.pattern(pattern3); + assertThat(Automatons.pattern(pattern3), sameInstance(a3)); + + // either pattern 1 or 2 should be evicted (in theory it should be 1, but we don't care about that level of precision) + final Automaton a1b = Automatons.pattern(pattern1); + final Automaton a2b = Automatons.pattern(pattern2); + if (a1b == a1 && a2b == a2) { + fail("Expected one of the existing automatons to be evicted, but both were still cached"); + } + } + + public void testDisableCache() { + final Settings settings = Settings.builder() + .put(Automatons.CACHE_ENABLED.getKey(), false) + .build(); + Automatons.updateConfiguration(settings); + + final String pattern = randomAlphaOfLengthBetween(5, 10); + final Automaton automaton = Automatons.pattern(pattern); + assertThat(Automatons.pattern(pattern), not(sameInstance(automaton))); + } } 
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 4d7c5cd058be3..e9f9166a2a3ba 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -16,9 +16,9 @@ import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.junit.After; @@ -26,6 +26,8 @@ import java.util.Locale; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -386,6 +388,41 @@ public void testDeleteJob() throws Exception { String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); + waitUntilIndexIsEmpty(indexName); + + // check that the job itself is gone + expectThrows(ResponseException.class, () -> + client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"))); + } + + public void testDeleteJobAsync() throws Exception { + String 
jobId = "delete-job-async-job"; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; + createFarequoteJob(jobId); + + String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + assertThat(indicesBeforeDelete, containsString(indexName)); + + Response response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + + "?wait_for_completion=false")); + + // Wait for task to complete + String taskId = extractTaskId(response); + Response taskResponse = client().performRequest(new Request("GET", "_tasks/" + taskId + "?wait_for_completion=true")); + assertThat(EntityUtils.toString(taskResponse.getEntity()), containsString("\"acknowledged\":true")); + + // check that the index still exists (it's shared by default) + String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + assertThat(indicesAfterDelete, containsString(indexName)); + + waitUntilIndexIsEmpty(indexName); + + // check that the job itself is gone + expectThrows(ResponseException.class, () -> + client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"))); + } + + private void waitUntilIndexIsEmpty(String indexName) throws Exception { assertBusy(() -> { try { String count = EntityUtils.toString(client().performRequest(new Request("GET", indexName + "/_count")).getEntity()); @@ -394,10 +431,14 @@ public void testDeleteJob() throws Exception { fail(e.getMessage()); } }); + } - // check that the job itself is gone - expectThrows(ResponseException.class, () -> - client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"))); + private static String extractTaskId(Response response) throws IOException { + String responseAsString = 
EntityUtils.toString(response.getEntity()); + Pattern matchTaskId = Pattern.compile(".*\"task\":.*\"(.*)\".*"); + Matcher taskIdMatcher = matchTaskId.matcher(responseAsString); + assertTrue(taskIdMatcher.matches()); + return taskIdMatcher.group(1); } public void testDeleteJobAfterMissingIndex() throws Exception { @@ -521,7 +562,7 @@ public void testMultiIndexDelete() throws Exception { } public void testDelete_multipleRequest() throws Exception { - String jobId = "delete-job-mulitple-times"; + String jobId = "delete-job-multiple-times"; createFarequoteJob(jobId); ConcurrentMapLong responses = ConcurrentCollections.newConcurrentMapLong(); @@ -532,8 +573,8 @@ public void testDelete_multipleRequest() throws Exception { AtomicReference recreationException = new AtomicReference<>(); Runnable deleteJob = () -> { + boolean forceDelete = randomBoolean(); try { - boolean forceDelete = randomBoolean(); String url = MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId; if (forceDelete) { url += "?force=true"; @@ -554,6 +595,7 @@ public void testDelete_multipleRequest() throws Exception { } catch (ResponseException re) { recreationException.set(re); } catch (IOException e) { + logger.error("Error trying to recreate the job", e); ioe.set(e); } } @@ -563,14 +605,14 @@ public void testDelete_multipleRequest() throws Exception { // the other to complete. 
This is difficult to schedule but // hopefully it will happen in CI int numThreads = 5; - Thread [] threads = new Thread[numThreads]; - for (int i=0; i jobIdProcessor = id -> { validateJobAndTaskState(id, mlMetadata, tasksMetaData); Job job = mlMetadata.getJobs().get(id); - if (job.isDeleted()) { + if (job.isDeleting()) { return; } addJobAccordingToState(id, tasksMetaData, openJobIds, closingJobIds, failedJobs); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 1d285b91f2f8e..89f42d622411f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -34,9 +34,9 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import 
org.elasticsearch.index.query.IdsQueryBuilder; @@ -45,14 +45,13 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; @@ -72,10 +71,11 @@ import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeoutException; import java.util.function.Consumer; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -90,6 +90,14 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction>> listenersByJobId; + @Inject public TransportDeleteJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, @@ -101,6 +109,7 @@ public TransportDeleteJobAction(Settings settings, TransportService transportSer this.persistentTasksService = persistentTasksService; this.auditor = auditor; this.jobResultsProvider = jobResultsProvider; + this.listenersByJobId = new HashMap<>(); } @Override @@ -113,58 +122,82 @@ protected AcknowledgedResponse newResponse() { return new AcknowledgedResponse(); } + @Override + protected ClusterBlockException 
checkBlock(DeleteJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected void masterOperation(DeleteJobAction.Request request, ClusterState state, ActionListener listener) { + throw new UnsupportedOperationException("the Task parameter is required"); + } + @Override protected void masterOperation(Task task, DeleteJobAction.Request request, ClusterState state, ActionListener listener) { + logger.debug("Deleting job '{}'", request.getJobId()); + + TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); + ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, taskId); + + // Check if there is a deletion task for this job already and if yes wait for it to complete + synchronized (listenersByJobId) { + if (listenersByJobId.containsKey(request.getJobId())) { + logger.debug("[{}] Deletion task [{}] will wait for existing deletion task to complete", + request.getJobId(), task.getId()); + listenersByJobId.get(request.getJobId()).add(listener); + return; + } else { + List> listeners = new ArrayList<>(); + listeners.add(listener); + listenersByJobId.put(request.getJobId(), listeners); + } + } + + auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); + + // The listener that will be executed at the end of the chain will notify all listeners + ActionListener finalListener = ActionListener.wrap( + ack -> notifyListeners(request.getJobId(), ack, null), + e -> notifyListeners(request.getJobId(), null, e) + ); ActionListener markAsDeletingListener = ActionListener.wrap( response -> { if (request.isForce()) { - forceDeleteJob(request, listener); + forceDeleteJob(parentTaskClient, request, finalListener); } else { - normalDeleteJob(request, listener); + normalDeleteJob(parentTaskClient, request, finalListener); } }, e -> { - if (e instanceof MlMetadata.JobAlreadyMarkedAsDeletedException) { - // 
Don't kick off a parallel deletion task, but just wait for - // the in-progress request to finish. This is much safer in the - // case where the job with the same name might be immediately - // recreated after the delete returns. However, if a force - // delete times out then eventually kick off a parallel delete - // in case the original completely failed for some reason. - waitForDeletingJob(request.getJobId(), MachineLearningField.STATE_PERSIST_RESTORE_TIMEOUT, - ActionListener.wrap( - listener::onResponse, - e2 -> { - if (request.isForce() && e2 instanceof TimeoutException) { - forceDeleteJob(request, listener); - } else { - listener.onFailure(e2); - } - } - )); - } else { - listener.onFailure(e); - } + auditor.error(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING_FAILED, e.getMessage())); + finalListener.onFailure(e); }); markJobAsDeleting(request.getJobId(), markAsDeletingListener, request.isForce()); } - @Override - protected void masterOperation(DeleteJobAction.Request request, ClusterState state, ActionListener listener) { - throw new UnsupportedOperationException("the Task parameter is required"); - } - - @Override - protected ClusterBlockException checkBlock(DeleteJobAction.Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + private void notifyListeners(String jobId, @Nullable AcknowledgedResponse ack, @Nullable Exception error) { + synchronized (listenersByJobId) { + List> listeners = listenersByJobId.remove(jobId); + if (listeners == null) { + logger.error("[{}] No deletion job listeners could be found", jobId); + return; + } + for (ActionListener listener : listeners) { + if (error != null) { + listener.onFailure(error); + } else { + listener.onResponse(ack); + } + } + } } - private void normalDeleteJob(DeleteJobAction.Request request, ActionListener listener) { + private void normalDeleteJob(ParentTaskAssigningClient parentTaskClient, DeleteJobAction.Request 
request, + ActionListener listener) { String jobId = request.getJobId(); - logger.debug("Deleting job '" + jobId + "'"); // Step 4. When the job has been removed from the cluster state, return a response // ------- @@ -212,10 +245,11 @@ public ClusterState execute(ClusterState currentState) { // Step 1. Delete the physical storage - deleteJobDocuments(jobId, removeFromCalendarsHandler, listener::onFailure); + deleteJobDocuments(parentTaskClient, jobId, removeFromCalendarsHandler, listener::onFailure); } - private void deleteJobDocuments(String jobId, CheckedConsumer finishedHandler, Consumer failureHandler) { + private void deleteJobDocuments(ParentTaskAssigningClient parentTaskClient, String jobId, + CheckedConsumer finishedHandler, Consumer failureHandler) { final String indexName = AnomalyDetectorsIndex.getPhysicalIndexFromState(clusterService.state(), jobId); final String indexPattern = indexName + "-*"; @@ -241,7 +275,7 @@ private void deleteJobDocuments(String jobId, CheckedConsumerwrap( response -> deleteByQueryExecutor.onResponse(false), // skip DBQ && Alias failureHandler), - client.admin().indices()::delete); + parentTaskClient.admin().indices()::delete); } }, failure -> { @@ -312,7 +346,7 @@ private void deleteJobDocuments(String jobId, CheckedConsumer deleteQuantilesHandler = ActionListener.wrap( - response -> deleteCategorizerState(jobId, client, 1, deleteCategorizerStateHandler), + response -> deleteCategorizerState(parentTaskClient, jobId, 1, deleteCategorizerStateHandler), failureHandler); // Step 2. Delete state done, delete the quantiles ActionListener deleteStateHandler = ActionListener.wrap( - bulkResponse -> deleteQuantiles(jobId, client, deleteQuantilesHandler), + bulkResponse -> deleteQuantiles(parentTaskClient, jobId, deleteQuantilesHandler), failureHandler); // Step 1. 
Delete the model state - deleteModelState(jobId, client, deleteStateHandler); + deleteModelState(parentTaskClient, jobId, deleteStateHandler); } - private void deleteQuantiles(String jobId, Client client, ActionListener finishedHandler) { + private void deleteQuantiles(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener finishedHandler) { // The quantiles type and doc ID changed in v5.5 so delete both the old and new format DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace @@ -344,7 +378,7 @@ private void deleteQuantiles(String jobId, Client client, ActionListener finishedHandler.onResponse(true), e -> { // It's not a problem for us if the index wasn't found - it's equivalent to document not found @@ -356,19 +390,20 @@ private void deleteQuantiles(String jobId, Client client, ActionListener listener) { + private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener listener) { GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE)); - executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( + executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( response -> { List deleteCandidates = response.getPage().results(); - JobDataDeleter deleter = new JobDataDeleter(client, jobId); + JobDataDeleter deleter = new JobDataDeleter(parentTaskClient, jobId); deleter.deleteModelSnapshots(deleteCandidates, listener); }, listener::onFailure)); } - private void deleteCategorizerState(String jobId, Client client, int docNum, ActionListener finishedHandler) { + private void deleteCategorizerState(ParentTaskAssigningClient parentTaskClient, String jobId, int 
docNum, + ActionListener finishedHandler) { // The categorizer state type and doc ID changed in v5.5 so delete both the old and new format DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName()); // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace @@ -380,13 +415,13 @@ private void deleteCategorizerState(String jobId, Client client, int docNum, Act request.setAbortOnVersionConflict(false); request.setRefresh(true); - executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( + executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( response -> { // If we successfully deleted a document try the next one; if not we're done if (response.getDeleted() > 0) { // There's an assumption here that there won't be very many categorizer // state documents, so the recursion won't go more than, say, 5 levels deep - deleteCategorizerState(jobId, client, docNum + 1, finishedHandler); + deleteCategorizerState(parentTaskClient, jobId, docNum + 1, finishedHandler); return; } finishedHandler.onResponse(true); @@ -401,14 +436,15 @@ private void deleteCategorizerState(String jobId, Client client, int docNum, Act })); } - private void deleteAliases(String jobId, Client client, ActionListener finishedHandler) { + private void deleteAliases(ParentTaskAssigningClient parentTaskClient, String jobId, + ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); // first find the concrete indices associated with the aliases GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) .indicesOptions(IndicesOptions.lenientExpandOpen()); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, + 
executeAsyncWithOrigin(parentTaskClient.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, ActionListener.wrap( getAliasesResponse -> { // remove the aliases from the concrete indices found in the first step @@ -419,13 +455,13 @@ private void deleteAliases(String jobId, Client client, ActionListenerwrap( finishedHandler::onResponse, finishedHandler::onFailure), - client.admin().indices()::aliases); + parentTaskClient.admin().indices()::aliases); }, - finishedHandler::onFailure), client.admin().indices()::getAliases); + finishedHandler::onFailure), parentTaskClient.admin().indices()::getAliases); } private IndicesAliasesRequest buildRemoveAliasesRequest(GetAliasesResponse getAliasesResponse) { @@ -445,7 +481,10 @@ private IndicesAliasesRequest buildRemoveAliasesRequest(GetAliasesResponse getAl .indices(indices.toArray(new String[indices.size()]))); } - private void forceDeleteJob(DeleteJobAction.Request request, ActionListener listener) { + private void forceDeleteJob(ParentTaskAssigningClient parentTaskClient, DeleteJobAction.Request request, + ActionListener listener) { + + logger.debug("Force deleting job [{}]", request.getJobId()); final ClusterState state = clusterService.state(); final String jobId = request.getJobId(); @@ -454,13 +493,13 @@ private void forceDeleteJob(DeleteJobAction.Request request, ActionListener removeTaskListener = new ActionListener() { @Override public void onResponse(Boolean response) { - normalDeleteJob(request, listener); + normalDeleteJob(parentTaskClient, request, listener); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - normalDeleteJob(request, listener); + normalDeleteJob(parentTaskClient, request, listener); } else { listener.onFailure(e); } @@ -483,12 +522,13 @@ public void onFailure(Exception e) { ); // 1. 
Kill the job's process - killProcess(jobId, killJobListener); + killProcess(parentTaskClient, jobId, killJobListener); } - private void killProcess(String jobId, ActionListener listener) { + private void killProcess(ParentTaskAssigningClient parentTaskClient, String jobId, + ActionListener listener) { KillProcessAction.Request killRequest = new KillProcessAction.Request(jobId); - executeAsyncWithOrigin(client, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); + executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); } private void removePersistentTask(String jobId, ClusterState currentState, @@ -520,7 +560,7 @@ private void markJobAsDeleting(String jobId, ActionListener listener, b public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData tasks = currentState.metaData().custom(PersistentTasksCustomMetaData.TYPE); MlMetadata.Builder builder = new MlMetadata.Builder(MlMetadata.getMlMetadata(currentState)); - builder.markJobAsDeleted(jobId, tasks, force); + builder.markJobAsDeleting(jobId, tasks, force); return buildNewClusterState(currentState, builder); } @@ -537,32 +577,6 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS }); } - private void waitForDeletingJob(String jobId, TimeValue timeout, ActionListener listener) { - ClusterStateObserver stateObserver = new ClusterStateObserver(clusterService, timeout, logger, threadPool.getThreadContext()); - - ClusterState clusterState = stateObserver.setAndGetObservedState(); - if (jobIsDeletedFromState(jobId, clusterState)) { - listener.onResponse(new AcknowledgedResponse(true)); - } else { - stateObserver.waitForNextChange(new ClusterStateObserver.Listener() { - @Override - public void onNewClusterState(ClusterState state) { - listener.onResponse(new AcknowledgedResponse(true)); - } - - @Override - public void onClusterServiceClose() { - listener.onFailure(new 
NodeClosedException(clusterService.localNode())); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new TimeoutException("timed out after " + timeout)); - } - }, newClusterState -> jobIsDeletedFromState(jobId, newClusterState), timeout); - } - } - static boolean jobIsDeletedFromState(String jobId, ClusterState clusterState) { return !MlMetadata.getMlMetadata(clusterState).getJobs().containsKey(jobId); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index ab1ef73780e5e..7217fcc6ec9a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -183,6 +183,6 @@ static List determineNonDeletedJobIdsWithoutLiveStats(MlMetadata mlMetad List stats) { Set excludeJobIds = stats.stream().map(GetJobsStatsAction.Response.JobStats::getJobId).collect(Collectors.toSet()); return requestedJobIds.stream().filter(jobId -> !excludeJobIds.contains(jobId) && - !mlMetadata.isJobDeleted(jobId)).collect(Collectors.toList()); + !mlMetadata.isJobDeleting(jobId)).collect(Collectors.toList()); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 512d8188abfac..42b67b2917387 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -127,8 +127,8 @@ static void validate(String jobId, MlMetadata mlMetadata) { if (job == null) { throw ExceptionsHelper.missingJobException(jobId); } - if (job.isDeleted()) { - throw 
ExceptionsHelper.conflictStatusException("Cannot open job [" + jobId + "] because it has been marked as deleted"); + if (job.isDeleting()) { + throw ExceptionsHelper.conflictStatusException("Cannot open job [" + jobId + "] because it is being deleted"); } if (job.getJobVersion() == null) { throw ExceptionsHelper.badRequestException("Cannot open job [" + jobId diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index b1c73dc04dbf1..3a76b71980bec 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -7,10 +7,15 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskListener; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -37,6 +42,35 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient deleteJobRequest.setForce(restRequest.paramAsBoolean(CloseJobAction.Request.FORCE.getPreferredName(), deleteJobRequest.isForce())); deleteJobRequest.timeout(restRequest.paramAsTime("timeout", deleteJobRequest.timeout())); deleteJobRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", deleteJobRequest.masterNodeTimeout())); - return channel -> 
client.execute(DeleteJobAction.INSTANCE, deleteJobRequest, new RestToXContentListener<>(channel)); + + if (restRequest.paramAsBoolean("wait_for_completion", true)) { + return channel -> client.execute(DeleteJobAction.INSTANCE, deleteJobRequest, new RestToXContentListener<>(channel)); + } else { + deleteJobRequest.setShouldStoreResult(true); + + Task task = client.executeLocally(DeleteJobAction.INSTANCE, deleteJobRequest, nullTaskListener()); + // Send task description id instead of waiting for the message + return channel -> { + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + builder.field("task", client.getLocalNodeId() + ":" + task.getId()); + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + }; + } + } + + // We do not want to log anything due to a delete action + // The response or error will be returned to the client when called synchronously + // or it will be stored in the task result when called asynchronously + private static TaskListener nullTaskListener() { + return new TaskListener() { + @Override + public void onResponse(Task task, Object o) {} + + @Override + public void onFailure(Task task, Throwable e) {} + }; } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index e16ac2f99700d..82478fbf5d337 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -124,7 +124,7 @@ public void testPutJob() { public void testRemoveJob() { Job.Builder jobBuilder = buildJobBuilder("1"); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); Job job1 = jobBuilder.build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); @@ -206,7 +206,7 @@ public void testPutDatafeed_failBecauseJobDoesNotExist() { } 
public void testPutDatafeed_failBecauseJobIsBeingDeleted() { - Job job1 = createDatafeedJob().setDeleted(true).build(new Date()); + Job job1 = createDatafeedJob().setDeleting(true).build(new Date()); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java index 2e00ad71251db..6d4b008570c72 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java @@ -28,7 +28,7 @@ public class TransportGetJobsStatsActionTests extends ESTestCase { public void testDetermineJobIds() { MlMetadata mlMetadata = mock(MlMetadata.class); - when(mlMetadata.isJobDeleted(eq("id4"))).thenReturn(true); + when(mlMetadata.isJobDeleting(eq("id4"))).thenReturn(true); List result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Collections.singletonList("id1"), Collections.emptyList()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 58b60273b0e6d..4dd41363b73fe 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -79,14 +79,14 @@ public void testValidate_jobMissing() { expectThrows(ResourceNotFoundException.class, () -> TransportOpenJobAction.validate("job_id2", mlBuilder.build())); } - public void testValidate_jobMarkedAsDeleted() { + public void 
testValidate_jobMarkedAsDeleting() { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); Job.Builder jobBuilder = buildJobBuilder("job_id"); - jobBuilder.setDeleted(true); + jobBuilder.setDeleting(true); mlBuilder.putJob(jobBuilder.build(), false); Exception e = expectThrows(ElasticsearchStatusException.class, () -> TransportOpenJobAction.validate("job_id", mlBuilder.build())); - assertEquals("Cannot open job [job_id] because it has been marked as deleted", e.getMessage()); + assertEquals("Cannot open job [job_id] because it is being deleted", e.getMessage()); } public void testValidate_jobWithoutVersion() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java deleted file mode 100644 index ed23a5328aec1..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.integration; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.core.ml.MlMetadata; -import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; -import org.elasticsearch.xpack.core.ml.action.PutJobAction; -import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -public class DeleteJobIT extends BaseMlIntegTestCase { - - public void testWaitForDelete() throws ExecutionException, InterruptedException { - final String jobId = "wait-for-delete-job"; - Job.Builder job = createJob(jobId); - PutJobAction.Request putJobRequest = new PutJobAction.Request(job); - client().execute(PutJobAction.INSTANCE, putJobRequest).get(); - - AtomicReference exceptionHolder = new AtomicReference<>(); - CountDownLatch markAsDeletedLatch = new CountDownLatch(1); - clusterService().submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) { - return markJobAsDeleted(jobId, currentState); - } - - @Override - public void onFailure(String source, Exception e) { - markAsDeletedLatch.countDown(); - exceptionHolder.set(e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - markAsDeletedLatch.countDown(); - } - }); - - assertTrue("Timed out waiting for state update", 
markAsDeletedLatch.await(5, TimeUnit.SECONDS)); - assertNull("mark-job-as-deleted task failed: " + exceptionHolder.get(), exceptionHolder.get()); - - // Job is marked as deleting so now a delete request should wait for it. - AtomicBoolean isDeleted = new AtomicBoolean(false); - AtomicReference deleteFailure = new AtomicReference<>(); - ActionListener deleteListener = new ActionListener() { - @Override - public void onResponse(AcknowledgedResponse response) { - isDeleted.compareAndSet(false, response.isAcknowledged()); - } - - @Override - public void onFailure(Exception e) { - deleteFailure.set(e); - } - }; - - client().execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(jobId), deleteListener); - awaitBusy(isDeleted::get, 1, TimeUnit.SECONDS); - // still waiting - assertFalse(isDeleted.get()); - - CountDownLatch removeJobLatch = new CountDownLatch(1); - clusterService().submitStateUpdateTask("remove-job-from-state", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - assertFalse(isDeleted.get()); - return removeJobFromClusterState(jobId, currentState); - } - - @Override - public void onFailure(String source, Exception e) { - removeJobLatch.countDown(); - exceptionHolder.set(e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - removeJobLatch.countDown(); - } - }); - - assertTrue("Timed out waiting for remove job from state response", removeJobLatch.await(5, TimeUnit.SECONDS)); - assertNull("remove-job-from-state task failed: " + exceptionHolder.get(), exceptionHolder.get()); - - assertNull("Job deletion failed: " + deleteFailure.get(), deleteFailure.get()); - assertTrue("Job was not deleted", isDeleted.get()); - } - - private ClusterState markJobAsDeleted(String jobId, ClusterState currentState) { - MlMetadata mlMetadata = MlMetadata.getMlMetadata(currentState); - assertNotNull(mlMetadata); - - MlMetadata.Builder 
builder = new MlMetadata.Builder(mlMetadata); - PersistentTasksCustomMetaData tasks = currentState.metaData().custom(PersistentTasksCustomMetaData.TYPE); - builder.markJobAsDeleted(jobId, tasks, true); - - ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) - .build(); - } - - private ClusterState removeJobFromClusterState(String jobId, ClusterState currentState) { - MlMetadata.Builder builder = new MlMetadata.Builder(MlMetadata.getMlMetadata(currentState)); - builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); - - ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) - .build(); - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 76b1a87f682fa..2a49a1299943a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -307,7 +307,7 @@ public Security(Settings settings, final Path configPath) { new FIPS140LicenseBootstrapCheck())); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); - Automatons.updateMaxDeterminizedStates(settings); + Automatons.updateConfiguration(settings); } else { this.bootstrapChecks = Collections.emptyList(); } @@ -609,7 +609,7 @@ public static List> getSettings(boolean transportClientMode, List authenticatingListener = - new IteratingActionListener<>(ActionListener.wrap( - (user) -> consumeUser(user, messages), - (e) -> listener.onFailure(request.exceptionProcessingRequest(e, 
token))), + new IteratingActionListener<>(ContextPreservingActionListener.wrapPreservingContext(ActionListener.wrap( + (user) -> consumeUser(user, messages), + (e) -> listener.onFailure(request.exceptionProcessingRequest(e, token))), threadContext), realmAuthenticatingConsumer, realmsList, threadContext); try { authenticatingListener.run(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index af93a180072aa..0d8609d61d9b2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -30,6 +31,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm private final Cache> cache; private final ThreadPool threadPool; + private final boolean authenticationEnabled; final Hasher cacheHasher; protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPool threadPool) { @@ -45,6 +47,7 @@ protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPo } else { cache = null; } + this.authenticationEnabled = CachingUsernamePasswordRealmSettings.AUTHC_ENABLED_SETTING.get(config.settings()); } @Override @@ -63,15 +66,34 @@ public final void expireAll() { } } + @Override + public 
UsernamePasswordToken token(ThreadContext threadContext) { + if (authenticationEnabled == false) { + return null; + } + return super.token(threadContext); + } + + @Override + public boolean supports(AuthenticationToken token) { + return authenticationEnabled && super.supports(token); + } + /** * If the user exists in the cache (keyed by the principle name), then the password is validated * against a hash also stored in the cache. Otherwise the subclass authenticates the user via - * doAuthenticate + * doAuthenticate. + * This method will respond with {@link AuthenticationResult#notHandled()} if + * {@link CachingUsernamePasswordRealmSettings#AUTHC_ENABLED_SETTING authentication is not enabled}. * @param authToken The authentication token * @param listener to be called at completion */ @Override public final void authenticate(AuthenticationToken authToken, ActionListener listener) { + if (authenticationEnabled == false) { + listener.onResponse(AuthenticationResult.notHandled()); + return; + } final UsernamePasswordToken token = (UsernamePasswordToken) authToken; try { if (cache == null) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 65f69b397ba55..ef5b0386bc23f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -716,7 +716,7 @@ public void testRealmLookupThrowingExceptionRest() throws Exception { when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); - doThrow(authenticationError("realm doesn't want to " + "lookup")) + doThrow(authenticationError("realm doesn't 
want to lookup")) .when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); try { @@ -1029,12 +1029,22 @@ void assertThreadContextContainsAuthentication(Authentication authentication) th @SuppressWarnings("unchecked") private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) { - doAnswer((i) -> { + final boolean separateThread = randomBoolean(); + doAnswer(i -> { ActionListener listener = (ActionListener) i.getArguments()[1]; - if (user == null) { - listener.onResponse(AuthenticationResult.notHandled()); + Runnable run = () -> { + if (user == null) { + listener.onResponse(AuthenticationResult.notHandled()); + } else { + listener.onResponse(AuthenticationResult.success(user)); + } + }; + if (separateThread) { + final Thread thread = new Thread(run); + thread.start(); + thread.join(); } else { - listener.onResponse(AuthenticationResult.success(user)); + run.run(); } return null; }).when(realm).authenticate(eq(token), any(ActionListener.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java index e9e8908c584a9..6d84dfb2a8048 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; @@ -62,7 +63,7 @@ public void stop() throws InterruptedException { } } - 
public void testSettings() throws Exception { + public void testCacheSettings() throws Exception { String cachingHashAlgo = Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT); int maxUsers = randomIntBetween(10, 100); TimeValue ttl = TimeValue.timeValueMinutes(randomIntBetween(10, 20)); @@ -560,6 +561,33 @@ protected void doLookupUser(String username, ActionListener listener) { assertEquals(1, lookupCounter.get()); } + public void testAuthenticateDisabled() throws Exception { + final Settings settings = Settings.builder() + .put(CachingUsernamePasswordRealmSettings.AUTHC_ENABLED_SETTING.getKey(), false) + .build(); + final Environment env = TestEnvironment.newEnvironment(globalSettings); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final RealmConfig config = new RealmConfig("test_authentication_disabled", settings, globalSettings, env, threadContext); + final AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config, threadPool); + + final UsernamePasswordToken token = new UsernamePasswordToken("phil", new SecureString("tahiti")); + UsernamePasswordToken.putTokenHeader(threadContext, token); + assertThat(realm.token(threadContext), nullValue()); + assertThat(realm.supports(token), equalTo(false)); + + PlainActionFuture authFuture = new PlainActionFuture<>(); + realm.authenticate(token, authFuture); + final AuthenticationResult authResult = authFuture.get(); + assertThat(authResult.isAuthenticated(), equalTo(false)); + assertThat(authResult.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE)); + + PlainActionFuture lookupFuture = new PlainActionFuture<>(); + realm.lookupUser(token.principal(), lookupFuture); + final User user = lookupFuture.get(); + assertThat(user, notNullValue()); + assertThat(user.principal(), equalTo(token.principal())); + } + static class FailingAuthenticationRealm extends CachingUsernamePasswordRealm { FailingAuthenticationRealm(Settings 
settings, Settings global, ThreadPool threadPool) { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json index 77eb89c00f92d..f93fff6eaab4e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_job.json @@ -15,8 +15,13 @@ "params": { "force": { "type": "boolean", - "required": false, - "description": "True if the job should be forcefully deleted" + "description": "True if the job should be forcefully deleted", + "default": false + }, + "wait_for_completion": { + "type": "boolean", + "description": "Should this request wait until the operation has completed before returning", + "default": true } } },