From 8062b4e0a096f8407f552cae2493ba79cc330943 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 26 Aug 2018 14:09:23 +0300 Subject: [PATCH 01/18] Refactor CachingUsernamePassword realm (#32646) Refactors the logic of authentication and lookup caching in `CachingUsernamePasswordRealm`. Nothing changed about the single-inflight-request or positive caching. --- .../support/CachingUsernamePasswordRealm.java | 223 +++++++++--------- 1 file changed, 109 insertions(+), 114 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index 5eff3148b8f7b..95ebdfb453cf3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -5,11 +5,9 @@ */ package org.elasticsearch.xpack.security.authc.support; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ListenableFuture; @@ -31,7 +29,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm { - private final Cache>> cache; + private final Cache> cache; private final ThreadPool threadPool; final Hasher cacheHasher; @@ -39,9 +37,9 @@ protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPo super(type, config); cacheHasher = Hasher.resolve(CachingUsernamePasswordRealmSettings.CACHE_HASH_ALGO_SETTING.get(config.settings())); this.threadPool = threadPool; - TimeValue ttl = CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING.get(config.settings()); + final TimeValue ttl = CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING.get(config.settings()); if (ttl.getNanos() > 0) { - cache = CacheBuilder.>>builder() + cache = CacheBuilder.>builder() .setExpireAfterWrite(ttl) .setMaximumWeight(CachingUsernamePasswordRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings())) .build(); @@ -50,6 +48,7 @@ protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPo } } + @Override public final void expire(String username) { if (cache != null) { logger.trace("invalidating cache for user [{}] in realm [{}]", username, name()); @@ -57,6 +56,7 @@ public final void expire(String username) { } } + @Override public final void expireAll() { if (cache != null) { logger.trace("invalidating cache for all users in realm [{}]", name()); @@ -73,108 +73,84 @@ public final void expireAll() { */ @Override public final void authenticate(AuthenticationToken authToken, ActionListener listener) { - UsernamePasswordToken token = (UsernamePasswordToken) authToken; + final UsernamePasswordToken token = (UsernamePasswordToken) authToken; try { if (cache == null) { doAuthenticate(token, listener); } else { authenticateWithCache(token, listener); } - } catch (Exception e) { + } catch (final Exception e) { // each realm should handle exceptions, if we get one here it should be considered fatal listener.onFailure(e); } } + /** + * This validates the {@code token} while 
making sure there is only one inflight
+     * request to the authentication source. Only successful responses are cached
+     * and any subsequent requests, bearing the same password, will succeed
+     * without reaching out to the authentication source. A different password in a
+     * subsequent request, however, will clear the cache and try to reach
+     * the authentication source.
+     *
+     * @param token The authentication token
+     * @param listener the listener to be called at completion
+     */
    private void authenticateWithCache(UsernamePasswordToken token, ActionListener listener) {
        try {
-            final SetOnce authenticatedUser = new SetOnce<>();
-            final AtomicBoolean createdAndStartedFuture = new AtomicBoolean(false);
-            final ListenableFuture> future = cache.computeIfAbsent(token.principal(), k -> {
-                final ListenableFuture> created = new ListenableFuture<>();
-                if (createdAndStartedFuture.compareAndSet(false, true) == false) {
-                    throw new IllegalStateException("something else already started this. how?");
-                }
-                return created;
+            final AtomicBoolean authenticationInCache = new AtomicBoolean(true);
+            final ListenableFuture listenableCacheEntry = cache.computeIfAbsent(token.principal(), k -> {
+                authenticationInCache.set(false);
+                return new ListenableFuture<>();
            });
-
-            if (createdAndStartedFuture.get()) {
-                doAuthenticate(token, ActionListener.wrap(result -> {
-                    if (result.isAuthenticated()) {
-                        final User user = result.getUser();
-                        authenticatedUser.set(user);
-                        final UserWithHash userWithHash = new UserWithHash(user, token.credentials(), cacheHasher);
-                        future.onResponse(new Tuple<>(result, userWithHash));
-                    } else {
-                        future.onResponse(new Tuple<>(result, null));
-                    }
-                }, future::onFailure));
-            }
-
-            future.addListener(ActionListener.wrap(tuple -> {
-                if (tuple != null) {
-                    final UserWithHash userWithHash = tuple.v2();
-                    final boolean performedAuthentication = createdAndStartedFuture.get() && userWithHash != null &&
-                            tuple.v2().user == authenticatedUser.get();
-                    handleResult(future, createdAndStartedFuture.get(), performedAuthentication, token, tuple, listener);
-                } else {
-                    handleFailure(future, createdAndStartedFuture.get(), token, new IllegalStateException("unknown error authenticating"),
-                            listener);
-                }
-            }, e -> handleFailure(future, createdAndStartedFuture.get(), token, e, listener)),
-                    threadPool.executor(ThreadPool.Names.GENERIC));
-        } catch (ExecutionException e) {
-            listener.onResponse(AuthenticationResult.unsuccessful("", e));
-        }
-    }
-
-    private void handleResult(ListenableFuture> future, boolean createdAndStartedFuture,
-                              boolean performedAuthentication, UsernamePasswordToken token,
-                              Tuple result, ActionListener listener) {
-        final AuthenticationResult authResult = result.v1();
-        if (authResult == null) {
-            // this was from a lookup; clear and redo
-            cache.invalidate(token.principal(), future);
-            authenticateWithCache(token, listener);
-        } else if (authResult.isAuthenticated()) {
-            if (performedAuthentication) {
-                listener.onResponse(authResult);
-            } else {
-                UserWithHash userWithHash = result.v2();
-                if (userWithHash.verify(token.credentials())) {
-                    if (userWithHash.user.enabled()) {
-                        User user = userWithHash.user;
-                        logger.debug("realm [{}] authenticated user [{}], with roles [{}]",
-                                name(), token.principal(), user.roles());
+            if (authenticationInCache.get()) {
+                // there is a cached or an inflight authenticate request
+                listenableCacheEntry.addListener(ActionListener.wrap(authenticatedUserWithHash -> {
+                    if (authenticatedUserWithHash != null && authenticatedUserWithHash.verify(token.credentials()))
{
+                        // cached credential hash matches the credential hash for this forestalled request
+                        final User user = authenticatedUserWithHash.user;
+                        logger.debug("realm [{}] authenticated user [{}], with roles [{}], from cache", name(), token.principal(),
+                                user.roles());
                        listener.onResponse(AuthenticationResult.success(user));
                    } else {
-                        // re-auth to see if user has been enabled
-                        cache.invalidate(token.principal(), future);
+                        // The inflight request has failed or its credential hash does not match the
+                        // hash of the credential for this forestalled request.
+                        // clear the cache and try to reach the authentication source again, because the
+                        // password might have changed there and the local cached hash has gone stale
+                        cache.invalidate(token.principal(), listenableCacheEntry);
                        authenticateWithCache(token, listener);
                    }
-                } else {
-                    // could be a password change?
-                    cache.invalidate(token.principal(), future);
+                }, e -> {
+                    // the inflight request failed, so try again, but first (always) make sure the cache
+                    // is cleared of the failed authentication
+                    cache.invalidate(token.principal(), listenableCacheEntry);
                    authenticateWithCache(token, listener);
-                }
-            }
-        } else {
-            cache.invalidate(token.principal(), future);
-            if (createdAndStartedFuture) {
-                listener.onResponse(authResult);
+                }), threadPool.executor(ThreadPool.Names.GENERIC));
            } else {
-                authenticateWithCache(token, listener);
+                // attempt authentication against the authentication source
+                doAuthenticate(token, ActionListener.wrap(authResult -> {
+                    if (authResult.isAuthenticated() && authResult.getUser().enabled()) {
+                        // compute the credential hash of this successful authentication request
+                        final UserWithHash userWithHash = new UserWithHash(authResult.getUser(), token.credentials(), cacheHasher);
+                        // notify any forestalled request listeners; they will not issue their own
+                        // authentication request and will instead use this hash for comparison
+                        listenableCacheEntry.onResponse(userWithHash);
+                    } else {
+                        // notify any forestalled request listeners; they will retry the request
+                        listenableCacheEntry.onResponse(null);
+                    }
+                    // notify the listener of the inflight authentication request; this request is not retried
+                    listener.onResponse(authResult);
+                }, e -> {
+                    // notify any forestalled listeners; they will retry the request
+                    listenableCacheEntry.onFailure(e);
+                    // notify the listener of the inflight authentication request; this request is not retried
+                    listener.onFailure(e);
+                }));
            }
-        }
-    }
-
-    private void handleFailure(ListenableFuture> future, boolean createdAndStarted,
-                               UsernamePasswordToken token, Exception e, ActionListener listener) {
-        cache.invalidate(token.principal(), future);
-        if (createdAndStarted) {
+        } catch (final ExecutionException e) {
            listener.onFailure(e);
-        } else {
-            authenticateWithCache(token, listener);
        }
    }

@@ -194,38 +170,57 @@ protected int getCacheSize() {

    @Override
    public final void lookupUser(String username, ActionListener listener) {
-        if (cache != null) {
-            try {
-                ListenableFuture> future = cache.computeIfAbsent(username, key -> {
-                    ListenableFuture> created = new ListenableFuture<>();
-                    doLookupUser(username, ActionListener.wrap(user -> {
-                        if (user != null) {
-                            UserWithHash userWithHash = new UserWithHash(user, null, null);
-                            created.onResponse(new Tuple<>(null, userWithHash));
-                        } else {
-                            created.onResponse(new Tuple<>(null, null));
-                        }
-                    }, created::onFailure));
-                    return created;
-                });
-
-                future.addListener(ActionListener.wrap(tuple -> {
-                    if (tuple != null) {
-                        if (tuple.v2() == null) {
-
cache.invalidate(username, future);
-                            listener.onResponse(null);
-                        } else {
-                            listener.onResponse(tuple.v2().user);
-                        }
+        try {
+            if (cache == null) {
+                doLookupUser(username, listener);
+            } else {
+                lookupWithCache(username, listener);
+            }
+        } catch (final Exception e) {
+            // each realm should handle exceptions; if we get one here it should be
+            // considered fatal
+            listener.onFailure(e);
+        }
+    }
+
+    private void lookupWithCache(String username, ActionListener listener) {
+        try {
+            final AtomicBoolean lookupInCache = new AtomicBoolean(true);
+            final ListenableFuture listenableCacheEntry = cache.computeIfAbsent(username, key -> {
+                lookupInCache.set(false);
+                return new ListenableFuture<>();
+            });
+            if (false == lookupInCache.get()) {
+                // attempt lookup against the user directory
+                doLookupUser(username, ActionListener.wrap(user -> {
+                    if (user != null) {
+                        // user found
+                        final UserWithHash userWithHash = new UserWithHash(user, null, null);
+                        // notify forestalled request listeners
+                        listenableCacheEntry.onResponse(userWithHash);
                    } else {
-                        listener.onResponse(null);
+                        // user not found, invalidate cache so that subsequent requests are forwarded to
+                        // the user directory
+                        cache.invalidate(username, listenableCacheEntry);
+                        // notify forestalled request listeners
+                        listenableCacheEntry.onResponse(null);
                    }
-                }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC));
-            } catch (ExecutionException e) {
-                listener.onFailure(e);
+                }, e -> {
+                    // the next request should be forwarded, not halted by a failed lookup attempt
+                    cache.invalidate(username, listenableCacheEntry);
+                    // notify forestalled listeners
+                    listenableCacheEntry.onFailure(e);
+                }));
            }
-        } else {
-            doLookupUser(username, listener);
+            listenableCacheEntry.addListener(ActionListener.wrap(userWithHash -> {
+                if (userWithHash != null) {
+                    listener.onResponse(userWithHash.user);
+                } else {
+                    listener.onResponse(null);
+                }
+            }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC));
+        } catch (final ExecutionException e) {
+            listener.onFailure(e);
        }
    }

From db75566d7a2cecc5a23cccbfc51385694f771749 Mon Sep 17 00:00:00 2001
From: Albert Zaharovits
Date: Sun, 26 Aug 2018 14:49:32 +0300
Subject: [PATCH 02/18] Reload Secure Settings REST specs & docs (#32990)

This is a minimal REST API spec and docs for the REST handler for the
`_nodes/reload_secure_settings` endpoint.
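For quick reference, these are the two forms of the call covered by the spec
and docs added in this patch (`nodeId1` and `nodeId2` are placeholder node IDs):

    POST _nodes/reload_secure_settings
    POST _nodes/nodeId1,nodeId2/reload_secure_settings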
Relates #29135
---
 .../client/RestHighLevelClientTests.java      |  1 +
 .../nodes-reload-secure-settings.asciidoc     | 55 +++++++++++++++++++
 .../api/nodes.reload_secure_settings.json     | 23 ++++++++
 .../nodes.reload_secure_settings/10_basic.yml |  8 +++
 4 files changed, 87 insertions(+)
 create mode 100644 docs/reference/cluster/nodes-reload-secure-settings.asciidoc
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index b9229d3d58e88..e82a1922b1344 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -703,6 +703,7 @@ public void testApiNamingConventions() throws Exception {
            "nodes.stats",
            "nodes.hot_threads",
            "nodes.usage",
+            "nodes.reload_secure_settings",
            "search_shards",
        };
        Set deprecatedMethods = new HashSet<>();
diff --git a/docs/reference/cluster/nodes-reload-secure-settings.asciidoc b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc
new file mode 100644
index 0000000000000..f02ac8e46576b
--- /dev/null
+++ b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc
@@ -0,0 +1,55 @@
+[[cluster-nodes-reload-secure-settings]]
+== Nodes Reload Secure Settings
+
+The cluster nodes reload secure settings API is used to re-read the
+local node's encrypted keystore. Specifically, it prompts the keystore to be
+decrypted and read across the cluster. The keystore's plain content is
+used to reinitialize all compatible plugins. A compatible plugin can be
+reinitialized without restarting the node. The operation is
+complete when all compatible plugins have finished reinitializing. Subsequently,
+the keystore is closed and any changes to it will not be reflected on the node.
+
+[source,js]
+--------------------------------------------------
+POST _nodes/reload_secure_settings
+POST _nodes/nodeId1,nodeId2/reload_secure_settings
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:node]
+// TEST[s/nodeId1,nodeId2/*/]
+
+The first command reloads the keystore on each node. The second selectively
+targets `nodeId1` and `nodeId2`. The node selection options are
+detailed <>.
+
+Note: It is an error for secure settings to be inconsistent across the cluster
+nodes, yet this consistency is not enforced. Hence, reloading specific
+nodes is not standard; it is justifiable only when retrying failed reload operations.
+
+[float]
+[[rest-reload-secure-settings]]
+==== REST Reload Secure Settings Response
+
+The response contains the `nodes` object, which is a map, keyed by the
+node id. Each value has the node `name` and an optional `reload_exception`
+field. The `reload_exception` field is a serialization of the exception
+that was thrown during the reload process, if any.
+
+[source,js]
+--------------------------------------------------
+{
+  "_nodes": {
+    "total": 1,
+    "successful": 1,
+    "failed": 0
+  },
+  "cluster_name": "my_cluster",
+  "nodes": {
+    "pQHNt5rXTTWNvUgOrdynKg": {
+      "name": "node-0"
+    }
+  }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"my_cluster"/$body.cluster_name/]
+// TESTRESPONSE[s/"pQHNt5rXTTWNvUgOrdynKg"/\$node_name/]
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json
new file mode 100644
index 0000000000000..487beaba86520
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.reload_secure_settings.json
@@ -0,0 +1,23 @@
+{
+  "nodes.reload_secure_settings": {
+    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-reload-secure-settings.html",
+    "methods": ["POST"],
+    "url": {
+      "path": "/_nodes/reload_secure_settings",
+      "paths": ["/_nodes/reload_secure_settings", "/_nodes/{node_id}/reload_secure_settings"],
+      "parts": {
+        "node_id": {
+          "type": "list",
+          "description": "A comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes."
+        }
+      },
+      "params": {
+        "timeout": {
+          "type" : "time",
+          "description" : "Explicit operation timeout"
+        }
+      }
+    },
+    "body": null
+  }
+}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml
new file mode 100644
index 0000000000000..0a4cf0d64a001
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml
@@ -0,0 +1,8 @@
+---
+"node_reload_secure_settings test":
+
+  - do:
+      nodes.reload_secure_settings: {}
+
+  - is_true: nodes
+  - is_true: cluster_name

From 73cc92de1d0f430280b33ef0f24c313f7a382589 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sun, 26 Aug 2018 09:36:17 -0400
Subject: [PATCH 03/18] Fix a mappings update test (#33146)

This commit fixes a mappings update test. The test is broken in the
sense that it passes, but for the wrong reason. The test here is testing
that if we make a mapping update but do not commit that mapping update
then the mapper service still maintains the previous document
mapper. This was not the case long, long ago when a mapping update
would update the in-memory state before the cluster state update was
committed. This test was passing, but it was passing because the mapping
was never even updated. It was never updated because the update was
encountering a null pointer exception. Of course the in-memory state is
not going to be updated in that case, we are simply going to end up with
a failed cluster state update. Fixing that leads to another issue: the
mapping source does not even parse, so again we would, of course, end up
with the in-memory state not being modified. We fix these issues, assert
that the resulting cluster state task completed successfully, and
finally assert that the in-memory state was not updated since we never
committed the resulting cluster state.
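As a concrete illustration (cf. the request.source(...) lines in the diff
below), the old test supplied a mapping source missing the colon after
"properties", which cannot parse, while the fixed test supplies valid JSON:

    { "properties" { "field": { "type": "text" }}}     <-- before: fails to parse
    { "properties": { "field": { "type": "text" }}}    <-- after: valid mapping source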
---
 .../metadata/MetaDataMappingServiceTests.java | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
index 428d9488dc2c6..a3fecc7eae0b9 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
@@ -16,15 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.elasticsearch.cluster.metadata;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskExecutor;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -37,6 +40,7 @@

 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;

 public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {

@@ -98,8 +102,18 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex
        final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
        // TODO - it will be nice to get a random mapping generator
        final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
-        request.source("{ \"properties\" { \"field\": { \"type\": \"text\" }}}");
-        mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+        request.indices(new Index[] {indexService.index()});
+        request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}");
+        final ClusterStateTaskExecutor.ClusterTasksResult result =
+                mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+        // the task completed successfully
+        assertThat(result.executionResults.size(), equalTo(1));
+        assertTrue(result.executionResults.values().iterator().next().isSuccess());
+        // the task really was a mapping update
+        assertThat(
+                indexService.mapperService().documentMapper("type").mappingSource(),
+                not(equalTo(result.resultingState.metaData().index("test").mapping("type").source())));
+        // since we never committed the cluster state update, the in-memory state is unchanged
        assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping));
    }

@@ -120,4 +134,5 @@ public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception

        assertSame(result, result2);
    }
+
 }

From 77295cfafc7bcf8bb1cf478aa494bbe1d82259ee Mon Sep 17 00:00:00 2001
From: Alpar Torok
Date: Mon, 27 Aug 2018 08:44:06 +0300
Subject: [PATCH 04/18] Apply publishing to generate pom (#33094)

---
 x-pack/plugin/security/build.gradle | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle
index 699d0dd614edb..4f670d29b7001 100644
---
a/x-pack/plugin/security/build.gradle
+++ b/x-pack/plugin/security/build.gradle
@@ -1,6 +1,7 @@
 evaluationDependsOn(xpackModule('core'))

 apply plugin: 'elasticsearch.esplugin'
+apply plugin: 'nebula.maven-scm'
 esplugin {
   name 'x-pack-security'
   description 'Elasticsearch Expanded Pack Plugin - Security'

From 5a8427a6971212d5c2b799e63894eda99cf7e98c Mon Sep 17 00:00:00 2001
From: Alpar Torok
Date: Wed, 22 Aug 2018 09:05:22 +0300
Subject: [PATCH 05/18] Run forbidden api checks with runtimeJavaVersion (#32947)

Run forbidden APIs checks with runtime Java version
---
 .../gradle/precommit/PrecommitTasks.groovy    | 109 +++++--------
 ...ExportElasticsearchBuildResourcesTask.java |   3 +-
 .../precommit/ForbiddenApisCliTask.java       | 154 ++++++++++++++++++
 client/rest-high-level/build.gradle           |   6 +-
 client/rest/build.gradle                      |  13 +-
 client/sniffer/build.gradle                   |   7 +-
 client/test/build.gradle                      |   8 +-
 client/transport/build.gradle                 |   6 +-
 .../tools/java-version-checker/build.gradle   |   6 +-
 distribution/tools/launchers/build.gradle     |  12 +-
 libs/cli/build.gradle                         |   5 +-
 libs/core/build.gradle                        |   4 +-
 libs/grok/build.gradle                        |   4 +-
 libs/secure-sm/build.gradle                   |   5 +-
 libs/x-content/build.gradle                   |   4 +-
 plugins/analysis-icu/build.gradle             |   4 +-
 qa/vagrant/build.gradle                       |   6 +-
 test/framework/build.gradle                   |   7 +-
 test/logger-usage/build.gradle                |   4 +-
 .../ml/log-structure-finder/build.gradle      |   4 +-
 x-pack/plugin/security/build.gradle           |   3 +-
 x-pack/plugin/sql/jdbc/build.gradle           |   2 +-
 x-pack/plugin/sql/sql-action/build.gradle     |   5 +-
 x-pack/plugin/sql/sql-cli/build.gradle        |   8 +-
 x-pack/plugin/sql/sql-client/build.gradle     |   2 +-
 x-pack/plugin/sql/sql-proto/build.gradle      |   5 +-
 x-pack/qa/sql/build.gradle                    |   4 +-
 x-pack/transport-client/build.gradle          |   5 +-
 28 files changed, 239 insertions(+), 166 deletions(-)
 create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
index a0c6fef4fe1f6..b63b1f40d8049 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
@@ -18,19 +18,12 @@
 */
 package org.elasticsearch.gradle.precommit

-import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
-import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
-import org.gradle.api.JavaVersion
 import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
-import org.gradle.api.JavaVersion
 import org.gradle.api.Project
 import org.gradle.api.Task
-import org.gradle.api.file.FileCollection
+import org.gradle.api.artifacts.Configuration
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.plugins.quality.Checkstyle
-import org.gradle.api.tasks.JavaExec
-import org.gradle.api.tasks.StopExecutionException
-
 /**
 * Validation tasks which should be run before committing. These run before tests.
 */
@@ -39,8 +32,8 @@ class PrecommitTasks {

    /** Adds a precommit task, which depends on non-test verification tasks.
*/ public static Task create(Project project, boolean includeDependencyLicenses) { List precommitTasks = [ - configureForbiddenApis(project), configureCheckstyle(project), + configureForbiddenApisCli(project), configureNamingConventions(project), project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), project.tasks.create('licenseHeaders', LicenseHeadersTask.class), @@ -49,9 +42,6 @@ class PrecommitTasks { project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) ] - // Configure it but don't add it as a dependency yet - configureForbiddenApisCli(project) - // tasks with just tests don't need dependency licenses, so this flag makes adding // the task optional if (includeDependencyLicenses) { @@ -85,77 +75,60 @@ class PrecommitTasks { return project.tasks.create(precommitOptions) } - private static Task configureForbiddenApis(Project project) { - project.pluginManager.apply(ForbiddenApisPlugin.class) - project.forbiddenApis { - failOnUnsupportedJava = false - bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-non-portable', 'jdk-system-out'] - signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'), - getClass().getResource('/forbidden/es-all-signatures.txt')] - suppressAnnotations = ['**.SuppressForbidden'] - } - project.tasks.withType(CheckForbiddenApis) { - // we do not use the += operator to add signatures, as conventionMappings of Gradle do not work when it's configured using withType: - if (name.endsWith('Test')) { - signaturesURLs = project.forbiddenApis.signaturesURLs + - [ getClass().getResource('/forbidden/es-test-signatures.txt'), getClass().getResource('/forbidden/http-signatures.txt') ] - } else { - signaturesURLs = project.forbiddenApis.signaturesURLs + - [ getClass().getResource('/forbidden/es-server-signatures.txt') ] - } - // forbidden apis doesn't support Java 11, so stop at 10 - String targetMajorVersion = (project.compilerJavaVersion.compareTo(JavaVersion.VERSION_1_10) > 0 ? 
- JavaVersion.VERSION_1_10 : - project.compilerJavaVersion).getMajorVersion() - targetCompatibility = Integer.parseInt(targetMajorVersion) >= 9 ?targetMajorVersion : "1.${targetMajorVersion}" - } - Task forbiddenApis = project.tasks.findByName('forbiddenApis') - forbiddenApis.group = "" // clear group, so this does not show up under verification tasks - - return forbiddenApis - } - private static Task configureForbiddenApisCli(Project project) { - project.configurations.create("forbiddenApisCliJar") + Configuration forbiddenApisConfiguration = project.configurations.create("forbiddenApisCliJar") project.dependencies { - forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.5' + forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.5') } - Task forbiddenApisCli = project.tasks.create('forbiddenApisCli') + Task forbiddenApisCli = project.tasks.create('forbiddenApis') project.sourceSets.forEach { sourceSet -> forbiddenApisCli.dependsOn( - project.tasks.create(sourceSet.getTaskName('forbiddenApisCli', null), JavaExec) { + project.tasks.create(sourceSet.getTaskName('forbiddenApis', null), ForbiddenApisCliTask) { ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') dependsOn(buildResources) - classpath = project.files( - project.configurations.forbiddenApisCliJar, + execAction = { spec -> + spec.classpath = project.files( + project.configurations.forbiddenApisCliJar, + sourceSet.compileClasspath, + sourceSet.runtimeClasspath + ) + spec.executable = "${project.runtimeJavaHome}/bin/java" + } + inputs.files( + forbiddenApisConfiguration, sourceSet.compileClasspath, sourceSet.runtimeClasspath ) - main = 'de.thetaphi.forbiddenapis.cli.CliMain' - executable = "${project.runtimeJavaHome}/bin/java" - args "-b", 'jdk-unsafe-1.8' - args "-b", 'jdk-deprecated-1.8' - args "-b", 'jdk-non-portable' - args "-b", 'jdk-system-out' - args "-f", buildResources.copy("forbidden/jdk-signatures.txt") - args "-f", buildResources.copy("forbidden/es-all-signatures.txt") - args "--suppressannotation", '**.SuppressForbidden' + + targetCompatibility = project.compilerJavaVersion + bundledSignatures = [ + "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" + ] + signaturesFiles = project.files( + buildResources.copy("forbidden/jdk-signatures.txt"), + buildResources.copy("forbidden/es-all-signatures.txt") + ) + suppressAnnotations = ['**.SuppressForbidden'] if (sourceSet.name == 'test') { - args "-f", buildResources.copy("forbidden/es-test-signatures.txt") - args "-f", buildResources.copy("forbidden/http-signatures.txt") + signaturesFiles += project.files( + buildResources.copy("forbidden/es-test-signatures.txt"), + buildResources.copy("forbidden/http-signatures.txt") + ) } else { - args "-f", buildResources.copy("forbidden/es-server-signatures.txt") + signaturesFiles += project.files(buildResources.copy("forbidden/es-server-signatures.txt")) } dependsOn sourceSet.classesTaskName - doFirst { - // Forbidden APIs expects only existing dirs, and requires at least one - FileCollection existingOutputs = sourceSet.output.classesDirs - .filter { it.exists() } - if (existingOutputs.isEmpty()) { - throw new StopExecutionException("${sourceSet.name} has no outputs") - } - existingOutputs.forEach { args "-d", it } + classesDirs = sourceSet.output.classesDirs + ext.replaceSignatureFiles = { String... names -> + signaturesFiles = project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) + } + ext.addSignatureFiles = { String... 
names -> + signaturesFiles += project.files( + names.collect { buildResources.copy("forbidden/${it}.txt") } + ) } } ) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java index 03c18f54e67ef..4af104093a5cb 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java @@ -35,6 +35,7 @@ import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.Collections; import java.util.HashSet; import java.util.Set; @@ -105,7 +106,7 @@ public void doExport() { if (is == null) { throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found"); } - Files.copy(is, destination); + Files.copy(is, destination, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java new file mode 100644 index 0000000000000..e33f167096414 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.precommit; + +import de.thetaphi.forbiddenapis.cli.CliMain; +import org.gradle.api.Action; +import org.gradle.api.DefaultTask; +import org.gradle.api.JavaVersion; +import org.gradle.api.file.FileCollection; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.TaskAction; +import org.gradle.process.JavaExecSpec; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +public class ForbiddenApisCliTask extends DefaultTask { + + private FileCollection signaturesFiles; + private List signatures = new ArrayList<>(); + private Set bundledSignatures = new LinkedHashSet<>(); + private Set suppressAnnotations = new LinkedHashSet<>(); + private JavaVersion targetCompatibility; + private FileCollection classesDirs; + private Action execAction; + + public JavaVersion getTargetCompatibility() { + return targetCompatibility; + } + + public void setTargetCompatibility(JavaVersion targetCompatibility) { + this.targetCompatibility = targetCompatibility; + } + + public Action getExecAction() { + return execAction; + } + + public void setExecAction(Action execAction) { + this.execAction = execAction; + } + + @OutputFile + public File getMarkerFile() { + return new File( + new File(getProject().getBuildDir(), "precommit"), + getName() + ); + } + + @InputFiles + @SkipWhenEmpty + public FileCollection getClassesDirs() { + return classesDirs.filter(File::exists); + } + + public void setClassesDirs(FileCollection classesDirs) { + this.classesDirs = classesDirs; + } + + @InputFiles + public FileCollection getSignaturesFiles() { + return signaturesFiles; + } + + public void setSignaturesFiles(FileCollection signaturesFiles) { + this.signaturesFiles = signaturesFiles; + } + + @Input + public List getSignatures() { + return signatures; + } + + public void setSignatures(List signatures) { + this.signatures = signatures; + } + + @Input + public Set getBundledSignatures() { + return bundledSignatures; + } + + public void setBundledSignatures(Set bundledSignatures) { + this.bundledSignatures = bundledSignatures; + } + + @Input + public Set getSuppressAnnotations() { + return suppressAnnotations; + } + + public void setSuppressAnnotations(Set suppressAnnotations) { + this.suppressAnnotations = suppressAnnotations; + } + + @TaskAction + public void runForbiddenApisAndWriteMarker() throws IOException { + getProject().javaexec((JavaExecSpec spec) -> { + execAction.execute(spec); + spec.setMain(CliMain.class.getName()); + // build the command line + getSignaturesFiles().forEach(file -> spec.args("-f", file.getAbsolutePath())); + getSuppressAnnotations().forEach(annotation -> spec.args("--suppressannotation", annotation)); + getBundledSignatures().forEach(bundled -> { + // there's no option for target compatibility so we have to interpret it + final String prefix; + if (bundled.equals("jdk-system-out") || + bundled.equals("jdk-reflection") || + bundled.equals("jdk-non-portable")) { + prefix = ""; + } else { + prefix = "-" + ( + getTargetCompatibility().compareTo(JavaVersion.VERSION_1_9) >= 0 ? + getTargetCompatibility().getMajorVersion() : + "1." 
+ getTargetCompatibility().getMajorVersion()) + ; + } + spec.args("-b", bundled + prefix); + } + ); + getClassesDirs().forEach(dir -> + spec.args("-d", dir) + ); + }); + Files.write(getMarkerFile().toPath(), Collections.emptyList()); + } + +} diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 48169faac2fcd..9acfc630f94f5 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -16,8 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RestIntegTestTask import org.gradle.api.internal.provider.Providers @@ -75,8 +73,8 @@ dependencyLicenses { forbiddenApisMain { // core does not depend on the httpclient for compile so we add the signatures here. We don't add them for test as they are already // specified - signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] - signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] + addSignatureFiles 'http-signatures' + signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt') } integTestCluster { diff --git a/client/rest/build.gradle b/client/rest/build.gradle index fc2ab0bc4c05d..273836a31f0cb 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -16,9 +18,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -53,10 +52,9 @@ dependencies { testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" } -forbiddenApisMain { +tasks.withType(ForbiddenApisCliTask) { //client does not depend on server, so only jdk and http signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/http-signatures.txt')] + replaceSignatureFiles ('jdk-signatures', 'http-signatures') } forbiddenPatterns { @@ -67,9 +65,6 @@ forbiddenApisTest { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' - //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/http-signatures.txt')] } // JarHell is part of es server, which we don't want to pull in diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 41146e0b7ec08..6ba69c5713c57 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -55,7 +52,7 @@ dependencies { forbiddenApisMain { //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { @@ -63,7 +60,7 @@ forbiddenApisTest { bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' //client does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/client/test/build.gradle b/client/test/build.gradle index 59c45186fe76b..408653f064352 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -16,10 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.JavaVersion - apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 @@ -34,7 +30,7 @@ dependencies { forbiddenApisMain { //client does not depend on core, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { @@ -42,7 +38,7 @@ forbiddenApisTest { bundledSignatures -= 'jdk-non-portable' bundledSignatures += 'jdk-internal' //client does not depend on core, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } // JarHell is part of es server, which we don't want to pull in diff --git a/client/transport/build.gradle b/client/transport/build.gradle index 944a038edd97c..269a37105fb19 100644 --- a/client/transport/build.gradle +++ b/client/transport/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -47,8 +44,7 @@ dependencyLicenses { forbiddenApisTest { // we don't use the core test-framework, no lucene classes present so we don't want the es-test-signatures to // be pulled in - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures' } namingConventions { diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle index ad9b56fec0502..6d18b79d4bddf 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/java-version-checker/build.gradle @@ -1,11 +1,11 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 // java_version_checker do not depend on core so only JDK signatures should be checked -forbiddenApisMain.signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] +forbiddenApisMain { + replaceSignatureFiles 'jdk-signatures' +} test.enabled = false namingConventions.enabled = false diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index a774691b2eb17..ca1aa6bcac9d6 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -17,8 +17,9 @@ * under the License. */ -import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.JavaVersion + + +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask apply plugin: 'elasticsearch.build' @@ -31,10 +32,9 @@ dependencies { archivesBaseName = 'elasticsearch-launchers' -// java_version_checker do not depend on core so only JDK signatures should be checked -List jdkSignatures = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] -forbiddenApisMain.signaturesURLs = jdkSignatures -forbiddenApisTest.signaturesURLs = jdkSignatures +tasks.withType(ForbiddenApisCliTask) { + replaceSignatureFiles 'jdk-signatures' +} namingConventions { testClass = 'org.elasticsearch.tools.launchers.LaunchersTestCase' diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 91fbca19eca99..1182fccffa6e4 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.optional-base' apply plugin: 'nebula.maven-base-publish' @@ -44,5 +41,5 @@ test.enabled = false jarHell.enabled = false forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 2017c2a418ac4..cc5c1e20fc162 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -91,7 +89,7 @@ dependencies { forbiddenApisMain { // :libs:core does not depend on server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index 61437be6aff13..37b494624eddb 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -34,7 +32,7 @@ dependencies { } forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/secure-sm/build.gradle b/libs/secure-sm/build.gradle index 93fdfd01c8f0c..3baf3513b1206 100644 --- a/libs/secure-sm/build.gradle +++ b/libs/secure-sm/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -47,7 +44,7 @@ dependencies { } forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index c8b37108ff93c..0ec4e0d6ad312 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -57,7 +55,7 @@ dependencies { forbiddenApisMain { // x-content does not depend on server // TODO: Need to decide how we want to handle for forbidden signatures with the changes to core - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } if (isEclipse) { diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 1883e3bf1b9d6..676fd44813151 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -22,7 +24,7 @@ esplugin { classname 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin' } -forbiddenApis { +tasks.withType(ForbiddenApisCliTask) { signatures += [ "com.ibm.icu.text.Collator#getInstance() @ Don't use default locale, use getInstance(ULocale) instead" ] diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 4a0c91469629d..4c3b48cbac946 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -69,9 +67,7 @@ esvagrant { } forbiddenApisMain { - signaturesURLs = [ - PrecommitTasks.getResource('/forbidden/jdk-signatures.txt') - ] + replaceSignatureFiles 'jdk-signatures' } // we don't have additional tests for the tests themselves diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 88ff5b87acbb2..4eb514d8f6d35 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks; - dependencies { compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" compile "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}" @@ -42,9 +39,7 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures', 'es-test-signatures' } // TODO: should we have licenses for our test deps? diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index c16dab6a625c8..0f02283e53738 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -29,7 +27,7 @@ loggerUsageCheck.enabled = false forbiddenApisMain.enabled = true // disabled by parent project forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] // does not depend on core, only jdk signatures + replaceSignatureFiles 'jdk-signatures' // does not depend on core, only jdk signatures } jarHell.enabled = true // disabled by parent project diff --git a/x-pack/plugin/ml/log-structure-finder/build.gradle b/x-pack/plugin/ml/log-structure-finder/build.gradle index 9048a1c46860c..f5dff6dc8464d 100644 --- a/x-pack/plugin/ml/log-structure-finder/build.gradle +++ b/x-pack/plugin/ml/log-structure-finder/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' archivesBaseName = 'x-pack-log-structure-finder' @@ -31,6 +29,6 @@ artifacts { forbiddenApisMain { // log-structure-finder does not depend on server, so cannot forbid server methods - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 4f670d29b7001..a2d6ed8a49a55 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -158,8 +158,7 @@ forbiddenPatterns { } forbiddenApisMain { - signaturesURLs += file('forbidden/ldap-signatures.txt').toURI().toURL() - signaturesURLs += file('forbidden/xml-signatures.txt').toURI().toURL() + signaturesFiles += files('forbidden/ldap-signatures.txt', 'forbidden/xml-signatures.txt') } // classes are missing, e.g. 
com.ibm.icu.lang.UCharacter diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index a0d9b24c50729..1a7d6115e1556 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -8,7 +8,7 @@ archivesBaseName = "x-pack-sql-jdbc" forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked - signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencies { diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index bf79fd824ef8d..345318d20b803 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -2,9 +2,6 @@ /* * This project contains transport-level requests and responses that are shared between x-pack plugin and qa tests */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' description = 'Request and response objects shared by the cli, jdbc ' + @@ -34,7 +31,7 @@ dependencies { forbiddenApisMain { //sql does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index b90b07abad3d1..0b2559c6a84aa 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask /* * This project is named sql-cli because it is in the "org.elasticsearch.plugin" @@ -74,11 +75,8 @@ artifacts { } -forbiddenApisMain { - signaturesURLs += file('src/forbidden/cli-signatures.txt').toURI().toURL() -} -forbiddenApisTest { - signaturesURLs += file('src/forbidden/cli-signatures.txt').toURI().toURL() +tasks.withType(ForbiddenApisCliTask) { + signaturesFiles += files('src/forbidden/cli-signatures.txt') } thirdPartyAudit.excludes = [ diff --git a/x-pack/plugin/sql/sql-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle index fbc411e44596d..c4ee030d4568f 100644 --- a/x-pack/plugin/sql/sql-client/build.gradle +++ b/x-pack/plugin/sql/sql-client/build.gradle @@ -26,7 +26,7 @@ dependencyLicenses { forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked - signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } forbiddenApisTest { diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 7f26176e3c7a7..7d28336bfc51f 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -2,9 +2,6 @@ /* * This project contains XContent protocol classes shared between server and http client */ - -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' description = 'Request and response objects shared by the cli, jdbc ' + @@ -25,7 +22,7 @@ dependencies { forbiddenApisMain { //sql does not depend on server, so only jdk signatures should be checked - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] + replaceSignatureFiles 'jdk-signatures' } dependencyLicenses { diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle index 17a1d5acdc99f..baaf0451e51f2 100644 --- a/x-pack/qa/sql/build.gradle +++ 
b/x-pack/qa/sql/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RunTask description = 'Integration tests for SQL' @@ -29,8 +28,7 @@ dependenciesInfo.enabled = false // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { - signaturesURLs = [PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')] + replaceSignatureFiles 'es-all-signatures', 'es-test-signatures' } thirdPartyAudit.excludes = [ diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index 2e350ef98ff59..a96f4146fbf67 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -22,8 +20,7 @@ dependencyLicenses.enabled = false forbiddenApisTest { // we don't use the core test-framework, no lucene classes present so we don't want the es-test-signatures to // be pulled in - signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), - PrecommitTasks.getResource('/forbidden/es-all-signatures.txt')] + replaceSignatureFiles 'jdk-signatures', 'es-all-signatures' } namingConventions { From dcdbb92748e79850e6537d5471ac5c4a6536c3dc Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Mon, 27 Aug 2018 08:47:42 +0300 Subject: [PATCH 06/18] Fix forbiddenapis on java 11 (#33116) Cap forbiddenapis to java version 10 --- .../gradle/precommit/ForbiddenApisCliTask.java | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java index e33f167096414..21a0597b38afc 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ForbiddenApisCliTask.java @@ -23,6 +23,8 @@ import org.gradle.api.DefaultTask; import org.gradle.api.JavaVersion; import org.gradle.api.file.FileCollection; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.OutputFile; @@ -41,6 +43,7 @@ public class ForbiddenApisCliTask extends DefaultTask { + private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class); private FileCollection signaturesFiles; private List signatures = new ArrayList<>(); private Set bundledSignatures = new LinkedHashSet<>(); @@ -49,12 +52,21 @@ public class ForbiddenApisCliTask extends DefaultTask { private FileCollection classesDirs; private Action execAction; + @Input public JavaVersion getTargetCompatibility() { return targetCompatibility; } public void setTargetCompatibility(JavaVersion targetCompatibility) { - this.targetCompatibility = targetCompatibility; + if (targetCompatibility.compareTo(JavaVersion.VERSION_1_10) > 0) { + logger.warn( + "Target compatibility is set to {} but forbiddenapis only supports up to 10. 
Will cap at 10.", + targetCompatibility + ); + this.targetCompatibility = JavaVersion.VERSION_1_10; + } else { + this.targetCompatibility = targetCompatibility; + } } public Action getExecAction() { From ec9563e5fa72d267f88ac5c9032b2ec896073153 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Tue, 22 May 2018 07:21:16 +0300 Subject: [PATCH 07/18] Accept Gradle build scan agreement (#30645) * Accept Gradle build scan agreement Scans will be produced only when passing `--scan` * Condition TOS acceptance with property * Switch to boolean flags --- build.gradle | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/build.gradle b/build.gradle index ee3afa9388982..f8e2f312f18d5 100644 --- a/build.gradle +++ b/build.gradle @@ -33,6 +33,16 @@ import java.nio.file.Files import java.nio.file.Path import java.security.MessageDigest +plugins { + id 'com.gradle.build-scan' version '1.13.2' +} +if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") { + buildScan { + termsOfServiceUrl = 'https://gradle.com/terms-of-service' + termsOfServiceAgree = 'yes' + } +} + // common maven publishing configuration subprojects { group = 'org.elasticsearch' From b0411f24038da4508a9bd635d0fdc6066b964bd8 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 27 Aug 2018 09:18:26 +0200 Subject: [PATCH 08/18] [Rollup] Move toBuilders() methods out of rollup config objects (#32585) --- .../rollup/job/DateHistogramGroupConfig.java | 18 --- .../core/rollup/job/HistogramGroupConfig.java | 24 --- .../xpack/core/rollup/job/MetricConfig.java | 59 +------ .../core/rollup/job/TermsGroupConfig.java | 19 --- .../job/TermsGroupConfigSerializingTests.java | 61 -------- .../xpack/rollup/job/RollupIndexer.java | 144 ++++++++++++++++-- .../rollup/action/job/RollupIndexTests.java | 83 ++++++++++ .../xpack/rollup/job/IndexerUtilsTests.java | 25 ++- 8 files changed, 236 insertions(+), 197 deletions(-) create mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java index a9cc95bb07c9d..166322b93722c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java @@ -20,16 +20,11 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.xpack.core.rollup.RollupField; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.Objects; @@ -182,19 +177,6 @@ public Rounding createRounding() { return createRounding(interval.toString(), timeZone); } - /** - * This returns a set of aggregation builders which represent the configured - * set of date histograms. 
Used by the rollup indexer to iterate over historical data - */ - public List> toBuilders() { - DateHistogramValuesSourceBuilder vsBuilder = - new DateHistogramValuesSourceBuilder(RollupField.formatIndexerAggName(field, DateHistogramAggregationBuilder.NAME)); - vsBuilder.dateHistogramInterval(interval); - vsBuilder.field(field); - vsBuilder.timeZone(toDateTimeZone(timeZone)); - return Collections.singletonList(vsBuilder); - } - public void validateMappings(Map> fieldCapsResponse, ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java index d1bc50566faff..a22d022ee2dbb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java @@ -16,18 +16,13 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.xpack.core.rollup.RollupField; import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -85,25 +80,6 @@ public String[] getFields() { return fields; } - /** - * This returns a set of aggregation builders which represent the configured - * set of histograms. 
Used by the rollup indexer to iterate over historical data - */ - public List> toBuilders() { - if (fields.length == 0) { - return Collections.emptyList(); - } - - return Arrays.stream(fields).map(f -> { - HistogramValuesSourceBuilder vsBuilder - = new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(f, HistogramAggregationBuilder.NAME)); - vsBuilder.interval(interval); - vsBuilder.field(f); - vsBuilder.missingBucket(true); - return vsBuilder; - }).collect(Collectors.toList()); - } - public void validateMappings(Map> fieldCapsResponse, ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java index b4e022f55004c..3a267e4cfa47c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java @@ -16,18 +16,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.xpack.core.rollup.RollupField; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -53,11 +44,11 @@ public class MetricConfig implements Writeable, ToXContentObject { // TODO: replace these with an enum - private static final ParseField MIN = new ParseField("min"); - private static final ParseField MAX = new ParseField("max"); - private static final ParseField SUM = new ParseField("sum"); - private static final ParseField AVG = new ParseField("avg"); - private static final ParseField VALUE_COUNT = new ParseField("value_count"); + public static final ParseField MIN = new ParseField("min"); + public static final ParseField MAX = new ParseField("max"); + public static final ParseField SUM = new ParseField("sum"); + public static final ParseField AVG = new ParseField("avg"); + public static final ParseField VALUE_COUNT = new ParseField("value_count"); static final String NAME = "metrics"; private static final String FIELD = "field"; @@ -111,46 +102,6 @@ public List getMetrics() { return metrics; } - /** - * This returns a set of aggregation builders which represent the configured - * set of metrics. 
Used by the rollup indexer to iterate over historical data - */ - public List toBuilders() { - if (metrics.size() == 0) { - return Collections.emptyList(); - } - - List aggs = new ArrayList<>(metrics.size()); - for (String metric : metrics) { - ValuesSourceAggregationBuilder.LeafOnly newBuilder; - if (metric.equals(MIN.getPreferredName())) { - newBuilder = new MinAggregationBuilder(RollupField.formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE)); - } else if (metric.equals(MAX.getPreferredName())) { - newBuilder = new MaxAggregationBuilder(RollupField.formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE)); - } else if (metric.equals(AVG.getPreferredName())) { - // Avgs are sum + count - newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE)); - ValuesSourceAggregationBuilder.LeafOnly countBuilder - = new ValueCountAggregationBuilder( - RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC); - countBuilder.field(field); - aggs.add(countBuilder); - } else if (metric.equals(SUM.getPreferredName())) { - newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE)); - } else if (metric.equals(VALUE_COUNT.getPreferredName())) { - // TODO allow non-numeric value_counts. - // Hardcoding this is fine for now since the job validation guarantees that all metric fields are numerics - newBuilder = new ValueCountAggregationBuilder( - RollupField.formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC); - } else { - throw new IllegalArgumentException("Unsupported metric type [" + metric + "]"); - } - newBuilder.field(field); - aggs.add(newBuilder); - } - return aggs; - } - public void validateMappings(Map> fieldCapsResponse, ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java index abd6825e9f7be..fbc039843258e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java @@ -18,16 +18,11 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.xpack.core.rollup.RollupField; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -79,20 +74,6 @@ public String[] getFields() { return fields; } - /** - * This returns a set of aggregation builders which represent the configured - * set of date histograms. 
Used by the rollup indexer to iterate over historical data - */ - public List> toBuilders() { - return Arrays.stream(fields).map(f -> { - TermsValuesSourceBuilder vsBuilder - = new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME)); - vsBuilder.field(f); - vsBuilder.missingBucket(true); - return vsBuilder; - }).collect(Collectors.toList()); - } - public void validateMappings(Map> fieldCapsResponse, ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfigSerializingTests.java index ccdd616df7b51..b0e33579eb353 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfigSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfigSerializingTests.java @@ -9,19 +9,16 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCase { @@ -77,62 +74,4 @@ public void testValidateFieldWrongType() { assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " + "[keyword/text] type, but found [geo_point] for field [my_field]")); } - - public void testValidateFieldMatchingNotAggregatable() { - ActionRequestValidationException e = new ActionRequestValidationException(); - Map> responseMap = new HashMap<>(); - - // Have to mock fieldcaps because the ctor's aren't public... - FieldCapabilities fieldCaps = mock(FieldCapabilities.class); - when(fieldCaps.isAggregatable()).thenReturn(false); - responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps)); - - TermsGroupConfig config = new TermsGroupConfig("my_field"); - config.validateMappings(responseMap, e); - assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not.")); - } - - public void testValidateMatchingField() { - ActionRequestValidationException e = new ActionRequestValidationException(); - Map> responseMap = new HashMap<>(); - String type = getRandomType(); - - // Have to mock fieldcaps because the ctor's aren't public... 
- FieldCapabilities fieldCaps = mock(FieldCapabilities.class); - when(fieldCaps.isAggregatable()).thenReturn(true); - responseMap.put("my_field", Collections.singletonMap(type, fieldCaps)); - - TermsGroupConfig config = new TermsGroupConfig("my_field"); - config.validateMappings(responseMap, e); - if (e.validationErrors().size() != 0) { - fail(e.getMessage()); - } - - List> builders = config.toBuilders(); - assertThat(builders.size(), equalTo(1)); - } - - private String getRandomType() { - int n = randomIntBetween(0,8); - if (n == 0) { - return "keyword"; - } else if (n == 1) { - return "text"; - } else if (n == 2) { - return "long"; - } else if (n == 3) { - return "integer"; - } else if (n == 4) { - return "short"; - } else if (n == 5) { - return "float"; - } else if (n == 6) { - return "double"; - } else if (n == 7) { - return "scaled_float"; - } else if (n == 8) { - return "half_float"; - } - return "long"; - } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index d1db021361c8c..6abb7ffa56754 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -15,19 +15,35 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.IndexerState; +import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import 
org.elasticsearch.xpack.core.rollup.job.RollupJobStats; +import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; +import org.joda.time.DateTimeZone; import java.util.ArrayList; import java.util.Arrays; @@ -38,6 +54,10 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.xpack.core.rollup.RollupField.formatFieldName; + /** * An abstract class that builds a rollup index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, * it will create the rollup index from the source index up to the last complete bucket that is allowed to be built (based on the current @@ -392,21 +412,12 @@ private SearchRequest buildSearchRequest() { */ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig config) { final GroupConfig groupConfig = config.getGroupConfig(); - List> builders = new ArrayList<>(); - - // Add all the agg builders to our request in order: date_histo -> histo -> terms - if (groupConfig != null) { - builders.addAll(groupConfig.getDateHistogram().toBuilders()); - if (groupConfig.getHistogram() != null) { - builders.addAll(groupConfig.getHistogram().toBuilders()); - } - if (groupConfig.getTerms() != null) { - builders.addAll(groupConfig.getTerms().toBuilders()); - } - } + List> builders = createValueSourceBuilders(groupConfig); CompositeAggregationBuilder composite = new CompositeAggregationBuilder(AGGREGATION_NAME, builders); - config.getMetricsConfig().forEach(m -> m.toBuilders().forEach(composite::subAggregation)); + + List aggregations = createAggregationBuilders(config.getMetricsConfig()); + aggregations.forEach(composite::subAggregation); final Map metadata = createMetadata(groupConfig); if (metadata.isEmpty() == false) { @@ -456,5 +467,112 @@ static Map createMetadata(final GroupConfig groupConfig) { } return metadata; } + + public static List> createValueSourceBuilders(final GroupConfig groupConfig) { + final List> builders = new ArrayList<>(); + // Add all the agg builders to our request in order: date_histo -> histo -> terms + if (groupConfig != null) { + final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram(); + builders.addAll(createValueSourceBuilders(dateHistogram)); + + final HistogramGroupConfig histogram = groupConfig.getHistogram(); + builders.addAll(createValueSourceBuilders(histogram)); + + final TermsGroupConfig terms = groupConfig.getTerms(); + builders.addAll(createValueSourceBuilders(terms)); + } + return unmodifiableList(builders); + } + + public static List> createValueSourceBuilders(final DateHistogramGroupConfig dateHistogram) { + final String dateHistogramField = dateHistogram.getField(); + final String dateHistogramName = RollupField.formatIndexerAggName(dateHistogramField, DateHistogramAggregationBuilder.NAME); + final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName); + dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval()); + dateHistogramBuilder.field(dateHistogramField); + dateHistogramBuilder.timeZone(toDateTimeZone(dateHistogram.getTimeZone())); + return singletonList(dateHistogramBuilder); + } + + public static List> createValueSourceBuilders(final HistogramGroupConfig histogram) { + final List> builders = new ArrayList<>(); + if (histogram != null) { + for (String field : histogram.getFields()) { + final 
String histogramName = RollupField.formatIndexerAggName(field, HistogramAggregationBuilder.NAME); + final HistogramValuesSourceBuilder histogramBuilder = new HistogramValuesSourceBuilder(histogramName); + histogramBuilder.interval(histogram.getInterval()); + histogramBuilder.field(field); + histogramBuilder.missingBucket(true); + builders.add(histogramBuilder); + } + } + return unmodifiableList(builders); + } + + public static List> createValueSourceBuilders(final TermsGroupConfig terms) { + final List> builders = new ArrayList<>(); + if (terms != null) { + for (String field : terms.getFields()) { + final String termsName = RollupField.formatIndexerAggName(field, TermsAggregationBuilder.NAME); + final TermsValuesSourceBuilder termsBuilder = new TermsValuesSourceBuilder(termsName); + termsBuilder.field(field); + termsBuilder.missingBucket(true); + builders.add(termsBuilder); + } + } + return unmodifiableList(builders); + } + + /** + * This returns a set of aggregation builders which represent the configured + * set of metrics. Used to iterate over historical data. + */ + static List createAggregationBuilders(final List metricsConfigs) { + final List builders = new ArrayList<>(); + if (metricsConfigs != null) { + for (MetricConfig metricConfig : metricsConfigs) { + final List metrics = metricConfig.getMetrics(); + if (metrics.isEmpty() == false) { + final String field = metricConfig.getField(); + for (String metric : metrics) { + ValuesSourceAggregationBuilder.LeafOnly newBuilder; + if (metric.equals(MetricConfig.MIN.getPreferredName())) { + newBuilder = new MinAggregationBuilder(formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE)); + } else if (metric.equals(MetricConfig.MAX.getPreferredName())) { + newBuilder = new MaxAggregationBuilder(formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE)); + } else if (metric.equals(MetricConfig.AVG.getPreferredName())) { + // Avgs are sum + count + newBuilder = new SumAggregationBuilder(formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE)); + ValuesSourceAggregationBuilder.LeafOnly countBuilder + = new ValueCountAggregationBuilder( + formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC); + countBuilder.field(field); + builders.add(countBuilder); + } else if (metric.equals(MetricConfig.SUM.getPreferredName())) { + newBuilder = new SumAggregationBuilder(formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE)); + } else if (metric.equals(MetricConfig.VALUE_COUNT.getPreferredName())) { + // TODO allow non-numeric value_counts. 
+ // Hardcoding this is fine for now since the job validation guarantees that all metric fields are numerics + newBuilder = new ValueCountAggregationBuilder( + formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC); + } else { + throw new IllegalArgumentException("Unsupported metric type [" + metric + "]"); + } + newBuilder.field(field); + builders.add(newBuilder); + } + } + } + } + return unmodifiableList(builders); + } + + private static DateTimeZone toDateTimeZone(final String timezone) { + try { + return DateTimeZone.forOffsetHours(Integer.parseInt(timezone)); + } catch (NumberFormatException e) { + return DateTimeZone.forID(timezone); + } + } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java new file mode 100644 index 0000000000000..c0ba74e762de6 --- /dev/null +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/job/RollupIndexTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.rollup.action.job; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; +import org.elasticsearch.xpack.rollup.job.RollupIndexer; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RollupIndexTests extends ESTestCase { + + public void testValidateMatchingField() { + ActionRequestValidationException e = new ActionRequestValidationException(); + Map> responseMap = new HashMap<>(); + String type = getRandomType(); + + // Have to mock fieldcaps because the ctor's aren't public... + FieldCapabilities fieldCaps = mock(FieldCapabilities.class); + when(fieldCaps.isAggregatable()).thenReturn(true); + responseMap.put("my_field", Collections.singletonMap(type, fieldCaps)); + + TermsGroupConfig config = new TermsGroupConfig("my_field"); + config.validateMappings(responseMap, e); + if (e.validationErrors().size() != 0) { + fail(e.getMessage()); + } + + List> builders = RollupIndexer.createValueSourceBuilders(config); + assertThat(builders.size(), equalTo(1)); + } + + public void testValidateFieldMatchingNotAggregatable() { + ActionRequestValidationException e = new ActionRequestValidationException(); + Map> responseMap = new HashMap<>(); + + // Have to mock fieldcaps because the ctor's aren't public... 
+ FieldCapabilities fieldCaps = mock(FieldCapabilities.class); + when(fieldCaps.isAggregatable()).thenReturn(false); + responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps)); + + TermsGroupConfig config = new TermsGroupConfig("my_field"); + config.validateMappings(responseMap, e); + assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not.")); + } + + private String getRandomType() { + int n = randomIntBetween(0,8); + if (n == 0) { + return "keyword"; + } else if (n == 1) { + return "text"; + } else if (n == 2) { + return "long"; + } else if (n == 3) { + return "integer"; + } else if (n == 4) { + return "short"; + } else if (n == 5) { + return "float"; + } else if (n == 6) { + return "double"; + } else if (n == 7) { + return "scaled_float"; + } else if (n == 8) { + return "half_float"; + } + return "long"; + } +} diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index e8c66f7e8c118..d74e7413d15b6 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -57,6 +58,7 @@ import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig; +import static org.elasticsearch.xpack.rollup.job.RollupIndexer.createAggregationBuilders; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -101,9 +103,11 @@ public void testMissingFields() throws IOException { //TODO swap this over to DateHistoConfig.Builder once DateInterval is in DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig(timestampField, DateHistogramInterval.DAY); CompositeAggregationBuilder compositeBuilder = - new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, dateHistoGroupConfig.toBuilders()); + new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, + RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig)); MetricConfig metricConfig = new MetricConfig("does_not_exist", singletonList("max")); - metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + List metricAgg = createAggregationBuilders(singletonList(metricConfig)); + metricAgg.forEach(compositeBuilder::subAggregation); Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType); aggregator.preCollection(); @@ -170,7 +174,8 @@ public void testCorrectFields() throws IOException { singletonList(dateHisto)); MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max")); - metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + List metricAgg = 
createAggregationBuilders(singletonList(metricConfig)); + metricAgg.forEach(compositeBuilder::subAggregation); Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType); aggregator.preCollection(); @@ -226,7 +231,8 @@ public void testNumericTerms() throws IOException { singletonList(terms)); MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max")); - metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + List metricAgg = createAggregationBuilders(singletonList(metricConfig)); + metricAgg.forEach(compositeBuilder::subAggregation); Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType); aggregator.preCollection(); @@ -292,7 +298,8 @@ public void testEmptyCounts() throws IOException { singletonList(dateHisto)); MetricConfig metricConfig = new MetricConfig("another_field", Arrays.asList("avg", "sum")); - metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + List metricAgg = createAggregationBuilders(singletonList(metricConfig)); + metricAgg.forEach(compositeBuilder::subAggregation); Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType); aggregator.preCollection(); @@ -523,11 +530,13 @@ public void testMissingBuckets() throws IOException { // Setup the composite agg TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField); - CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, - termsGroupConfig.toBuilders()).size(numDocs*2); + CompositeAggregationBuilder compositeBuilder = + new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, RollupIndexer.createValueSourceBuilders(termsGroupConfig)) + .size(numDocs*2); MetricConfig metricConfig = new MetricConfig(metricField, singletonList("max")); - metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + List metricAgg = createAggregationBuilders(singletonList(metricConfig)); + metricAgg.forEach(compositeBuilder::subAggregation); Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType); aggregator.preCollection(); From a54ebd5d7869b537ab358da955770642ac5fb105 Mon Sep 17 00:00:00 2001 From: Mikita Karaliou Date: Mon, 27 Aug 2018 13:24:51 +0300 Subject: [PATCH 09/18] Support only string `format` in date, root object & date range (#28117) Limit date `format` attribute to String values only. 
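For illustration (a mapping snippet mirroring the new tests added below, not itself part of the patch), a date mapping that supplies `format` as an array:

    "field": { "type": "date", "format": ["test_format"] }

was previously accepted because the node was converted with `toString()` and handed to `Joda.forPattern`; with this change, mapping parsing fails fast with `Invalid format: [[test_format]]: expected string value`.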
Closes #23650 --- .../index/mapper/TypeParsers.java | 5 +++- .../index/mapper/DateFieldMapperTests.java | 18 +++++++++++++ .../index/mapper/RangeFieldMapperTests.java | 18 +++++++++++++ .../index/mapper/RootObjectMapperTests.java | 26 +++++++++++++++++++ 4 files changed, 66 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index ac7025c5b323d..0b1ee2c7f4b71 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -354,7 +354,10 @@ private static IndexOptions nodeIndexOptionValue(final Object propNode) { } public static FormatDateTimeFormatter parseDateTimeFormatter(Object node) { - return Joda.forPattern(node.toString()); + if (node instanceof String) { + return Joda.forPattern((String) node); + } + throw new IllegalArgumentException("Invalid format: [" + node.toString() + "]: expected string value"); } public static void parseTermVector(String fieldName, String termVector, FieldMapper.Builder builder) throws MapperParsingException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 97eb3454e90b1..a913a0f2ceb80 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -414,4 +414,22 @@ public void testMergeText() throws Exception { () -> mapper.merge(update.mapping(), randomBoolean())); assertEquals("mapper [date] of different type, current_type [date], merged_type [text]", e.getMessage()); } + + public void testIllegalFormatField() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "date") + .array("format", "test_format") + .endObject() + .endObject() + .endObject() + .endObject()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index 54418850e5d4f..00068f76e753d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -443,4 +443,22 @@ public void testSerializeDefaults() throws Exception { } } + public void testIllegalFormatField() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "date_range") + .array("format", "test_format") + .endObject() + .endObject() + .endObject() + .endObject()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage()); + } + } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index e17fb9cc4b022..9be109c0b8420 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -159,4 +159,30 @@ public void testDynamicTemplates() throws Exception { mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping3, mapper.mappingSource().toString()); } + + public void testIllegalFormatField() throws Exception { + String dynamicMapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startArray("dynamic_date_formats") + .startArray().value("test_format").endArray() + .endArray() + .endObject() + .endObject()); + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startArray("date_formats") + .startArray().value("test_format").endArray() + .endArray() + .endObject() + .endObject()); + + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + for (String m : Arrays.asList(mapping, dynamicMapping)) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(m))); + assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage()); + } + } } From 100c1a024ad80abf67515f123f0a7731c21593c8 Mon Sep 17 00:00:00 2001 From: Shaunak Kashyap Date: Mon, 27 Aug 2018 08:42:40 -0400 Subject: [PATCH 10/18] APM server monitoring (#32515) * Adding new MonitoredSystem for APM server * Teaching Monitoring template utils about APM server monitoring indices * Documenting new monitoring index for APM server * Adding monitoring index template for APM server * Copy pasta typo * Removing metrics.libbeat.config section from mapping * Adding built-in user and role for APM server user * Actually define the role :) * Adding missing import * Removing index template and system ID for apm server * Shortening line lengths * Updating expected number of built-in users in integration test * Removing "system" from role and user names * Rearranging users to make tests pass --- .../commands/setup-passwords.asciidoc | 2 +- docs/reference/monitoring/exporters.asciidoc | 12 ++++----- .../docs/en/security/configuring-es.asciidoc | 4 +-- .../authc/esnative/ClientReservedRealm.java | 1 + .../authz/store/ReservedRolesStore.java | 2 ++ .../core/security/user/APMSystemUser.java | 25 ++++++++++++++++++ .../core/security/user/UsernamesField.java | 2 ++ .../authz/store/ReservedRolesStoreTests.java | 26 +++++++++++++++++++ .../authc/esnative/ReservedRealm.java | 8 ++++++ .../esnative/tool/SetupPasswordTool.java | 4 ++- .../test/NativeRealmIntegTestCase.java | 3 ++- .../authc/esnative/NativeUsersStoreTests.java | 7 +++-- .../esnative/ReservedRealmIntegTests.java | 13 +++++++--- .../authc/esnative/ReservedRealmTests.java | 16 +++++++++--- .../esnative/tool/SetupPasswordToolIT.java | 2 +- 15 files changed, 106 insertions(+), 21 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java diff --git a/docs/reference/commands/setup-passwords.asciidoc b/docs/reference/commands/setup-passwords.asciidoc index a7dcd25d65e0b..e2d4dfdc13d3d 100644 --- a/docs/reference/commands/setup-passwords.asciidoc +++ b/docs/reference/commands/setup-passwords.asciidoc @@ -4,7 +4,7 @@ == 
elasticsearch-setup-passwords The `elasticsearch-setup-passwords` command sets the passwords for the built-in -`elastic`, `kibana`, `logstash_system`, and `beats_system` users. +`elastic`, `kibana`, `logstash_system`, `beats_system`, and `apm_system` users. [float] === Synopsis diff --git a/docs/reference/monitoring/exporters.asciidoc b/docs/reference/monitoring/exporters.asciidoc index 2a7729eee9425..a1d4bc08ae73f 100644 --- a/docs/reference/monitoring/exporters.asciidoc +++ b/docs/reference/monitoring/exporters.asciidoc @@ -105,12 +105,12 @@ route monitoring data: [options="header"] |======================= -| Template | Purpose -| `.monitoring-alerts` | All cluster alerts for monitoring data. -| `.monitoring-beats` | All Beats monitoring data. -| `.monitoring-es` | All {es} monitoring data. -| `.monitoring-kibana` | All {kib} monitoring data. -| `.monitoring-logstash` | All Logstash monitoring data. +| Template | Purpose +| `.monitoring-alerts` | All cluster alerts for monitoring data. +| `.monitoring-beats` | All Beats monitoring data. +| `.monitoring-es` | All {es} monitoring data. +| `.monitoring-kibana` | All {kib} monitoring data. +| `.monitoring-logstash` | All Logstash monitoring data. |======================= The templates are ordinary {es} templates that control the default settings and diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index df4b055006212..9ef6b33685498 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -55,8 +55,8 @@ help you get up and running. The +elasticsearch-setup-passwords+ command is the simplest method to set the built-in users' passwords for the first time. For example, you can run the command in an "interactive" mode, which prompts you -to enter new passwords for the `elastic`, `kibana`, `beats_system`, and -`logstash_system` users: +to enter new passwords for the `elastic`, `kibana`, `beats_system`, +`logstash_system`, and `apm_system` users: [source,shell] -------------------------------------------------- diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java index c9868f448b40f..5a228133073e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java @@ -19,6 +19,7 @@ public static boolean isReserved(String username, Settings settings) { case UsernamesField.KIBANA_NAME: case UsernamesField.LOGSTASH_NAME: case UsernamesField.BEATS_NAME: + case UsernamesField.APM_NAME: return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); default: return AnonymousUser.isAnonymousUsername(username, settings); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index edd85eb3ba20c..c335e0565c5dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -104,6 +104,8 @@ private static Map initializeReservedRoles() { null, 
null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put(UsernamesField.BEATS_ROLE, new RoleDescriptor(UsernamesField.BEATS_ROLE, new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put(UsernamesField.APM_ROLE, new RoleDescriptor(UsernamesField.APM_ROLE, + new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("machine_learning_user", new RoleDescriptor("machine_learning_user", new String[] { "monitor_ml" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".ml-anomalies*", ".ml-notifications").privileges("view_index_metadata", "read").build() }, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java new file mode 100644 index 0000000000000..48a72be5c1a85 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.user; + +import org.elasticsearch.Version; +import org.elasticsearch.protocol.xpack.security.User; +import org.elasticsearch.xpack.core.security.support.MetadataUtils; + +/** + * Built in user for APM server internals. Currently used for APM server monitoring. + */ +public class APMSystemUser extends User { + + public static final String NAME = UsernamesField.APM_NAME; + public static final String ROLE_NAME = UsernamesField.APM_ROLE; + public static final Version DEFINED_SINCE = Version.V_6_5_0; + public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE); + + public APMSystemUser(boolean enabled) { + super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index 3b691b927b4a3..bd886567ed1b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -20,6 +20,8 @@ public final class UsernamesField { public static final String LOGSTASH_ROLE = "logstash_system"; public static final String BEATS_NAME = "beats_system"; public static final String BEATS_ROLE = "beats_system"; + public static final String APM_NAME = "apm_system"; + public static final String APM_ROLE = "apm_system"; private UsernamesField() {} } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index d78a87c19d71b..f822fec67e946 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -94,6 +94,7 @@ import 
org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; import org.elasticsearch.xpack.core.security.user.SystemUser; @@ -146,6 +147,7 @@ public void testIsReserved() { assertThat(ReservedRolesStore.isReserved(XPackUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(LogstashSystemUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(BeatsSystemUser.ROLE_NAME), is(true)); + assertThat(ReservedRolesStore.isReserved(APMSystemUser.ROLE_NAME), is(true)); } public void testIngestAdminRole() { @@ -577,6 +579,30 @@ public void testBeatsSystemRole() { is(false)); } + public void testAPMSystemRole() { + final TransportRequest request = mock(TransportRequest.class); + + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor(APMSystemUser.ROLE_NAME); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role APMSystemRole = Role.builder(roleDescriptor, null).build(); + assertThat(APMSystemRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(APMSystemRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(APMSystemRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(APMSystemRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(APMSystemRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(APMSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(APMSystemRole.cluster().check(MonitoringBulkAction.NAME, request), is(true)); + + assertThat(APMSystemRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); + + assertThat(APMSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); + assertThat(APMSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); + assertThat(APMSystemRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)), + is(false)); + } + public void testMachineLearningAdminRole() { final TransportRequest request = mock(TransportRequest.class); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 99c138bbb121d..3f83997b34e50 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; @@ -149,6 +150,8 @@ private 
User getUser(String username, ReservedUserInfo userInfo) { return new LogstashSystemUser(userInfo.enabled); case BeatsSystemUser.NAME: return new BeatsSystemUser(userInfo.enabled); + case APMSystemUser.NAME: + return new APMSystemUser(userInfo.enabled); default: if (anonymousEnabled && anonymousUser.principal().equals(username)) { return anonymousUser; @@ -177,6 +180,9 @@ public void users(ActionListener> listener) { userInfo = reservedUserInfos.get(BeatsSystemUser.NAME); users.add(new BeatsSystemUser(userInfo == null || userInfo.enabled)); + userInfo = reservedUserInfos.get(APMSystemUser.NAME); + users.add(new APMSystemUser(userInfo == null || userInfo.enabled)); + if (anonymousEnabled) { users.add(anonymousUser); } @@ -230,6 +236,8 @@ private Version getDefinedVersion(String username) { return LogstashSystemUser.DEFINED_SINCE; case BeatsSystemUser.NAME: return BeatsSystemUser.DEFINED_SINCE; + case APMSystemUser.NAME: + return APMSystemUser.DEFINED_SINCE; default: return Version.V_5_0_0; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java index 336acbdb18175..fad10c821c85d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java @@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.support.Validation; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -63,7 +64,8 @@ public class SetupPasswordTool extends LoggingAwareMultiCommand { private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789").toCharArray(); - public static final List USERS = asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + public static final List USERS = asList(ElasticUser.NAME, APMSystemUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME); private final BiFunction clientFunction; private final CheckedFunction keyStoreFunction; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 8461b762ad502..71e5d34f681ce 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -93,7 +94,7 @@ public void setupReservedPasswords(RestClient restClient) throws IOException { RequestOptions.Builder 
optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword)); RequestOptions options = optionsBuilder.build(); - for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) { + for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME)) { Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password"); request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); request.setOptions(options); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index bc168bf660d33..6fb2e6a7eecbf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.esnative.NativeUserStoreField; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -89,7 +90,8 @@ > void doExecute( public void testPasswordUpsertWhenSetEnabledOnReservedUser() throws Exception { final NativeUsersStore nativeUsersStore = startNativeUsersStore(); - final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME, APMSystemUser.NAME); final PlainActionFuture future = new PlainActionFuture<>(); nativeUsersStore.setEnabled(user, true, WriteRequest.RefreshPolicy.IMMEDIATE, future); @@ -107,7 +109,8 @@ public void testPasswordUpsertWhenSetEnabledOnReservedUser() throws Exception { public void testBlankPasswordInIndexImpliesDefaultPassword() throws Exception { final NativeUsersStore nativeUsersStore = startNativeUsersStore(); - final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME, APMSystemUser.NAME); final Map values = new HashMap<>(); values.put(ENABLED_FIELD, Boolean.TRUE); values.put(PASSWORD_FIELD, BLANK_PASSWORD); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 1824597a6adc6..8f7116dd9718c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse; import 
org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -20,6 +21,7 @@ import org.junit.BeforeClass; import java.util.Arrays; +import java.util.List; import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -49,7 +51,9 @@ public Settings nodeSettings(int nodeOrdinal) { } public void testAuthenticate() { - for (String username : Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) { + final List usernames = Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME, APMSystemUser.NAME); + for (String username : usernames) { ClusterHealthResponse response = client() .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword()))) .admin() @@ -67,7 +71,9 @@ public void testAuthenticate() { */ public void testAuthenticateAfterEnablingUser() { final SecurityClient c = securityClient(); - for (String username : Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) { + final List usernames = Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME, APMSystemUser.NAME); + for (String username : usernames) { c.prepareSetEnabled(username, true).get(); ClusterHealthResponse response = client() .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword()))) @@ -81,7 +87,8 @@ public void testAuthenticateAfterEnablingUser() { } public void testChangingPassword() { - String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, + BeatsSystemUser.NAME, APMSystemUser.NAME); final char[] newPassword = "supersecretvalue".toCharArray(); if (randomBoolean()) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index 6344b020b1c74..0321b0d1668a8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; @@ -262,7 +263,8 @@ public void testGetUsers() { PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); assertThat(userFuture.actionGet(), - containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new 
LogstashSystemUser(true), new BeatsSystemUser(true))); + containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true), + new BeatsSystemUser(true), new APMSystemUser((true)))); } public void testGetUsersDisabled() { @@ -394,7 +396,7 @@ public void testNonElasticUsersCannotUseBootstrapPasswordWhenSecurityIndexExists new AnonymousUser(Settings.EMPTY), securityIndex, threadPool); PlainActionFuture listener = new PlainActionFuture<>(); - final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME); doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; callback.onResponse(null); @@ -416,14 +418,15 @@ public void testNonElasticUsersCannotUseBootstrapPasswordWhenSecurityIndexDoesNo new AnonymousUser(Settings.EMPTY), securityIndex, threadPool); PlainActionFuture listener = new PlainActionFuture<>(); - final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); + final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME); reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, mockSecureSettings.getString("bootstrap.password")), listener); final AuthenticationResult result = listener.get(); assertThat(result.getStatus(), is(AuthenticationResult.Status.TERMINATE)); } private User randomReservedUser(boolean enabled) { - return randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled), new BeatsSystemUser(enabled)); + return randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled), + new BeatsSystemUser(enabled), new APMSystemUser(enabled)); } /* @@ -457,6 +460,11 @@ private void verifyVersionPredicate(String principal, Predicate version assertThat(versionPredicate.test(Version.V_6_2_3), is(false)); assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); break; + case APMSystemUser.NAME: + assertThat(versionPredicate.test(Version.V_5_6_9), is(false)); + assertThat(versionPredicate.test(Version.V_6_4_0), is(false)); + assertThat(versionPredicate.test(Version.V_6_5_0), is(true)); + break; default: assertThat(versionPredicate.test(Version.V_5_0_0), is(true)); assertThat(versionPredicate.test(Version.V_5_1_1), is(true)); diff --git a/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java b/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java index 74f1223f4a6a1..f14474814664f 100644 --- a/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java +++ b/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java @@ -97,7 +97,7 @@ public void testSetupPasswordToolAutoSetup() throws Exception { } }); - assertEquals(4, userPasswordMap.size()); + assertEquals(5, userPasswordMap.size()); userPasswordMap.entrySet().forEach(entry -> { final String basicHeader = "Basic " + Base64.getEncoder().encodeToString((entry.getKey() + ":" + entry.getValue()).getBytes(StandardCharsets.UTF_8)); From ecb11d363122592748e50d73b7df0a350e6b9f71 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 27 Aug 2018 10:16:57 -0400 
Subject: [PATCH 11/18] Fix grammar in contributing docs This commit fixes an instance of odd comma placement in the contributing docs. --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a209fa24b23b8..44343ca1fe298 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -95,7 +95,7 @@ Contributing to the Elasticsearch codebase JDK 10 is required to build Elasticsearch. You must have a JDK 10 installation with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 10 installation. By default, tests use the same runtime as `JAVA_HOME`. -However, since Elasticsearch, supports JDK 8 the build supports compiling with +However, since Elasticsearch supports JDK 8, the build supports compiling with JDK 10 and testing on a JDK 8 runtime; to do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of a JDK 8 installation. Note that this mechanism can be used to test against other JDKs as well, this is not only limited to JDK 8. From 6e9c2555a4437317e0b4cd739d919dc6e6fd780b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 27 Aug 2018 17:52:04 +0300 Subject: [PATCH 12/18] [TEST] version guard for reload rest-api-spec Relates #32990 --- .../test/nodes.reload_secure_settings/10_basic.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml index 0a4cf0d64a001..39325e6a65bc3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.reload_secure_settings/10_basic.yml @@ -1,5 +1,8 @@ --- "node_reload_secure_settings test": + - skip: + version: " - 6.3.99" + reason: "reload API available 6.4.0 onwards" - do: nodes.reload_secure_settings: {} From 7d8780d7f54ca32654a0c46824fa6459b95f1d9f Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 27 Aug 2018 17:56:28 +0300 Subject: [PATCH 13/18] SQL: Enable aggregations to create a separate bucket for missing values (#32832) Enable aggregations to create a separate bucket for missing values. 
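The change below works by flipping the missingBucket flag on the composite aggregation value sources that the SQL plugin generates for GROUP BY keys. As a rough sketch of what that flag does at the aggregation level (the index/field names are illustrative, not taken from this patch, and it assumes a 6.4+ codebase where missingBucket exists on the composite value-source builders):

import java.util.Collections;

import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class MissingBucketSketch {
    public static SearchSourceBuilder groupByGender() {
        // One composite key over the "gender" field. With missingBucket(false)
        // (the previous behavior), documents lacking a gender value are dropped
        // from the aggregation entirely; with missingBucket(true) they are
        // collected into a single extra bucket whose key is null.
        CompositeValuesSourceBuilder<?> genderKey = new TermsValuesSourceBuilder("gender")
                .field("gender")
                .missingBucket(true);
        // size(0): only the aggregation buckets are wanted, not search hits.
        return new SearchSourceBuilder()
                .size(0)
                .aggregation(new CompositeAggregationBuilder("groups", Collections.singletonList(genderKey)));
    }
}

That null-keyed bucket is what surfaces as the separate group for missing values in SQL results such as SELECT gender, COUNT(*) FROM test_emp_with_nulls GROUP BY gender.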
--- .../sql/querydsl/agg/GroupByColumnKey.java | 3 +- .../sql/querydsl/agg/GroupByDateKey.java | 3 +- .../sql/querydsl/agg/GroupByScriptKey.java | 3 +- .../xpack/qa/sql/jdbc/DataLoader.java | 20 ++-- .../xpack/qa/sql/jdbc/SqlSpecTestCase.java | 6 +- .../sql/src/main/resources/agg_nulls.sql-spec | 14 +++ .../qa/sql/src/main/resources/alias.csv-spec | 7 +- .../main/resources/employees_with_nulls.csv | 101 ++++++++++++++++++ .../resources/setup_test_emp_with_nulls.sql | 12 +++ 9 files changed, 154 insertions(+), 15 deletions(-) create mode 100644 x-pack/qa/sql/src/main/resources/agg_nulls.sql-spec create mode 100644 x-pack/qa/sql/src/main/resources/employees_with_nulls.csv create mode 100644 x-pack/qa/sql/src/main/resources/setup_test_emp_with_nulls.sql diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java index e98770318d218..931eaee646478 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByColumnKey.java @@ -25,7 +25,8 @@ public GroupByColumnKey(String id, String fieldName, Direction direction) { public TermsValuesSourceBuilder asValueSource() { return new TermsValuesSourceBuilder(id()) .field(fieldName()) - .order(direction().asOrder()); + .order(direction().asOrder()) + .missingBucket(true); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java index 43c80e75057e9..61c00c706eeff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateKey.java @@ -44,7 +44,8 @@ public DateHistogramValuesSourceBuilder asValueSource() { return new DateHistogramValuesSourceBuilder(id()) .field(fieldName()) .dateHistogramInterval(new DateHistogramInterval(interval)) - .timeZone(DateTimeZone.forTimeZone(timeZone)); + .timeZone(DateTimeZone.forTimeZone(timeZone)) + .missingBucket(true); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java index a4af765d034bf..ccd2bf934ab61 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java @@ -36,7 +36,8 @@ public ScriptTemplate script() { public TermsValuesSourceBuilder asValueSource() { TermsValuesSourceBuilder builder = new TermsValuesSourceBuilder(id()) .script(script.toPainless()) - .order(direction().asOrder()); + .order(direction().asOrder()) + .missingBucket(true); if (script.outputType().isNumeric()) { builder.valueType(ValueType.NUMBER); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java index 05140577bcdf6..22ba2a1037d96 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java @@ -42,14 +42,15 @@ protected static void 
loadDatasetIntoEs(RestClient client) throws Exception { } protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception { - loadEmpDatasetIntoEs(client, "test_emp"); - loadEmpDatasetIntoEs(client, "test_emp_copy"); + loadEmpDatasetIntoEs(client, "test_emp", "employees"); + loadEmpDatasetIntoEs(client, "test_emp_copy", "employees"); + loadEmpDatasetIntoEs(client, "test_emp_with_nulls", "employees_with_nulls"); makeAlias(client, "test_alias", "test_emp", "test_emp_copy"); makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy"); } public static void loadDocsDatasetIntoEs(RestClient client) throws Exception { - loadEmpDatasetIntoEs(client, "emp"); + loadEmpDatasetIntoEs(client, "emp", "employees"); loadLibDatasetIntoEs(client, "library"); makeAlias(client, "employees", "emp"); } @@ -62,7 +63,7 @@ private static void createString(String name, XContentBuilder builder) throws Ex .endObject(); } - protected static void loadEmpDatasetIntoEs(RestClient client, String index) throws Exception { + protected static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName) throws Exception { Request request = new Request("PUT", "/" + index); XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); createIndex.startObject("settings"); @@ -129,15 +130,18 @@ protected static void loadEmpDatasetIntoEs(RestClient client, String index) thro request = new Request("POST", "/" + index + "/emp/_bulk"); request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); - csvToLines("employees", (titles, fields) -> { + csvToLines(fileName, (titles, fields) -> { bulk.append("{\"index\":{}}\n"); bulk.append('{'); String emp_no = fields.get(1); for (int f = 0; f < fields.size(); f++) { - if (f != 0) { - bulk.append(','); + // an empty value in the csv file is treated as 'null', thus skipping it in the bulk request + if (fields.get(f).trim().length() > 0) { + if (f != 0) { + bulk.append(','); + } + bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"'); } - bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"'); } // append department List> list = dep_emp.get(emp_no); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java index 4d90c9cce502b..b77820fc77e72 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java @@ -25,7 +25,10 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { private String query; @ClassRule - public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'")); + public static LocalH2 H2 = new LocalH2((c) -> { + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"); + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp_with_nulls.sql'"); + }); @ParametersFactory(argumentFormatting = PARAM_FORMATTING) public static List readScriptSpec() throws Exception { @@ -39,6 +42,7 @@ public static List readScriptSpec() throws Exception { tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser)); tests.addAll(readScriptSpec("/string-functions.sql-spec", parser)); tests.addAll(readScriptSpec("/case-functions.sql-spec", parser)); + tests.addAll(readScriptSpec("/agg_nulls.sql-spec", 
parser)); return tests; } diff --git a/x-pack/qa/sql/src/main/resources/agg_nulls.sql-spec b/x-pack/qa/sql/src/main/resources/agg_nulls.sql-spec new file mode 100644 index 0000000000000..17fbb70a40bcb --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/agg_nulls.sql-spec @@ -0,0 +1,14 @@ +selectGenderWithNullsAndGroupByGender +SELECT gender, COUNT(*) count FROM test_emp_with_nulls GROUP BY gender ORDER BY gender; +selectFirstNameWithNullsAndGroupByFirstName +SELECT first_name FROM test_emp_with_nulls GROUP BY first_name ORDER BY first_name; +selectCountWhereIsNull +SELECT COUNT(*) count FROM test_emp_with_nulls WHERE first_name IS NULL; +selectLanguagesCountWithNullsAndGroupByLanguage +SELECT languages l, COUNT(*) c FROM test_emp_with_nulls GROUP BY languages ORDER BY languages; +selectHireDateGroupByHireDate +SELECT hire_date HD, COUNT(*) c FROM test_emp_with_nulls GROUP BY hire_date ORDER BY hire_date DESC; +selectHireDateGroupByHireDate +SELECT hire_date HD, COUNT(*) c FROM test_emp_with_nulls GROUP BY hire_date ORDER BY hire_date DESC; +selectSalaryGroupBySalary +SELECT salary, COUNT(*) c FROM test_emp_with_nulls GROUP BY salary ORDER BY salary DESC; \ No newline at end of file diff --git a/x-pack/qa/sql/src/main/resources/alias.csv-spec b/x-pack/qa/sql/src/main/resources/alias.csv-spec index 839d2cba79451..f1fa900706a7d 100644 --- a/x-pack/qa/sql/src/main/resources/alias.csv-spec +++ b/x-pack/qa/sql/src/main/resources/alias.csv-spec @@ -86,6 +86,7 @@ test_alias | ALIAS test_alias_emp | ALIAS test_emp | BASE TABLE test_emp_copy | BASE TABLE +test_emp_with_nulls | BASE TABLE ; testGroupByOnAlias @@ -98,10 +99,10 @@ F | 10099.28 ; testGroupByOnPattern -SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* GROUP BY gender; +SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* WHERE gender is NOT NULL GROUP BY gender; gender:s | p1:d -F | 10099.28 -M | 10095.75 +F | 10099.32 +M | 10095.98 ; \ No newline at end of file diff --git a/x-pack/qa/sql/src/main/resources/employees_with_nulls.csv b/x-pack/qa/sql/src/main/resources/employees_with_nulls.csv new file mode 100644 index 0000000000000..482da640470d0 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/employees_with_nulls.csv @@ -0,0 +1,101 @@ +birth_date,emp_no,first_name,gender,hire_date,languages,last_name,salary +1953-09-02T00:00:00Z,10001,Georgi,,1986-06-26T00:00:00Z,2,Facello,57305 +1964-06-02T00:00:00Z,10002,Bezalel,,1985-11-21T00:00:00Z,5,Simmel,56371 +1959-12-03T00:00:00Z,10003,Parto,,1986-08-28T00:00:00Z,4,Bamford,61805 +1954-05-01T00:00:00Z,10004,Chirstian,,1986-12-01T00:00:00Z,5,Koblick,36174 +1955-01-21T00:00:00Z,10005,Kyoichi,,1989-09-12T00:00:00Z,1,Maliniak,63528 +1953-04-20T00:00:00Z,10006,Anneke,,1989-06-02T00:00:00Z,3,Preusig,60335 +1957-05-23T00:00:00Z,10007,Tzvetan,,1989-02-10T00:00:00Z,4,Zielinski,74572 +1958-02-19T00:00:00Z,10008,Saniya,,1994-09-15T00:00:00Z,2,Kalloufi,43906 +1952-04-19T00:00:00Z,10009,Sumant,,1985-02-18T00:00:00Z,1,Peac,66174 +1963-06-01T00:00:00Z,10010,Duangkaew,,1989-08-24T00:00:00Z,4,Piveteau,45797 +1953-11-07T00:00:00Z,10011,Mary,F,1990-01-22T00:00:00Z,5,Sluis,31120 +1960-10-04T00:00:00Z,10012,Patricio,M,1992-12-18T00:00:00Z,5,Bridgland,48942 +1963-06-07T00:00:00Z,10013,Eberhardt,M,1985-10-20T00:00:00Z,1,Terkki,48735 +1956-02-12T00:00:00Z,10014,Berni,M,1987-03-11T00:00:00Z,5,Genin,37137 +1959-08-19T00:00:00Z,10015,Guoxiang,M,1987-07-02T00:00:00Z,5,Nooteboom,25324 +1961-05-02T00:00:00Z,10016,Kazuhito,M,1995-01-27T00:00:00Z,2,Cappelletti,61358 
+1958-07-06T00:00:00Z,10017,Cristinel,F,1993-08-03T00:00:00Z,2,Bouloucos,58715 +1954-06-19T00:00:00Z,10018,Kazuhide,F,1993-08-03T00:00:00Z,2,Peha,56760 +1953-01-23T00:00:00Z,10019,Lillian,M,1993-08-03T00:00:00Z,1,Haddadi,73717 +1952-12-24T00:00:00Z,10020,,M,1991-01-26T00:00:00Z,3,Warwick,40031 +1960-02-20T00:00:00Z,10021,,M,1989-12-17T00:00:00Z,5,Erde,60408 +1952-07-08T00:00:00Z,10022,,M,1995-08-22T00:00:00Z,3,Famili,48233 +1953-09-29T00:00:00Z,10023,,F,1989-12-17T00:00:00Z,2,Montemayor,47896 +1958-09-05T00:00:00Z,10024,,F,1997-05-19T00:00:00Z,3,Pettey,64675 +1958-10-31T00:00:00Z,10025,Prasadram,M,1987-08-17T00:00:00Z,5,Heyers,47411 +1953-04-03T00:00:00Z,10026,Yongqiao,M,1995-03-20T00:00:00Z,3,Berztiss,28336 +1962-07-10T00:00:00Z,10027,Divier,F,1989-07-07T00:00:00Z,5,Reistad,73851 +1963-11-26T00:00:00Z,10028,Domenick,M,1991-10-22T00:00:00Z,1,Tempesti,39356 +1956-12-13T00:00:00Z,10029,Otmar,M,1985-11-20T00:00:00Z,,Herbst,74999 +1958-07-14T00:00:00Z,10030,Elvis,M,1994-02-17T00:00:00Z,,Demeyer,67492 +1959-01-27T00:00:00Z,10031,Karsten,M,1994-02-17T00:00:00Z,,Joslin,37716 +1960-08-09T00:00:00Z,10032,Jeong,F,1990-06-20T00:00:00Z,,Reistad,62233 +1956-11-14T00:00:00Z,10033,Arif,M,1987-03-18T00:00:00Z,,Merlo,70011 +1962-12-29T00:00:00Z,10034,Bader,M,1988-09-05T00:00:00Z,,Swan,39878 +1953-02-08T00:00:00Z,10035,Alain,M,1988-09-05T00:00:00Z,,Chappelet,25945 +1959-08-10T00:00:00Z,10036,Adamantios,M,1992-01-03T00:00:00Z,,Portugali,60781 +1963-07-22T00:00:00Z,10037,Pradeep,M,1990-12-05T00:00:00Z,,Makrucki,37691 +1960-07-20T00:00:00Z,10038,Huan,M,1989-09-20T00:00:00Z,,Lortz,35222 +1959-10-01T00:00:00Z,10039,Alejandro,M,1988-01-19T00:00:00Z,,Brender,36051 +1959-09-13T00:00:00Z,10040,Weiyi,F,1993-02-14T00:00:00Z,,Meriste,37112 +1959-08-27T00:00:00Z,10041,Uri,F,1989-11-12T00:00:00Z,1,Lenart,56415 +1956-02-26T00:00:00Z,10042,Magy,F,1993-03-21T00:00:00Z,3,Stamatiou,30404 +1960-09-19T00:00:00Z,10043,Yishay,M,1990-10-20T00:00:00Z,1,Tzvieli,34341 +1961-09-21T00:00:00Z,10044,Mingsen,F,1994-05-21T00:00:00Z,1,Casley,39728 +1957-08-14T00:00:00Z,10045,Moss,M,1989-09-02T00:00:00Z,3,Shanbhogue,74970 +1960-07-23T00:00:00Z,10046,Lucien,M,1992-06-20T00:00:00Z,4,Rosenbaum,50064 +1952-06-29T00:00:00Z,10047,Zvonko,M,1989-03-31T00:00:00Z,4,Nyanchama,42716 +1963-07-11T00:00:00Z,10048,Florian,M,1985-02-24T00:00:00Z,3,Syrotiuk,26436 +1961-04-24T00:00:00Z,10049,Basil,F,1992-05-04T00:00:00Z,5,Tramer,37853 +1958-05-21T00:00:00Z,10050,Yinghua,M,1990-12-25T00:00:00Z,2,Dredge,43026 +1953-07-28T00:00:00Z,10051,Hidefumi,M,1992-10-15T00:00:00Z,3,Caine,58121 +1961-02-26T00:00:00Z,10052,Heping,M,1988-05-21T00:00:00Z,1,Nitsch,55360 +1954-09-13T00:00:00Z,10053,Sanjiv,F,1986-02-04T00:00:00Z,3,Zschoche,54462 +1957-04-04T00:00:00Z,10054,Mayumi,M,1995-03-13T00:00:00Z,4,Schueller,65367 +1956-06-06T00:00:00Z,10055,Georgy,M,1992-04-27T00:00:00Z,5,Dredge,49281 +1961-09-01T00:00:00Z,10056,Brendon,F,1990-02-01T00:00:00Z,2,Bernini,33370 +1954-05-30T00:00:00Z,10057,Ebbe,F,1992-01-15T00:00:00Z,4,Callaway,27215 +1954-10-01T00:00:00Z,10058,Berhard,M,1987-04-13T00:00:00Z,3,McFarlin,38376 +1953-09-19T00:00:00Z,10059,Alejandro,F,1991-06-26T00:00:00Z,2,McAlpine,44307 +1961-10-15T00:00:00Z,10060,Breannda,M,1987-11-02T00:00:00Z,2,Billingsley,29175 +1962-10-19T00:00:00Z,10061,Tse,M,1985-09-17T00:00:00Z,1,Herber,49095 +1961-11-02T00:00:00Z,10062,Anoosh,M,1991-08-30T00:00:00Z,3,Peyn,65030 +1952-08-06T00:00:00Z,10063,Gino,F,1989-04-08T00:00:00Z,3,Leonhardt,52121 +1959-04-07T00:00:00Z,10064,Udi,M,1985-11-20T00:00:00Z,5,Jansch,33956 
+1963-04-14T00:00:00Z,10065,Satosi,M,1988-05-18T00:00:00Z,2,Awdeh,50249 +1952-11-13T00:00:00Z,10066,Kwee,M,1986-02-26T00:00:00Z,5,Schusler,31897 +1953-01-07T00:00:00Z,10067,Claudi,M,1987-03-04T00:00:00Z,2,Stavenow,52044 +1962-11-26T00:00:00Z,10068,Charlene,M,1987-08-07T00:00:00Z,3,Brattka,28941 +1960-09-06T00:00:00Z,10069,Margareta,F,1989-11-05T00:00:00Z,5,Bierman,41933 +1955-08-20T00:00:00Z,10070,Reuven,M,1985-10-14T00:00:00Z,3,Garigliano,54329 +1958-01-21T00:00:00Z,10071,Hisao,M,1987-10-01T00:00:00Z,2,Lipner,40612 +1952-05-15T00:00:00Z,10072,Hironoby,F,1988-07-21T00:00:00Z,5,Sidou,54518 +1954-02-23T00:00:00Z,10073,Shir,M,1991-12-01T00:00:00Z,4,McClurg,32568 +1955-08-28T00:00:00Z,10074,Mokhtar,F,1990-08-13T00:00:00Z,5,Bernatsky,38992 +1960-03-09T00:00:00Z,10075,Gao,F,1987-03-19T00:00:00Z,5,Dolinsky,51956 +1952-06-13T00:00:00Z,10076,Erez,F,1985-07-09T00:00:00Z,3,Ritzmann,62405 +1964-04-18T00:00:00Z,10077,Mona,M,1990-03-02T00:00:00Z,5,Azuma,46595 +1959-12-25T00:00:00Z,10078,Danel,F,1987-05-26T00:00:00Z,2,Mondadori,69904 +1961-10-05T00:00:00Z,10079,Kshitij,F,1986-03-27T00:00:00Z,2,Gils,32263 +1957-12-03T00:00:00Z,10080,Premal,M,1985-11-19T00:00:00Z,5,Baek,52833 +1960-12-17T00:00:00Z,10081,Zhongwei,M,1986-10-30T00:00:00Z,2,Rosen,50128 +1963-09-09T00:00:00Z,10082,Parviz,M,1990-01-03T00:00:00Z,4,Lortz,49818 +1959-07-23T00:00:00Z,10083,Vishv,M,1987-03-31T00:00:00Z,1,Zockler, +1960-05-25T00:00:00Z,10084,Tuval,M,1995-12-15T00:00:00Z,1,Kalloufi, +1962-11-07T00:00:00Z,10085,Kenroku,M,1994-04-09T00:00:00Z,5,Malabarba, +1962-11-19T00:00:00Z,10086,Somnath,M,1990-02-16T00:00:00Z,1,Foote, +1959-07-23T00:00:00Z,10087,Xinglin,F,1986-09-08T00:00:00Z,5,Eugenio, +1954-02-25T00:00:00Z,10088,Jungsoon,F,1988-09-02T00:00:00Z,5,Syrzycki, +1963-03-21T00:00:00Z,10089,Sudharsan,F,1986-08-12T00:00:00Z,4,Flasterstein, +1961-05-30T00:00:00Z,10090,Kendra,M,1986-03-14T00:00:00Z,2,Hofting,44956 +1955-10-04T00:00:00Z,10091,Amabile,M,1992-11-18T00:00:00Z,3,Gomatam,38645 +1964-10-18T00:00:00Z,10092,Valdiodio,F,1989-09-22T00:00:00Z,1,Niizuma,25976 +1964-06-11T00:00:00Z,10093,Sailaja,M,1996-11-05T00:00:00Z,3,Desikan,45656 +1957-05-25T00:00:00Z,10094,Arumugam,F,1987-04-18T00:00:00Z,5,Ossenbruggen,66817 +1965-01-03T00:00:00Z,10095,Hilari,M,1986-07-15T00:00:00Z,4,Morton,37702 +1954-09-16T00:00:00Z,10096,Jayson,M,1990-01-14T00:00:00Z,4,Mandell,43889 +1952-02-27T00:00:00Z,10097,Remzi,M,1990-09-15T00:00:00Z,3,Waschkowski,71165 +1961-09-23T00:00:00Z,10098,Sreekrishna,F,1985-05-13T00:00:00Z,4,Servieres,44817 +1956-05-25T00:00:00Z,10099,Valter,F,1988-10-18T00:00:00Z,2,Sullins,73578 +1953-04-21T00:00:00Z,10100,Hironobu,F,1987-09-21T00:00:00Z,4,Haraldson,68431 diff --git a/x-pack/qa/sql/src/main/resources/setup_test_emp_with_nulls.sql b/x-pack/qa/sql/src/main/resources/setup_test_emp_with_nulls.sql new file mode 100644 index 0000000000000..c6afaa9018aa1 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/setup_test_emp_with_nulls.sql @@ -0,0 +1,12 @@ +DROP TABLE IF EXISTS "test_emp_with_nulls"; +CREATE TABLE "test_emp_with_nulls" ( + "birth_date" TIMESTAMP WITH TIME ZONE, + "emp_no" INT, + "first_name" VARCHAR(50), + "gender" VARCHAR(1), + "hire_date" TIMESTAMP WITH TIME ZONE, + "languages" TINYINT, + "last_name" VARCHAR(50), + "salary" INT + ) + AS SELECT * FROM CSVREAD('classpath:/employees_with_nulls.csv'); \ No newline at end of file From b56c38dadfde1a69e0087f57b44ec1f19cb1b8f2 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 27 Aug 2018 18:49:16 +0300 Subject: [PATCH 14/18] * Added breaking change section for GROUP BY 
behavior: now it considers null or empty values as a separate group/bucket. Previously, they were ignored. * This is part of backporting of https://github.com/elastic/elasticsearch/pull/32832 --- docs/reference/migration/migrate_6_5.asciidoc | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/reference/migration/migrate_6_5.asciidoc b/docs/reference/migration/migrate_6_5.asciidoc index 913d9567cb850..a6b22cf38d356 100644 --- a/docs/reference/migration/migrate_6_5.asciidoc +++ b/docs/reference/migration/migrate_6_5.asciidoc @@ -5,6 +5,7 @@ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 6.5. * <> +* <> See also <> and <>. @@ -23,3 +24,13 @@ Elasticsearch will log a warning on startup and log with the new pattern. It will not change the logging configuration files though. You should make this change before 7.0 because in 7.0 Elasticsearch will no longer automatically add the node name to the logging configuration if it isn't already present. + +[[breaking_65_sql_changes]] +=== SQL plugin changes + +==== Grouping by columns with missing values will create an additional group + +An additional group will be present in the result of requests containing a +`GROUP BY` for a column that has missing values in the returned documents. +The records with missing values in the grouped by column will be collectively +considered a single bucket. From 49942c030ead60297cf28aecd2f50dafbc7f579c Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Mon, 27 Aug 2018 10:26:25 -0600 Subject: [PATCH 15/18] Build: forked compiler max memory matches jvmArgs (#33138) This commit removes the setting of the fork options maximum memory size in our build plugin and instead adds the value in the gradle.properties file to be alongside the value set in jvmArgs. This change is necessary when using parallel compilation as 512m is not sufficient for parallel compilation on some machines. --- .../src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 1 - gradle.properties | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 4881987ad61b6..688efb843a8f1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -601,7 +601,6 @@ class BuildPlugin implements Plugin { } else { options.fork = true options.forkOptions.javaHome = compilerJavaHomeFile - options.forkOptions.memoryMaximumSize = "512m" } if (targetCompatibilityVersion == JavaVersion.VERSION_1_8) { // compile with compact 3 profile by default diff --git a/gradle.properties b/gradle.properties index 08b03629ad53a..6b04e99c20441 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,3 @@ org.gradle.daemon=false org.gradle.jvmargs=-Xmx2g +options.forkOptions.memoryMaximumSize=2g From 5b3a874ee536986dd13b004cc9a721c30f1737da Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Wed, 22 Aug 2018 09:19:58 -0500 Subject: [PATCH 16/18] HLRC: Create server agnostic request and response (#32912) The HLRC has historically reused the same Request and Response classes that the server module uses. 
This commit deprecates the use of any server module Request and Response classes, and adds a small bit of validation logic that differs from server slightly, in that it does not assume a check for a null ValidationException class is not enough to determine if validation failed. --- .../client/RestHighLevelClient.java | 126 ++++++++++++++++-- .../org/elasticsearch/client/Validatable.java | 41 ++++++ .../client/ValidationException.java | 55 ++++++++ 3 files changed, 208 insertions(+), 14 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 6502052f108d3..5d1cc3e405dbe 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -1275,6 +1275,11 @@ protected final Resp performRequestAndParseEnt return performRequest(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser), ignores, headers); } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + */ + @Deprecated protected final Resp performRequestAndParseEntity(Req request, CheckedFunction requestConverter, RequestOptions options, @@ -1284,6 +1289,18 @@ protected final Resp performRequestAndParseEnt response -> parseEntity(response.getEntity(), entityParser), ignores); } + /** + * Defines a helper method for performing a request and then parsing the returned entity using the provided entityParser. + */ + protected final Resp performRequestAndParseEntity(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + Set ignores) throws IOException { + return performRequest(request, requestConverter, options, + response -> parseEntity(response.getEntity(), entityParser), ignores); + } + @Deprecated protected final Resp performRequest(Req request, CheckedFunction requestConverter, @@ -1292,15 +1309,46 @@ protected final Resp performRequest(Req reques return performRequest(request, requestConverter, optionsForHeaders(headers), responseConverter, ignores); } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. 
+ */ + @Deprecated protected final Resp performRequest(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - Set ignores) throws IOException { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { ActionRequestValidationException validationException = request.validate(); - if (validationException != null) { + if (validationException != null && validationException.validationErrors().isEmpty() == false) { throw validationException; } + return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); + } + + /** + * Defines a helper method for performing a request. + */ + protected final Resp performRequest(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { + ValidationException validationException = request.validate(); + if (validationException != null && validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + return internalPerformRequest(request, requestConverter, options, responseConverter, ignores); + } + + /** + * Provides common functionality for performing a request. + */ + private Resp internalPerformRequest(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + Set ignores) throws IOException { Request req = requestConverter.apply(request); req.setOptions(options); Response response; @@ -1337,15 +1385,32 @@ protected final void performRequestAsyncAndPar listener, ignores, headers); } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. + */ + @Deprecated protected final void performRequestAsyncAndParseEntity(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction entityParser, - ActionListener listener, Set ignores) { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + ActionListener listener, Set ignores) { performRequestAsync(request, requestConverter, options, response -> parseEntity(response.getEntity(), entityParser), listener, ignores); } + /** + * Defines a helper method for asynchronously performing a request. + */ + protected final void performRequestAsyncAndParseEntity(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction entityParser, + ActionListener listener, Set ignores) { + performRequestAsync(request, requestConverter, options, + response -> parseEntity(response.getEntity(), entityParser), listener, ignores); + } + @Deprecated protected final void performRequestAsync(Req request, CheckedFunction requestConverter, @@ -1354,16 +1419,48 @@ protected final void performRequestAsync(Req r performRequestAsync(request, requestConverter, optionsForHeaders(headers), responseConverter, listener, ignores); } + /** + * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation + * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}. 
+ */ + @Deprecated protected final void performRequestAsync(Req request, - CheckedFunction requestConverter, - RequestOptions options, - CheckedFunction responseConverter, - ActionListener listener, Set ignores) { + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { ActionRequestValidationException validationException = request.validate(); - if (validationException != null) { + if (validationException != null && validationException.validationErrors().isEmpty() == false) { listener.onFailure(validationException); return; } + internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores); + } + + /** + * Defines a helper method for asynchronously performing a request. + */ + protected final void performRequestAsync(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { + ValidationException validationException = request.validate(); + if (validationException != null && validationException.validationErrors().isEmpty() == false) { + listener.onFailure(validationException); + return; + } + internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores); + } + + /** + * Provides common functionality for asynchronously performing a request. + */ + private void internalPerformRequestAsync(Req request, + CheckedFunction requestConverter, + RequestOptions options, + CheckedFunction responseConverter, + ActionListener listener, Set ignores) { Request req; try { req = requestConverter.apply(request); @@ -1377,6 +1474,7 @@ protected final void performRequestAsync(Req r client.performRequestAsync(req, responseListener); } + final ResponseListener wrapResponseListener(CheckedFunction responseConverter, ActionListener actionListener, Set ignores) { return new ResponseListener() { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java new file mode 100644 index 0000000000000..2efff4d3663b8 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +/** + * Defines a validation layer for Requests. + */ +public interface Validatable { + ValidationException EMPTY_VALIDATION = new ValidationException() { + @Override + public void addValidationError(String error) { + throw new UnsupportedOperationException("Validation messages should not be added to the empty validation"); + } + }; + + /** + * Perform validation. 
This method does not have to be overridden in the event that no validation needs to be done. + * + * @return potentially null, in the event of older actions, an empty {@link ValidationException} in newer actions, or finally a + * {@link ValidationException} that contains a list of all failed validation. + */ + default ValidationException validate() { + return EMPTY_VALIDATION; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java new file mode 100644 index 0000000000000..6b5d738d67565 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import java.util.ArrayList; +import java.util.List; + +/** + * Encapsulates an accumulation of validation errors + */ +public class ValidationException extends IllegalArgumentException { + private final List validationErrors = new ArrayList<>(); + + /** + * Add a new validation error to the accumulating validation errors + * @param error the error to add + */ + public void addValidationError(String error) { + validationErrors.add(error); + } + + /** + * Returns the validation errors accumulated + */ + public final List validationErrors() { + return validationErrors; + } + + @Override + public final String getMessage() { + StringBuilder sb = new StringBuilder(); + sb.append("Validation Failed: "); + int index = 0; + for (String error : validationErrors) { + sb.append(++index).append(": ").append(error).append(";"); + } + return sb.toString(); + } +} From 35b4bdaf254baf1be6ff6c8eac88c49aaacf3c17 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Thu, 23 Aug 2018 09:48:53 -0500 Subject: [PATCH 17/18] Move non duplicated actions back into xpack core (#32952) Most actions' request and response were moved from xpack core into protocol. We have decided to instead duplicate the actions in the HLRC instead of trying to reuse them. This commit moves the non duplicated actions back into xpack core and severs the tie between xpack core and protocol so no other actions can be moved and not duplicated. 
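Before the file listing for this patch, it may help to see the validation contract from PATCH 16 in use. The request class and its fields below are hypothetical, invented purely for illustration; only Validatable, ValidationException, and the empty-errors convention come from the actual code:

import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;

// Hypothetical HLRC request type, not part of the codebase; it only
// illustrates how a new-style request would implement Validatable.
public class ResizeShardsRequest implements Validatable {
    private final String index;
    private final Integer shards;

    public ResizeShardsRequest(String index, Integer shards) {
        this.index = index;
        this.shards = shards;
    }

    @Override
    public ValidationException validate() {
        // Accumulate every problem rather than failing fast. performRequest
        // only throws when validationErrors() is non-empty, so returning an
        // exception holding no errors (or EMPTY_VALIDATION) means "valid".
        ValidationException errors = new ValidationException();
        if (index == null || index.isEmpty()) {
            errors.addValidationError("index name is required");
        }
        if (shards == null || shards <= 0) {
            errors.addValidationError("shard count must be a positive integer");
        }
        return errors;
    }
}

Note that the client checks validationErrors().isEmpty() == false rather than testing for null, which is what makes the EMPTY_VALIDATION sentinel returned by the default validate() implementation work.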
--- x-pack/plugin/core/build.gradle | 7 +- .../protocol/xpack/XPackInfoRequest.java | 86 ++++ .../protocol/xpack/XPackInfoResponse.java | 487 ++++++++++++++++++ .../protocol/xpack/XPackUsageRequest.java | 18 + .../protocol/xpack/XPackUsageResponse.java | 43 ++ .../protocol/xpack/common/ProtocolUtils.java | 58 +++ .../protocol/xpack/graph/Connection.java | 216 ++++++++ .../xpack/graph/GraphExploreRequest.java | 388 ++++++++++++++ .../xpack/graph/GraphExploreResponse.java | 248 +++++++++ .../protocol/xpack/graph/Hop.java | 160 ++++++ .../protocol/xpack/graph/Vertex.java | 255 +++++++++ .../protocol/xpack/graph/VertexRequest.java | 235 +++++++++ .../protocol/xpack/graph/package-info.java | 11 + .../xpack/license/DeleteLicenseRequest.java | 18 + .../xpack/license/GetLicenseRequest.java | 21 + .../xpack/license/GetLicenseResponse.java | 25 + .../protocol/xpack/license/LicenseStatus.java | 54 ++ .../xpack/license/LicensesStatus.java | 55 ++ .../xpack/license/PutLicenseRequest.java | 40 ++ .../xpack/license/PutLicenseResponse.java | 195 +++++++ .../protocol/xpack/license/package-info.java | 11 + .../migration/IndexUpgradeInfoRequest.java | 81 +++ .../migration/IndexUpgradeInfoResponse.java | 120 +++++ .../migration/UpgradeActionRequired.java | 42 ++ .../xpack/migration/package-info.java | 11 + .../protocol/xpack/package-info.java | 10 + .../protocol/xpack/security/User.java | 246 +++++++++ .../protocol/xpack/security/package-info.java | 11 + .../xpack/watcher/DeleteWatchRequest.java | 80 +++ .../xpack/watcher/DeleteWatchResponse.java | 110 ++++ .../xpack/watcher/PutWatchRequest.java | 162 ++++++ .../xpack/watcher/PutWatchResponse.java | 111 ++++ .../protocol/xpack/watcher/package-info.java | 11 + .../xpack/XPackInfoResponseTests.java | 146 ++++++ .../xpack/common/ProtocolUtilsTests.java | 58 +++ .../graph/GraphExploreResponseTests.java | 123 +++++ .../xpack/license/LicenseStatusTests.java | 17 + .../license/PutLicenseResponseTests.java | 112 ++++ .../IndexUpgradeInfoRequestTests.java | 36 ++ .../IndexUpgradeInfoResponseTests.java | 54 ++ .../protocol/xpack/security/UserTests.java | 25 + .../watcher/DeleteWatchResponseTests.java | 32 ++ .../xpack/watcher/PutWatchResponseTests.java | 32 ++ 43 files changed, 4256 insertions(+), 5 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java create mode 100644 
x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index e5e3ba43f74f0..402b87272ae5d 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -8,7 +8,6 @@ import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -apply plugin: 'com.github.johnrengelman.shadow' archivesBaseName = 'x-pack-core' @@ -27,7 +26,6 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - bundle project(':x-pack:protocol') compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" @@ -108,8 +106,7 @@ test { // TODO: don't publish test artifacts just to run messy tests, fix the tests! // https://github.com/elastic/x-plugins/issues/724 configurations { - testArtifacts.extendsFrom(testRuntime, shadow) - testArtifacts.exclude(group: project(':x-pack:protocol').group, module: project(':x-pack:protocol').name) + testArtifacts.extendsFrom testRuntime } task testJar(type: Jar) { appendix 'test' @@ -117,7 +114,7 @@ task testJar(type: Jar) { } artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions - archives shadowJar + archives jar testArtifacts testJar } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java new file mode 100644 index 0000000000000..41f066daf93d3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.Locale; + +/** + * Fetch information about X-Pack from the cluster. + */ +public class XPackInfoRequest extends ActionRequest { + + public enum Category { + BUILD, LICENSE, FEATURES; + + public static EnumSet toSet(String... 
categories) { + EnumSet set = EnumSet.noneOf(Category.class); + for (String category : categories) { + switch (category) { + case "_all": + return EnumSet.allOf(Category.class); + case "_none": + return EnumSet.noneOf(Category.class); + default: + set.add(Category.valueOf(category.toUpperCase(Locale.ROOT))); + } + } + return set; + } + } + + private boolean verbose; + private EnumSet categories = EnumSet.noneOf(Category.class); + + public XPackInfoRequest() {} + + public void setVerbose(boolean verbose) { + this.verbose = verbose; + } + + public boolean isVerbose() { + return verbose; + } + + public void setCategories(EnumSet categories) { + this.categories = categories; + } + + public EnumSet getCategories() { + return categories; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.verbose = in.readBoolean(); + EnumSet categories = EnumSet.noneOf(Category.class); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + categories.add(Category.valueOf(in.readString())); + } + this.categories = categories; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(verbose); + out.writeVInt(categories.size()); + for (Category category : categories) { + out.writeString(category.name()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java new file mode 100644 index 0000000000000..2a7eddcf35395 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -0,0 +1,487 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class XPackInfoResponse extends ActionResponse implements ToXContentObject { + /** + * Value of the license's expiration time if it should never expire. 
+ */ + public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = Long.MAX_VALUE - TimeUnit.HOURS.toMillis(24 * 365); + // TODO move this constant to License.java once we move License.java to the protocol jar + + @Nullable private BuildInfo buildInfo; + @Nullable private LicenseInfo licenseInfo; + @Nullable private FeatureSetsInfo featureSetsInfo; + + public XPackInfoResponse() {} + + public XPackInfoResponse(@Nullable BuildInfo buildInfo, @Nullable LicenseInfo licenseInfo, @Nullable FeatureSetsInfo featureSetsInfo) { + this.buildInfo = buildInfo; + this.licenseInfo = licenseInfo; + this.featureSetsInfo = featureSetsInfo; + } + + /** + * @return The build info (incl. build hash and timestamp) + */ + public BuildInfo getBuildInfo() { + return buildInfo; + } + + /** + * @return The current license info (incl. UID, type/mode. status and expiry date). May return {@code null} when no + * license is currently installed. + */ + public LicenseInfo getLicenseInfo() { + return licenseInfo; + } + + /** + * @return The current status of the feature sets in X-Pack. Feature sets describe the features available/enabled in X-Pack. + */ + public FeatureSetsInfo getFeatureSetsInfo() { + return featureSetsInfo; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(buildInfo); + out.writeOptionalWriteable(licenseInfo); + out.writeOptionalWriteable(featureSetsInfo); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.buildInfo = in.readOptionalWriteable(BuildInfo::new); + this.licenseInfo = in.readOptionalWriteable(LicenseInfo::new); + this.featureSetsInfo = in.readOptionalWriteable(FeatureSetsInfo::new); + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + XPackInfoResponse rhs = (XPackInfoResponse) other; + return Objects.equals(buildInfo, rhs.buildInfo) + && Objects.equals(licenseInfo, rhs.licenseInfo) + && Objects.equals(featureSetsInfo, rhs.featureSetsInfo); + } + + @Override + public int hashCode() { + return Objects.hash(buildInfo, licenseInfo, featureSetsInfo); + } + + @Override + public String toString() { + return Strings.toString(this, true, false); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "xpack_info_response", true, (a, v) -> { + BuildInfo buildInfo = (BuildInfo) a[0]; + LicenseInfo licenseInfo = (LicenseInfo) a[1]; + @SuppressWarnings("unchecked") // This is how constructing object parser works + List featureSets = (List) a[2]; + FeatureSetsInfo featureSetsInfo = featureSets == null ? null : new FeatureSetsInfo(new HashSet<>(featureSets)); + return new XPackInfoResponse(buildInfo, licenseInfo, featureSetsInfo); + }); + static { + PARSER.declareObject(optionalConstructorArg(), BuildInfo.PARSER, new ParseField("build")); + /* + * licenseInfo is sort of "double optional" because it is + * optional but it can also be send as `null`. 
+ */ + PARSER.declareField(optionalConstructorArg(), (p, v) -> { + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return LicenseInfo.PARSER.parse(p, v); + }, + new ParseField("license"), ValueType.OBJECT_OR_NULL); + PARSER.declareNamedObjects(optionalConstructorArg(), + (p, c, name) -> FeatureSetsInfo.FeatureSet.PARSER.parse(p, name), + new ParseField("features")); + } + public static XPackInfoResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (buildInfo != null) { + builder.field("build", buildInfo, params); + } + + EnumSet categories = XPackInfoRequest.Category + .toSet(Strings.splitStringByCommaToArray(params.param("categories", "_all"))); + if (licenseInfo != null) { + builder.field("license", licenseInfo, params); + } else if (categories.contains(XPackInfoRequest.Category.LICENSE)) { + // if the user requested the license info, and there is no license, we should send + // back an explicit null value (indicating there is no license). This is different + // than not adding the license info at all + builder.nullField("license"); + } + + if (featureSetsInfo != null) { + builder.field("features", featureSetsInfo, params); + } + + if (params.paramAsBoolean("human", true)) { + builder.field("tagline", "You know, for X"); + } + + return builder.endObject(); + } + + public static class LicenseInfo implements ToXContentObject, Writeable { + private final String uid; + private final String type; + private final String mode; + private final LicenseStatus status; + private final long expiryDate; + + public LicenseInfo(String uid, String type, String mode, LicenseStatus status, long expiryDate) { + this.uid = uid; + this.type = type; + this.mode = mode; + this.status = status; + this.expiryDate = expiryDate; + } + + public LicenseInfo(StreamInput in) throws IOException { + this(in.readString(), in.readString(), in.readString(), LicenseStatus.readFrom(in), in.readLong()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uid); + out.writeString(type); + out.writeString(mode); + status.writeTo(out); + out.writeLong(expiryDate); + } + + public String getUid() { + return uid; + } + + public String getType() { + return type; + } + + public String getMode() { + return mode; + } + + public long getExpiryDate() { + return expiryDate; + } + + public LicenseStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + LicenseInfo rhs = (LicenseInfo) other; + return Objects.equals(uid, rhs.uid) + && Objects.equals(type, rhs.type) + && Objects.equals(mode, rhs.mode) + && Objects.equals(status, rhs.status) + && expiryDate == rhs.expiryDate; + } + + @Override + public int hashCode() { + return Objects.hash(uid, type, mode, status, expiryDate); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "license_info", true, (a, v) -> { + String uid = (String) a[0]; + String type = (String) a[1]; + String mode = (String) a[2]; + LicenseStatus status = LicenseStatus.fromString((String) a[3]); + Long expiryDate = (Long) a[4]; + long primitiveExpiryDate = expiryDate == null ? 
BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS : expiryDate; + return new LicenseInfo(uid, type, mode, status, primitiveExpiryDate); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("uid")); + PARSER.declareString(constructorArg(), new ParseField("type")); + PARSER.declareString(constructorArg(), new ParseField("mode")); + PARSER.declareString(constructorArg(), new ParseField("status")); + PARSER.declareLong(optionalConstructorArg(), new ParseField("expiry_date_in_millis")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field("uid", uid) + .field("type", type) + .field("mode", mode) + .field("status", status.label()); + if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { + builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); + } + return builder.endObject(); + } + } + + public static class BuildInfo implements ToXContentObject, Writeable { + private final String hash; + private final String timestamp; + + public BuildInfo(String hash, String timestamp) { + this.hash = hash; + this.timestamp = timestamp; + } + + public BuildInfo(StreamInput input) throws IOException { + this(input.readString(), input.readString()); + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + output.writeString(hash); + output.writeString(timestamp); + } + + public String getHash() { + return hash; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + BuildInfo rhs = (BuildInfo) other; + return Objects.equals(hash, rhs.hash) + && Objects.equals(timestamp, rhs.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(hash, timestamp); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "build_info", true, (a, v) -> new BuildInfo((String) a[0], (String) a[1])); + static { + PARSER.declareString(constructorArg(), new ParseField("hash")); + PARSER.declareString(constructorArg(), new ParseField("date")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("hash", hash) + .field("date", timestamp) + .endObject(); + } + } + + public static class FeatureSetsInfo implements ToXContentObject, Writeable { + private final Map featureSets; + + public FeatureSetsInfo(Set featureSets) { + Map map = new HashMap<>(featureSets.size()); + for (FeatureSet featureSet : featureSets) { + map.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(map); + } + + public FeatureSetsInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + Map featureSets = new HashMap<>(size); + for (int i = 0; i < size; i++) { + FeatureSet featureSet = new FeatureSet(in); + featureSets.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(featureSets); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(featureSets.size()); + for (FeatureSet featureSet : featureSets.values()) { + featureSet.writeTo(out); + } + } + + public Map getFeatureSets() { + return featureSets; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + 
FeatureSetsInfo rhs = (FeatureSetsInfo) other; + return Objects.equals(featureSets, rhs.featureSets); + } + + @Override + public int hashCode() { + return Objects.hash(featureSets); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + List names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); + for (String name : names) { + builder.field(name, featureSets.get(name), params); + } + return builder.endObject(); + } + + public static class FeatureSet implements ToXContentObject, Writeable { + private final String name; + @Nullable private final String description; + private final boolean available; + private final boolean enabled; + @Nullable private final Map nativeCodeInfo; + + public FeatureSet(String name, @Nullable String description, boolean available, boolean enabled, + @Nullable Map nativeCodeInfo) { + this.name = name; + this.description = description; + this.available = available; + this.enabled = enabled; + this.nativeCodeInfo = nativeCodeInfo; + } + + public FeatureSet(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), + in.getVersion().onOrAfter(Version.V_5_4_0) ? in.readMap() : null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeOptionalString(description); + out.writeBoolean(available); + out.writeBoolean(enabled); + if (out.getVersion().onOrAfter(Version.V_5_4_0)) { + out.writeMap(nativeCodeInfo); + } + } + + public String name() { + return name; + } + + @Nullable + public String description() { + return description; + } + + public boolean available() { + return available; + } + + public boolean enabled() { + return enabled; + } + + @Nullable + public Map nativeCodeInfo() { + return nativeCodeInfo; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + FeatureSet rhs = (FeatureSet) other; + return Objects.equals(name, rhs.name) + && Objects.equals(description, rhs.description) + && available == rhs.available + && enabled == rhs.enabled + && Objects.equals(nativeCodeInfo, rhs.nativeCodeInfo); + } + + @Override + public int hashCode() { + return Objects.hash(name, description, available, enabled, nativeCodeInfo); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "feature_set", true, (a, name) -> { + String description = (String) a[0]; + boolean available = (Boolean) a[1]; + boolean enabled = (Boolean) a[2]; + @SuppressWarnings("unchecked") // Matches up with declaration below + Map nativeCodeInfo = (Map) a[3]; + return new FeatureSet(name, description, available, enabled, nativeCodeInfo); + }); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareBoolean(constructorArg(), new ParseField("available")); + PARSER.declareBoolean(constructorArg(), new ParseField("enabled")); + PARSER.declareObject(optionalConstructorArg(), (p, name) -> p.map(), new ParseField("native_code_info")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (description != null) { + builder.field("description", description); + } + builder.field("available", available); + builder.field("enabled", enabled); + if (nativeCodeInfo != null) { + 
builder.field("native_code_info", nativeCodeInfo); + } + return builder.endObject(); + } + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 0000000000000..83621a9ac3d41 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 0000000000000..ccf681837fdcd --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. */ + public Map> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map rawMap = parser.map(); + Map> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java new file mode 100644 index 0000000000000..3934095512120 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.common;
+
+import java.util.Arrays;
+import java.util.Map;
+
+/**
+ * Common utilities used for XPack protocol classes
+ */
+public final class ProtocolUtils {
+
+    /**
+     * Implements equals for a map of string arrays
+     *
+     * The map of string arrays is used in some XPack protocol classes but doesn't work with equals().
+     */
+    public static boolean equals(Map<String, String[]> a, Map<String, String[]> b) {
+        if (a == null) {
+            return b == null;
+        }
+        if (b == null) {
+            return false;
+        }
+        if (a.size() != b.size()) {
+            return false;
+        }
+        for (Map.Entry<String, String[]> entry : a.entrySet()) {
+            String[] val = entry.getValue();
+            String key = entry.getKey();
+            if (val == null) {
+                if (b.get(key) != null || b.containsKey(key) == false) {
+                    return false;
+                }
+            } else {
+                if (Arrays.equals(val, b.get(key)) == false) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Implements hashCode for a map of string arrays
+     *
+     * The map of string arrays doesn't work with hashCode().
+     */
+    public static int hashCode(Map<String, String[]> a) {
+        int hash = 0;
+        for (Map.Entry<String, String[]> entry : a.entrySet()) {
+            hash += Arrays.hashCode(entry.getValue());
+        }
+        return hash;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java
new file mode 100644
index 0000000000000..994c7e2c2d5a3
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.graph;
+
+import com.carrotsearch.hppc.ObjectIntHashMap;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * A Connection links exactly two {@link Vertex} objects. The basis of a
+ * connection is that one or more documents have been found that contain
+ * this pair of terms, and the strength of the connection is recorded
+ * as a weight.
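+ * <p>
+ * Connections are normally read from a {@link GraphExploreResponse} rather than
+ * built directly; a minimal sketch, assuming a {@code response} obtained elsewhere:
+ * <pre>{@code
+ * for (Connection c : response.getConnections()) {
+ *     // every edge names its two endpoint vertices and carries a weight
+ *     System.out.println(c.getFrom().getTerm() + " -> " + c.getTo().getTerm()
+ *             + " (weight=" + c.getWeight() + ", docs=" + c.getDocCount() + ")");
+ * }
+ * }</pre>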
+ */ +public class Connection { + private Vertex from; + private Vertex to; + private double weight; + private long docCount; + + public Connection(Vertex from, Vertex to, double weight, long docCount) { + this.from = from; + this.to = to; + this.weight = weight; + this.docCount = docCount; + } + + public Connection(StreamInput in, Map vertices) throws IOException { + from = vertices.get(new VertexId(in.readString(), in.readString())); + to = vertices.get(new VertexId(in.readString(), in.readString())); + weight = in.readDouble(); + docCount = in.readVLong(); + } + + Connection() { + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(from.getField()); + out.writeString(from.getTerm()); + out.writeString(to.getField()); + out.writeString(to.getTerm()); + out.writeDouble(weight); + out.writeVLong(docCount); + } + + public ConnectionId getId() { + return new ConnectionId(from.getId(), to.getId()); + } + + public Vertex getFrom() { + return from; + } + + public Vertex getTo() { + return to; + } + + /** + * @return a measure of the relative connectedness between a pair of {@link Vertex} objects + */ + public double getWeight() { + return weight; + } + + /** + * @return the number of documents in the sampled set that contained this + * pair of {@link Vertex} objects. + */ + public long getDocCount() { + return docCount; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Connection other = (Connection) obj; + return docCount == other.docCount && + weight == other.weight && + Objects.equals(to, other.to) && + Objects.equals(from, other.from); + } + + @Override + public int hashCode() { + return Objects.hash(docCount, weight, from, to); + } + + + private static final ParseField SOURCE = new ParseField("source"); + private static final ParseField TARGET = new ParseField("target"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DOC_COUNT = new ParseField("doc_count"); + + + void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap vertexNumbers) throws IOException { + builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from)); + builder.field(TARGET.getPreferredName(), vertexNumbers.get(to)); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DOC_COUNT.getPreferredName(), docCount); + } + + //When deserializing from XContent we need to wait for all vertices to be loaded before + // Connection objects can be created that reference them. This class provides the interim + // state for connections. 
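+    // An illustrative sketch of the intended two-phase flow (the local variable
+    // names here are hypothetical, not part of this class):
+    //   List<Vertex> vertices = ...;                     // from the "vertices" array
+    //   List<UnresolvedConnection> raw = ...;            // from the "connections" array
+    //   Connection first = raw.get(0).resolve(vertices); // indices become references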
+ static class UnresolvedConnection { + int fromIndex; + int toIndex; + double weight; + long docCount; + UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) { + super(); + this.fromIndex = fromIndex; + this.toIndex = toIndex; + this.weight = weight; + this.docCount = docCount; + } + public Connection resolve(List vertices) { + return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "ConnectionParser", true, + args -> { + int source = (Integer) args[0]; + int target = (Integer) args[1]; + double weight = (Double) args[2]; + long docCount = (Long) args[3]; + return new UnresolvedConnection(source, target, weight, docCount); + }); + + static { + PARSER.declareInt(constructorArg(), SOURCE); + PARSER.declareInt(constructorArg(), TARGET); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareLong(constructorArg(), DOC_COUNT); + } + static UnresolvedConnection fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + } + + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Connection} + */ + public static class ConnectionId { + private final VertexId source; + private final VertexId target; + + public ConnectionId(VertexId source, VertexId target) { + this.source = source; + this.target = target; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ConnectionId vertexId = (ConnectionId) o; + + if (source != null ? !source.equals(vertexId.source) : vertexId.source != null) + return false; + if (target != null ? !target.equals(vertexId.target) : vertexId.target != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = source != null ? source.hashCode() : 0; + result = 31 * result + (target != null ? target.hashCode() : 0); + return result; + } + + public VertexId getSource() { + return source; + } + + public VertexId getTarget() { + return target; + } + + @Override + public String toString() { + return getSource() + "->" + getTarget(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java new file mode 100644 index 0000000000000..196982c0a35fb --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java @@ -0,0 +1,388 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +/** + * Holds the criteria required to guide the exploration of connected terms which + * can be returned as a graph. + */ +public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { + + public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop"; + public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); + private String[] types = Strings.EMPTY_ARRAY; + private String routing; + private TimeValue timeout; + + private int sampleSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; + private String sampleDiversityField; + private int maxDocsPerDiversityValue; + private boolean useSignificance = true; + private boolean returnDetailedInfo; + + private List hops = new ArrayList<>(); + + public GraphExploreRequest() { + } + + /** + * Constructs a new graph request to run against the provided indices. No + * indices means it will run against all indices. + */ + public GraphExploreRequest(String... indices) { + this.indices = indices; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (hops.size() == 0) { + validationException = ValidateActions.addValidationError(NO_HOPS_ERROR_MESSAGE, validationException); + } + for (Hop hop : hops) { + validationException = hop.validate(validationException); + } + return validationException; + } + + @Override + public String[] indices() { + return this.indices; + } + + @Override + public GraphExploreRequest indices(String... indices) { + this.indices = indices; + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public GraphExploreRequest indicesOptions(IndicesOptions indicesOptions) { + if (indicesOptions == null) { + throw new IllegalArgumentException("IndicesOptions must not be null"); + } + this.indicesOptions = indicesOptions; + return this; + } + + public String[] types() { + return this.types; + } + + public GraphExploreRequest types(String... 
types) { + this.types = types; + return this; + } + + public String routing() { + return this.routing; + } + + public GraphExploreRequest routing(String routing) { + this.routing = routing; + return this; + } + + public GraphExploreRequest routing(String... routings) { + this.routing = Strings.arrayToCommaDelimitedString(routings); + return this; + } + + public TimeValue timeout() { + return timeout; + } + + /** + * Graph exploration can be set to timeout after the given period. Search + * operations involved in each hop are limited to the remaining time + * available but can still overrun due to the nature of their "best efforts" + * timeout support. When a timeout occurs partial results are returned. + * + * @param timeout + * a {@link TimeValue} object which determines the maximum length + * of time to spend exploring + */ + public GraphExploreRequest timeout(TimeValue timeout) { + if (timeout == null) { + throw new IllegalArgumentException("timeout must not be null"); + } + this.timeout = timeout; + return this; + } + + public GraphExploreRequest timeout(String timeout) { + timeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout")); + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + types = in.readStringArray(); + routing = in.readOptionalString(); + timeout = in.readOptionalTimeValue(); + sampleSize = in.readInt(); + sampleDiversityField = in.readOptionalString(); + maxDocsPerDiversityValue = in.readInt(); + + useSignificance = in.readBoolean(); + returnDetailedInfo = in.readBoolean(); + + int numHops = in.readInt(); + Hop parentHop = null; + for (int i = 0; i < numHops; i++) { + Hop hop = new Hop(parentHop); + hop.readFrom(in); + hops.add(hop); + parentHop = hop; + } + + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeStringArray(types); + out.writeOptionalString(routing); + out.writeOptionalTimeValue(timeout); + + out.writeInt(sampleSize); + out.writeOptionalString(sampleDiversityField); + out.writeInt(maxDocsPerDiversityValue); + + out.writeBoolean(useSignificance); + out.writeBoolean(returnDetailedInfo); + out.writeInt(hops.size()); + for (Iterator iterator = hops.iterator(); iterator.hasNext();) { + Hop hop = iterator.next(); + hop.writeTo(out); + } + } + + @Override + public String toString() { + return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]"; + } + + /** + * The number of top-matching documents that are considered during each hop + * (default is {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE} + * Very small values (less than 50) may not provide sufficient + * weight-of-evidence to identify significant connections between terms. + *

+     * Very large values (many thousands) are not recommended with loosely
+     * defined queries (fuzzy queries or those with many OR clauses). This is
+     * because any useful signals in the best documents are diluted with
+     * irrelevant noise from low-quality matches. Performance is also typically
+     * better with smaller samples as there are fewer look-ups required for
+     * background frequencies of terms found in the documents.
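+     * <p>
+     * A hedged example; the index name and the chosen size are illustrative
+     * assumptions, not recommendations from this API:
+     * <pre>{@code
+     * GraphExploreRequest request = new GraphExploreRequest("weblogs");
+     * request.sampleSize(500); // above the "less than 50" floor, below "many thousands"
+     * }</pre>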
+     *
+     * @param maxNumberOfDocsPerHop
+     *            shard-level sample size in documents
+     */
+    public void sampleSize(int maxNumberOfDocsPerHop) {
+        sampleSize = maxNumberOfDocsPerHop;
+    }
+
+    public int sampleSize() {
+        return sampleSize;
+    }
+
+    /**
+     * Optional choice of single-value field on which to diversify sampled
+     * search results
+     */
+    public void sampleDiversityField(String name) {
+        sampleDiversityField = name;
+    }
+
+    public String sampleDiversityField() {
+        return sampleDiversityField;
+    }
+
+    /**
+     * Optional number of permitted docs with the same value in sampled search
+     * results. Must also declare the choice of field using sampleDiversityField
+     */
+    public void maxDocsPerDiversityValue(int maxDocs) {
+        this.maxDocsPerDiversityValue = maxDocs;
+    }
+
+    public int maxDocsPerDiversityValue() {
+        return maxDocsPerDiversityValue;
+    }
+
+    /**
+     * Controls the choice of algorithm used to select interesting terms. The
+     * default value is true, which means terms are selected based on
+     * significance (see the {@link SignificantTerms} aggregation) rather than
+     * popularity (using the {@link TermsAggregator}).
+     *
+     * @param value
+     *            true if the significant_terms algorithm should be used.
+     */
+    public void useSignificance(boolean value) {
+        this.useSignificance = value;
+    }
+
+    public boolean useSignificance() {
+        return useSignificance;
+    }
+
+    /**
+     * Return detailed information about vertex frequencies as part of JSON
+     * results - defaults to false
+     *
+     * @param value
+     *            true if detailed information is required in JSON responses
+     */
+    public void returnDetailedInfo(boolean value) {
+        this.returnDetailedInfo = value;
+    }
+
+    public boolean returnDetailedInfo() {
+        return returnDetailedInfo;
+    }
+
+    /**
+     * Add a stage in the graph exploration. Each hop represents a stage of
+     * querying Elasticsearch to identify terms which can then be connected to
+     * other terms in a subsequent hop.
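+     * <p>
+     * The guiding query may be {@code null}, in which case the hop considers all
+     * documents (a match_all query is substituted). A minimal sketch; the index
+     * and field names are illustrative:
+     * <pre>{@code
+     * GraphExploreRequest request = new GraphExploreRequest("weblogs");
+     * Hop hop = request.createNextHop(null); // no guiding query
+     * hop.addVertexRequest("ip").size(10);
+     * }</pre>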
+ * + * @param guidingQuery + * optional choice of query which influences which documents are + * considered in this stage + * @return a {@link Hop} object that holds settings for a stage in the graph + * exploration + */ + public Hop createNextHop(QueryBuilder guidingQuery) { + Hop parent = null; + if (hops.size() > 0) { + parent = hops.get(hops.size() - 1); + } + Hop newHop = new Hop(parent); + newHop.guidingQuery = guidingQuery; + hops.add(newHop); + return newHop; + } + + public int getHopNumbers() { + return hops.size(); + } + + public Hop getHop(int hopNumber) { + return hops.get(hopNumber); + } + + public static class TermBoost { + String term; + float boost; + + public TermBoost(String term, float boost) { + super(); + this.term = term; + if (boost <= 0) { + throw new IllegalArgumentException("Boosts must be a positive non-zero number"); + } + this.boost = boost; + } + + TermBoost() { + } + + public String getTerm() { + return term; + } + + public float getBoost() { + return boost; + } + + void readFrom(StreamInput in) throws IOException { + this.term = in.readString(); + this.boost = in.readFloat(); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeFloat(boost); + } + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject("controls"); + { + if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { + builder.field("sample_size", sampleSize); + } + if (sampleDiversityField != null) { + builder.startObject("sample_diversity"); + builder.field("field", sampleDiversityField); + builder.field("max_docs_per_value", maxDocsPerDiversityValue); + builder.endObject(); + } + builder.field("use_significance", useSignificance); + if (returnDetailedInfo) { + builder.field("return_detailed_stats", returnDetailedInfo); + } + } + builder.endObject(); + + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.startObject("connections"); + } + hop.toXContent(builder, params); + } + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.endObject(); + } + } + builder.endObject(); + + return builder; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java new file mode 100644 index 0000000000000..12eb20617ff0f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; +import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects + * (nodes and edges in common graph parlance). + * + * @see GraphExploreRequest + */ +public class GraphExploreResponse extends ActionResponse implements ToXContentObject { + + private long tookInMillis; + private boolean timedOut = false; + private ShardOperationFailedException[] shardFailures = ShardSearchFailure.EMPTY_ARRAY; + private Map vertices; + private Map connections; + private boolean returnDetailedInfo; + static final String RETURN_DETAILED_INFO_PARAM = "returnDetailedInfo"; + + public GraphExploreResponse() { + } + + public GraphExploreResponse(long tookInMillis, boolean timedOut, ShardOperationFailedException[] shardFailures, + Map vertices, Map connections, boolean returnDetailedInfo) { + this.tookInMillis = tookInMillis; + this.timedOut = timedOut; + this.shardFailures = shardFailures; + this.vertices = vertices; + this.connections = connections; + this.returnDetailedInfo = returnDetailedInfo; + } + + + public TimeValue getTook() { + return new TimeValue(tookInMillis); + } + + public long getTookInMillis() { + return tookInMillis; + } + + /** + * @return true if the time stated in {@link GraphExploreRequest#timeout(TimeValue)} was exceeded + * (not all hops may have been completed in this case) + */ + public boolean isTimedOut() { + return this.timedOut; + } + public ShardOperationFailedException[] getShardFailures() { + return shardFailures; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + tookInMillis = in.readVLong(); + timedOut = in.readBoolean(); + + int size = in.readVInt(); + if (size == 0) { + shardFailures = ShardSearchFailure.EMPTY_ARRAY; + } else { + shardFailures = new ShardSearchFailure[size]; + for (int i = 0; i < shardFailures.length; i++) { + shardFailures[i] = readShardSearchFailure(in); + } + } + // read vertices + size = in.readVInt(); + vertices = new HashMap<>(); + for (int i = 0; i < size; i++) { + Vertex n = Vertex.readFrom(in); + vertices.put(n.getId(), n); + } + + size = in.readVInt(); + + connections = new HashMap<>(); + for (int i = 0; i < size; i++) { + 
Connection e = new Connection(in, vertices); + connections.put(e.getId(), e); + } + + returnDetailedInfo = in.readBoolean(); + + } + + public Collection getConnections() { + return connections.values(); + } + + public Collection getVertices() { + return vertices.values(); + } + + public Vertex getVertex(VertexId id) { + return vertices.get(id); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVLong(tookInMillis); + out.writeBoolean(timedOut); + + out.writeVInt(shardFailures.length); + for (ShardOperationFailedException shardSearchFailure : shardFailures) { + shardSearchFailure.writeTo(out); + } + + out.writeVInt(vertices.size()); + for (Vertex vertex : vertices.values()) { + vertex.writeTo(out); + } + + out.writeVInt(connections.size()); + for (Connection connection : connections.values()) { + connection.writeTo(out); + } + + out.writeBoolean(returnDetailedInfo); + + } + + private static final ParseField TOOK = new ParseField("took"); + private static final ParseField TIMED_OUT = new ParseField("timed_out"); + private static final ParseField VERTICES = new ParseField("vertices"); + private static final ParseField CONNECTIONS = new ParseField("connections"); + private static final ParseField FAILURES = new ParseField("failures"); + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOOK.getPreferredName(), tookInMillis); + builder.field(TIMED_OUT.getPreferredName(), timedOut); + + builder.startArray(FAILURES.getPreferredName()); + if (shardFailures != null) { + for (ShardOperationFailedException shardFailure : shardFailures) { + builder.startObject(); + shardFailure.toXContent(builder, params); + builder.endObject(); + } + } + builder.endArray(); + + ObjectIntHashMap vertexNumbers = new ObjectIntHashMap<>(vertices.size()); + + Map extraParams = new HashMap<>(); + extraParams.put(RETURN_DETAILED_INFO_PARAM, Boolean.toString(returnDetailedInfo)); + Params extendedParams = new DelegatingMapParams(extraParams, params); + + builder.startArray(VERTICES.getPreferredName()); + for (Vertex vertex : vertices.values()) { + builder.startObject(); + vertexNumbers.put(vertex, vertexNumbers.size()); + vertex.toXContent(builder, extendedParams); + builder.endObject(); + } + builder.endArray(); + + builder.startArray(CONNECTIONS.getPreferredName()); + for (Connection connection : connections.values()) { + builder.startObject(); + connection.toXContent(builder, extendedParams, vertexNumbers); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "GraphExploreResponsenParser", true, + args -> { + GraphExploreResponse result = new GraphExploreResponse(); + result.vertices = new HashMap<>(); + result.connections = new HashMap<>(); + + result.tookInMillis = (Long) args[0]; + result.timedOut = (Boolean) args[1]; + + @SuppressWarnings("unchecked") + List vertices = (List) args[2]; + @SuppressWarnings("unchecked") + List unresolvedConnections = (List) args[3]; + @SuppressWarnings("unchecked") + List failures = (List) args[4]; + for (Vertex vertex : vertices) { + // reverse-engineer if detailed stats were requested - + // mainly here for testing framework's equality tests + result.returnDetailedInfo = result.returnDetailedInfo || vertex.getFg() > 0; + result.vertices.put(vertex.getId(), vertex); + } + for (UnresolvedConnection 
unresolvedConnection : unresolvedConnections) { + Connection resolvedConnection = unresolvedConnection.resolve(vertices); + result.connections.put(resolvedConnection.getId(), resolvedConnection); + } + if (failures.size() > 0) { + result.shardFailures = failures.toArray(new ShardSearchFailure[failures.size()]); + } + return result; + }); + + static { + PARSER.declareLong(constructorArg(), TOOK); + PARSER.declareBoolean(constructorArg(), TIMED_OUT); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Vertex.fromXContent(p), VERTICES); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> UnresolvedConnection.fromXContent(p), CONNECTIONS); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ShardSearchFailure.fromXContent(p), FAILURES); + } + + public static GraphExploreResponse fromXContext(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java new file mode 100644 index 0000000000000..e61403e8b37a8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * A Hop represents one of potentially many stages in a graph exploration. + * Each Hop identifies one or more fields in which it will attempt to find + * terms that are significantly connected to the previous Hop. Each field is identified + * using a {@link VertexRequest} + * + *

+ * An example series of Hops on webserver logs would be:
+ * <ol>
+ * <li>an initial Hop to find
+ * the top ten IPAddresses trying to access urls containing the word "admin"</li>
+ * <li>a secondary Hop to see which other URLs those IPAddresses were trying to access</li>
+ * </ol>
+ * <p>
+ * Optionally, each hop can contain a "guiding query" that further limits the set of documents considered.
+ * In our weblog example above we might choose to constrain the second hop to only look at log records that
+ * had a response code of 404.
+ * </p>
+ * <p>
+ * If absent, the list of {@link VertexRequest}s is inherited from the prior Hop's list to avoid repeating
+ * the fields that will be examined at each stage.
+ * </p>
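+ * <p>
+ * A sketch of the weblog example above in code; the index name, field names and
+ * guiding queries are illustrative assumptions, not requirements of the API:
+ * <pre>{@code
+ * GraphExploreRequest request = new GraphExploreRequest("weblogs");
+ * Hop hop1 = request.createNextHop(QueryBuilders.wildcardQuery("url", "*admin*"));
+ * hop1.addVertexRequest("ip").size(10);
+ * Hop hop2 = request.createNextHop(QueryBuilders.termQuery("status", 404));
+ * hop2.addVertexRequest("url");
+ * }</pre>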
+ * + */ +public class Hop implements ToXContentFragment{ + final Hop parentHop; + List vertices = null; + QueryBuilder guidingQuery = null; + + public Hop(Hop parent) { + this.parentHop = parent; + } + + public ActionRequestValidationException validate(ActionRequestValidationException validationException) { + + if (getEffectiveVertexRequests().size() == 0) { + validationException = ValidateActions.addValidationError(GraphExploreRequest.NO_VERTICES_ERROR_MESSAGE, validationException); + } + return validationException; + + } + + public Hop getParentHop() { + return parentHop; + } + + void writeTo(StreamOutput out) throws IOException { + out.writeOptionalNamedWriteable(guidingQuery); + if (vertices == null) { + out.writeVInt(0); + } else { + out.writeVInt(vertices.size()); + for (VertexRequest vr : vertices) { + vr.writeTo(out); + } + } + } + + void readFrom(StreamInput in) throws IOException { + guidingQuery = in.readOptionalNamedWriteable(QueryBuilder.class); + int size = in.readVInt(); + if (size > 0) { + vertices = new ArrayList<>(); + for (int i = 0; i < size; i++) { + VertexRequest vr = new VertexRequest(); + vr.readFrom(in); + vertices.add(vr); + } + } + } + + public QueryBuilder guidingQuery() { + if (guidingQuery != null) { + return guidingQuery; + } + return QueryBuilders.matchAllQuery(); + } + + /** + * Add a field in which this {@link Hop} will look for terms that are highly linked to + * previous hops and optionally the guiding query. + * + * @param fieldName a field in the chosen index + */ + public VertexRequest addVertexRequest(String fieldName) { + if (vertices == null) { + vertices = new ArrayList<>(); + } + VertexRequest vr = new VertexRequest(); + vr.fieldName(fieldName); + vertices.add(vr); + return vr; + } + + /** + * An optional parameter that focuses the exploration on documents that + * match the given query. + * + * @param queryBuilder any query + */ + public void guidingQuery(QueryBuilder queryBuilder) { + guidingQuery = queryBuilder; + } + + protected List getEffectiveVertexRequests() { + if (vertices != null) { + return vertices; + } + if (parentHop == null) { + return Collections.emptyList(); + } + // otherwise inherit settings from parent + return parentHop.getEffectiveVertexRequests(); + } + + public int getNumberVertexRequests() { + return getEffectiveVertexRequests().size(); + } + + public VertexRequest getVertexRequest(int requestNumber) { + return getEffectiveVertexRequests().get(requestNumber); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (guidingQuery != null) { + builder.field("query"); + guidingQuery.toXContent(builder, params); + } + if(vertices != null && vertices.size()>0) { + builder.startArray("vertices"); + for (VertexRequest vertexRequest : vertices) { + vertexRequest.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java new file mode 100644 index 0000000000000..f17812a6396a0 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java @@ -0,0 +1,255 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * A vertex in a graph response represents a single term (a field and value pair) + * which appears in one or more documents found as part of the graph exploration. + * + * A vertex term could be a bank account number, an email address, a hashtag or any + * other term that appears in documents and is interesting to represent in a network. + */ +public class Vertex implements ToXContentFragment { + + private final String field; + private final String term; + private double weight; + private final int depth; + private final long bg; + private long fg; + private static final ParseField FIELD = new ParseField("field"); + private static final ParseField TERM = new ParseField("term"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DEPTH = new ParseField("depth"); + private static final ParseField FG = new ParseField("fg"); + private static final ParseField BG = new ParseField("bg"); + + + public Vertex(String field, String term, double weight, int depth, long bg, long fg) { + super(); + this.field = field; + this.term = term; + this.weight = weight; + this.depth = depth; + this.bg = bg; + this.fg = fg; + } + + static Vertex readFrom(StreamInput in) throws IOException { + return new Vertex(in.readString(), in.readString(), in.readDouble(), in.readVInt(), in.readVLong(), in.readVLong()); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(term); + out.writeDouble(weight); + out.writeVInt(depth); + out.writeVLong(bg); + out.writeVLong(fg); + } + + @Override + public int hashCode() { + return Objects.hash(field, term, weight, depth, bg, fg); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Vertex other = (Vertex) obj; + return depth == other.depth && + weight == other.weight && + bg == other.bg && + fg == other.fg && + Objects.equals(field, other.field) && + Objects.equals(term, other.term); + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false); + builder.field(FIELD.getPreferredName(), field); + builder.field(TERM.getPreferredName(), term); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DEPTH.getPreferredName(), depth); + if (returnDetailedInfo) { + builder.field(FG.getPreferredName(), fg); + builder.field(BG.getPreferredName(), bg); + } + return builder; + } + + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "VertexParser", true, + args -> { + String field = (String) args[0]; + String term = (String) args[1]; + double weight = 
(Double) args[2]; + int depth = (Integer) args[3]; + Long optionalBg = (Long) args[4]; + Long optionalFg = (Long) args[5]; + long bg = optionalBg == null ? 0 : optionalBg; + long fg = optionalFg == null ? 0 : optionalFg; + return new Vertex(field, term, weight, depth, bg, fg); + }); + + static { + PARSER.declareString(constructorArg(), FIELD); + PARSER.declareString(constructorArg(), TERM); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareInt(constructorArg(), DEPTH); + PARSER.declareLong(optionalConstructorArg(), BG); + PARSER.declareLong(optionalConstructorArg(), FG); + } + + static Vertex fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + + + /** + * @return a {@link VertexId} object that uniquely identifies this Vertex + */ + public VertexId getId() { + return createId(field, term); + } + + /** + * A convenience method for creating a {@link VertexId} + * @param field the field + * @param term the term + * @return a {@link VertexId} that can be used for looking up vertices + */ + public static VertexId createId(String field, String term) { + return new VertexId(field,term); + } + + @Override + public String toString() { + return getId().toString(); + } + + public String getField() { + return field; + } + + public String getTerm() { + return term; + } + + /** + * The weight of a vertex is an accumulation of all of the {@link Connection}s + * that are linked to this {@link Vertex} as part of a graph exploration. + * It is used internally to identify the most interesting vertices to be returned. + * @return a measure of the {@link Vertex}'s relative importance. + */ + public double getWeight() { + return weight; + } + + public void setWeight(final double weight) { + this.weight = weight; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * @return the number of documents in the index that contain this term (see bg_count in + * + * the significant_terms aggregation) + */ + public long getBg() { + return bg; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * Together with {@link #getBg()} these numbers are used to derive the significance of a term. + * @return the number of documents in the sample of best matching documents that contain this term (see fg_count in + * + * the significant_terms aggregation) + */ + public long getFg() { + return fg; + } + + public void setFg(final long fg) { + this.fg = fg; + } + + /** + * @return the sequence number in the series of hops where this Vertex term was first encountered + */ + public int getHopDepth() { + return depth; + } + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Vertex} + */ + public static class VertexId { + private final String field; + private final String term; + + public VertexId(String field, String term) { + this.field = field; + this.term = term; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + VertexId vertexId = (VertexId) o; + + if (field != null ? !field.equals(vertexId.field) : vertexId.field != null) + return false; + if (term != null ? !term.equals(vertexId.term) : vertexId.term != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = field != null ? 
field.hashCode() : 0; + result = 31 * result + (term != null ? term.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return field + ":" + term; + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java new file mode 100644 index 0000000000000..63d2c616547d4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * A request to identify terms from a choice of field as part of a {@link Hop}. + * Optionally, a set of terms can be provided that are used as an exclusion or + * inclusion list to filter which terms are considered. + * + */ +public class VertexRequest implements ToXContentObject { + private String fieldName; + private int size = DEFAULT_SIZE; + public static final int DEFAULT_SIZE = 5; + private Map includes; + private Set excludes; + public static final int DEFAULT_MIN_DOC_COUNT = 3; + private int minDocCount = DEFAULT_MIN_DOC_COUNT; + public static final int DEFAULT_SHARD_MIN_DOC_COUNT = 2; + private int shardMinDocCount = DEFAULT_SHARD_MIN_DOC_COUNT; + + + public VertexRequest() { + + } + + void readFrom(StreamInput in) throws IOException { + fieldName = in.readString(); + size = in.readVInt(); + minDocCount = in.readVInt(); + shardMinDocCount = in.readVInt(); + + int numIncludes = in.readVInt(); + if (numIncludes > 0) { + includes = new HashMap<>(); + for (int i = 0; i < numIncludes; i++) { + TermBoost tb = new TermBoost(); + tb.readFrom(in); + includes.put(tb.term, tb); + } + } + + int numExcludes = in.readVInt(); + if (numExcludes > 0) { + excludes = new HashSet<>(); + for (int i = 0; i < numExcludes; i++) { + excludes.add(in.readString()); + } + } + + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeVInt(size); + out.writeVInt(minDocCount); + out.writeVInt(shardMinDocCount); + + if (includes != null) { + out.writeVInt(includes.size()); + for (TermBoost tb : includes.values()) { + tb.writeTo(out); + } + } else { + out.writeVInt(0); + } + + if (excludes != null) { + out.writeVInt(excludes.size()); + for (String term : excludes) { + out.writeString(term); + } + } else { + out.writeVInt(0); + } + } + + public String fieldName() { + return fieldName; + } + + public VertexRequest fieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public int size() { + return size; + } + + /** + * @param size The maximum number of terms that should be returned from this field as part of this {@link Hop} + */ + public VertexRequest size(int size) { + this.size = size; + return this; + } + + public boolean hasIncludeClauses() { + 
+        return includes != null && includes.size() > 0;
+    }
+
+    public boolean hasExcludeClauses() {
+        return excludes != null && excludes.size() > 0;
+    }
+
+    /**
+     * Adds a term that should be excluded from results
+     * @param term A term to be excluded
+     */
+    public void addExclude(String term) {
+        if (includes != null) {
+            throw new IllegalArgumentException("Cannot have both include and exclude clauses");
+        }
+        if (excludes == null) {
+            excludes = new HashSet<>();
+        }
+        excludes.add(term);
+    }
+
+    /**
+     * Adds a term to the set of allowed values - the boost defines the relative
+     * importance when pursuing connections in subsequent {@link Hop}s. The boost value
+     * appears as part of the query.
+     * @param term a required term
+     * @param boost the relative boost applied to the term
+     */
+    public void addInclude(String term, float boost) {
+        if (excludes != null) {
+            throw new IllegalArgumentException("Cannot have both include and exclude clauses");
+        }
+        if (includes == null) {
+            includes = new HashMap<>();
+        }
+        includes.put(term, new TermBoost(term, boost));
+    }
+
+    public TermBoost[] includeValues() {
+        return includes.values().toArray(new TermBoost[includes.size()]);
+    }
+
+    public String[] includeValuesAsStringArray() {
+        String[] result = new String[includes.size()];
+        int i = 0;
+        for (TermBoost tb : includes.values()) {
+            result[i++] = tb.term;
+        }
+        return result;
+    }
+
+    public String[] excludesAsArray() {
+        return excludes.toArray(new String[excludes.size()]);
+    }
+
+    public int minDocCount() {
+        return minDocCount;
+    }
+
+    /**
+     * A "certainty" threshold which defines the weight-of-evidence required before
+     * a term found in this field is identified as a useful connection
+     *
+     * @param value The minimum number of documents that contain this term found in the samples used across all shards
+     */
+    public VertexRequest minDocCount(int value) {
+        minDocCount = value;
+        return this;
+    }
+
+
+    public int shardMinDocCount() {
+        return Math.min(shardMinDocCount, minDocCount);
+    }
+
+    /**
+     * A "certainty" threshold which defines the weight-of-evidence required before
+     * a term found in this field is identified as a useful connection
+     *
+     * @param value The minimum number of documents that contain this term found in the samples used on each shard
+     */
+    public VertexRequest shardMinDocCount(int value) {
+        shardMinDocCount = value;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("field", fieldName);
+        if (size != DEFAULT_SIZE) {
+            builder.field("size", size);
+        }
+        if (minDocCount != DEFAULT_MIN_DOC_COUNT) {
+            builder.field("min_doc_count", minDocCount);
+        }
+        if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) {
+            builder.field("shard_min_doc_count", shardMinDocCount);
+        }
+        if (includes != null) {
+            builder.startArray("include");
+            for (TermBoost tb : includes.values()) {
+                builder.startObject();
+                builder.field("term", tb.term);
+                builder.field("boost", tb.boost);
+                builder.endObject();
+            }
+            builder.endArray();
+        }
+        if (excludes != null) {
+            builder.startArray("exclude");
+            for (String value : excludes) {
+                builder.value(value);
+            }
+            builder.endArray();
+        }
+        builder.endObject();
+        return builder;
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java
new file mode 100644
index 0000000000000..5d5dd0f5ef61d
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for the default distribution's Graph
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.graph;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java
new file mode 100644
index 0000000000000..62353b093b5b5
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.AcknowledgedRequest;
+
+
+public class DeleteLicenseRequest extends AcknowledgedRequest<DeleteLicenseRequest> {
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java
new file mode 100644
index 0000000000000..971e181ee13d4
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.MasterNodeReadRequest;
+
+
+public class GetLicenseRequest extends MasterNodeReadRequest<GetLicenseRequest> {
+
+    public GetLicenseRequest() {
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java
new file mode 100644
index 0000000000000..6d5e1b5653fe7
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionResponse; + +public class GetLicenseResponse extends ActionResponse { + + private String license; + + GetLicenseResponse() { + } + + public GetLicenseResponse(String license) { + this.license = license; + } + + public String getLicenseDefinition() { + return license; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java new file mode 100644 index 0000000000000..5bc66ab745e49 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +/** + * Status of an X-Pack license. + */ +public enum LicenseStatus implements Writeable { + + ACTIVE("active"), + INVALID("invalid"), + EXPIRED("expired"); + + private final String label; + + LicenseStatus(String label) { + this.label = label; + } + + public String label() { + return label; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + } + + public static LicenseStatus readFrom(StreamInput in) throws IOException { + return fromString(in.readString()); + } + + public static LicenseStatus fromString(String value) { + switch (value) { + case "active": + return ACTIVE; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown license status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java new file mode 100644 index 0000000000000..18745653e761e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.license; + +import java.util.Locale; + +public enum LicensesStatus { + VALID((byte) 0), + INVALID((byte) 1), + EXPIRED((byte) 2); + + private final byte id; + + LicensesStatus(byte id) { + this.id = id; + } + + public int id() { + return id; + } + + public static LicensesStatus fromId(int id) { + if (id == 0) { + return VALID; + } else if (id == 1) { + return INVALID; + } else if (id == 2) { + return EXPIRED; + } else { + throw new IllegalStateException("no valid LicensesStatus for id=" + id); + } + } + + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + public static LicensesStatus fromString(String value) { + switch (value) { + case "valid": + return VALID; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown licenses status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java new file mode 100644 index 0000000000000..342e6c296e7ed --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + +public class PutLicenseRequest extends AcknowledgedRequest { + + private String licenseDefinition; + private boolean acknowledge = false; + + public PutLicenseRequest() { + + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public void setLicenseDefinition(String licenseDefinition) { + this.licenseDefinition = licenseDefinition; + } + + public String getLicenseDefinition() { + return licenseDefinition; + } + + public void setAcknowledge(boolean acknowledge) { + this.acknowledge = acknowledge; + } + + public boolean isAcknowledge() { + return acknowledge; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java new file mode 100644 index 0000000000000..206c5a3b38366 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.common.ProtocolUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class PutLicenseResponse extends AcknowledgedResponse {
+
+    private static final ConstructingObjectParser<PutLicenseResponse, Void> PARSER = new ConstructingObjectParser<>(
+        "put_license_response", true, (a, v) -> {
+            boolean acknowledged = (Boolean) a[0];
+            LicensesStatus licensesStatus = LicensesStatus.fromString((String) a[1]);
+            @SuppressWarnings("unchecked")
+            Tuple<String, Map<String, String[]>> acknowledgements = (Tuple<String, Map<String, String[]>>) a[2];
+            if (acknowledgements == null) {
+                return new PutLicenseResponse(acknowledged, licensesStatus);
+            } else {
+                return new PutLicenseResponse(acknowledged, licensesStatus, acknowledgements.v1(), acknowledgements.v2());
+            }
+
+        });
+
+    static {
+        PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
+        PARSER.declareString(constructorArg(), new ParseField("license_status"));
+        PARSER.declareObject(optionalConstructorArg(), (parser, v) -> {
+                Map<String, String[]> acknowledgeMessages = new HashMap<>();
+                String message = null;
+                XContentParser.Token token;
+                String currentFieldName = null;
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    if (token == XContentParser.Token.FIELD_NAME) {
+                        currentFieldName = parser.currentName();
+                    } else {
+                        if (currentFieldName == null) {
+                            throw new XContentParseException(parser.getTokenLocation(), "expected message header or acknowledgement");
+                        }
+                        if ("message".equals(currentFieldName)) {
+                            if (token != XContentParser.Token.VALUE_STRING) {
+                                throw new XContentParseException(parser.getTokenLocation(), "unexpected message header type");
+                            }
+                            message = parser.text();
+                        } else {
+                            if (token != XContentParser.Token.START_ARRAY) {
+                                throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement type");
+                            }
+                            List<String> acknowledgeMessagesList = new ArrayList<>();
+                            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                                if (token != XContentParser.Token.VALUE_STRING) {
+                                    throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement text");
+                                }
+                                acknowledgeMessagesList.add(parser.text());
+                            }
+                            acknowledgeMessages.put(currentFieldName, acknowledgeMessagesList.toArray(new String[0]));
+                        }
+                    }
+                }
+                return new Tuple<>(message, acknowledgeMessages);
+            },
+            new ParseField("acknowledge"));
+    }
+
+    private LicensesStatus status;
+    private Map<String, String[]> acknowledgeMessages;
+    private String acknowledgeHeader;
+
+    public PutLicenseResponse() {
+    }
+
+    public PutLicenseResponse(boolean acknowledged, LicensesStatus status) {
+        this(acknowledged, status, null, Collections.emptyMap());
+    }
+
+    public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader,
+                              Map<String, String[]> acknowledgeMessages) {
+        super(acknowledged);
+        this.status = status;
+        this.acknowledgeHeader = acknowledgeHeader;
+        this.acknowledgeMessages = acknowledgeMessages;
+    }
+
+    public LicensesStatus status() {
+        return status;
+    }
+
+    public Map<String, String[]> acknowledgeMessages() {
+        return acknowledgeMessages;
+    }
+
+    public String acknowledgeHeader() {
+        return acknowledgeHeader;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        status = LicensesStatus.fromId(in.readVInt());
+        acknowledgeHeader = in.readOptionalString();
+        int size = in.readVInt();
+        Map<String, String[]> acknowledgeMessages = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            String feature = in.readString();
+            int nMessages = in.readVInt();
+            String[] messages = new String[nMessages];
+            for (int j = 0; j < nMessages; j++) {
+                messages[j] = in.readString();
+            }
+            acknowledgeMessages.put(feature, messages);
+        }
+        this.acknowledgeMessages = acknowledgeMessages;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeVInt(status.id());
+        out.writeOptionalString(acknowledgeHeader);
+        out.writeVInt(acknowledgeMessages.size());
+        for (Map.Entry<String, String[]> entry : acknowledgeMessages.entrySet()) {
+            out.writeString(entry.getKey());
+            out.writeVInt(entry.getValue().length);
+            for (String message : entry.getValue()) {
+                out.writeString(message);
+            }
+        }
+    }
+
+    @Override
+    protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
+        builder.field("license_status", status.toString());
+        if (!acknowledgeMessages.isEmpty()) {
+            builder.startObject("acknowledge");
+            builder.field("message", acknowledgeHeader);
+            for (Map.Entry<String, String[]> entry : acknowledgeMessages.entrySet()) {
+                builder.startArray(entry.getKey());
+                for (String message : entry.getValue()) {
+                    builder.value(message);
+                }
+                builder.endArray();
+            }
+            builder.endObject();
+        }
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this, true, true);
+    }
+
+    public static PutLicenseResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        if (!super.equals(o)) return false;
+        PutLicenseResponse that = (PutLicenseResponse) o;
+
+        return status == that.status &&
+            ProtocolUtils.equals(acknowledgeMessages, that.acknowledgeMessages) &&
+            Objects.equals(acknowledgeHeader, that.acknowledgeHeader);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), status, ProtocolUtils.hashCode(acknowledgeMessages), acknowledgeHeader);
+    }
+
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java
new file mode 100644
index 0000000000000..a0a80a9958b95
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for the default distribution's License
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.license;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java
new file mode 100644
index 0000000000000..3ae952fb618e2
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.master.MasterNodeReadRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
+
+public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeInfoRequest> implements IndicesRequest.Replaceable {
+
+    private String[] indices = Strings.EMPTY_ARRAY;
+    private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);
+
+    public IndexUpgradeInfoRequest(String... indices) {
+        indices(indices);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeStringArray(indices);
+        indicesOptions.writeIndicesOptions(out);
+    }
+
+    @Override
+    public String[] indices() {
+        return indices;
+    }
+
+    @Override
+    public IndexUpgradeInfoRequest indices(String... indices) {
+        this.indices = Objects.requireNonNull(indices, "indices cannot be null");
+        return this;
+    }
+
+    @Override
+    public IndicesOptions indicesOptions() {
+        return indicesOptions;
+    }
+
+    public void indicesOptions(IndicesOptions indicesOptions) {
+        this.indicesOptions = indicesOptions;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        indices = in.readStringArray();
+        indicesOptions = IndicesOptions.readIndicesOptions(in);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IndexUpgradeInfoRequest request = (IndexUpgradeInfoRequest) o;
+        return Arrays.equals(indices, request.indices) &&
+            Objects.equals(indicesOptions.toString(), request.indicesOptions.toString());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString());
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java
new file mode 100644
index 0000000000000..17115ac9b1711
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject {
+
+    private static final ParseField INDICES = new ParseField("indices");
+    private static final ParseField ACTION_REQUIRED = new ParseField("action_required");
+
+    private static final ConstructingObjectParser<IndexUpgradeInfoResponse, Void> PARSER =
+        new ConstructingObjectParser<>("IndexUpgradeInfoResponse",
+            true,
+            (a, c) -> {
+                @SuppressWarnings("unchecked")
+                Map<String, Object> map = (Map<String, Object>) a[0];
+                Map<String, UpgradeActionRequired> actionsRequired = map.entrySet().stream()
+                    .filter(e -> {
+                        if (e.getValue() instanceof Map == false) {
+                            return false;
+                        }
+                        @SuppressWarnings("unchecked")
+                        Map<String, Object> value = (Map<String, Object>) e.getValue();
+                        return value.containsKey(ACTION_REQUIRED.getPreferredName());
+                    })
+                    .collect(Collectors.toMap(
+                        Map.Entry::getKey,
+                        e -> {
+                            @SuppressWarnings("unchecked")
+                            Map<String, Object> value = (Map<String, Object>) e.getValue();
+                            return UpgradeActionRequired.fromString((String) value.get(ACTION_REQUIRED.getPreferredName()));
+                        }
+                    ));
+                return new IndexUpgradeInfoResponse(actionsRequired);
+            });
+
+    static {
+        PARSER.declareObject(constructorArg(), (p, c) -> p.map(), INDICES);
+    }
+
+
+    private Map<String, UpgradeActionRequired> actions;
+
+    public IndexUpgradeInfoResponse() {
+
+    }
+
+    public IndexUpgradeInfoResponse(Map<String, UpgradeActionRequired> actions) {
+        this.actions = actions;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
+    }
+
+    public Map<String, UpgradeActionRequired> getActions() {
+        return actions;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        {
+            builder.startObject(INDICES.getPreferredName());
+            for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
+                builder.startObject(entry.getKey());
+                {
+                    builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString());
+                }
+                builder.endObject();
+            }
+            builder.endObject();
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        IndexUpgradeInfoResponse response = (IndexUpgradeInfoResponse) o;
+        return Objects.equals(actions, response.actions);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(actions);
+    }
+
+    public static IndexUpgradeInfoResponse fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java
new file mode 100644
index 0000000000000..dce1c7d18f50e
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+
+import java.io.IOException;
+import java.util.Locale;
+
+/**
+ * Indicates the type of the upgrade required for the index
+ */
+public enum UpgradeActionRequired implements Writeable {
+    NOT_APPLICABLE, // Indicates that the check is not applicable to this index type, the next check will be performed
+    UP_TO_DATE, // Indicates that the check finds this index to be up to date - no additional checks are required
+    REINDEX, // The index should be reindexed
+    UPGRADE; // The index should go through the upgrade procedure
+
+    public static UpgradeActionRequired fromString(String value) {
+        return UpgradeActionRequired.valueOf(value.toUpperCase(Locale.ROOT));
+    }
+
+    public static UpgradeActionRequired readFromStream(StreamInput in) throws IOException {
+        return in.readEnum(UpgradeActionRequired.class);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeEnum(this);
+    }
+
+    @Override
+    public String toString() {
+        return name().toLowerCase(Locale.ROOT);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java
new file mode 100644
index 0000000000000..7c52f6a8fd4f1
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for the default distribution's Migration
+ * APIs.
+ */
+package org.elasticsearch.protocol.xpack.migration;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java
new file mode 100644
index 0000000000000..3ed877d08cccd
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+/**
+ * Request and Response objects for miscellaneous X-Pack APIs.
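+ * (such as the X-Pack Info API).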
+ */
+package org.elasticsearch.protocol.xpack;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java
new file mode 100644
index 0000000000000..e5b116a3a7a98
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.security;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+
+/**
+ * An authenticated user
+ */
+public class User implements ToXContentObject {
+
+    private final String username;
+    private final String[] roles;
+    private final User authenticatedUser;
+    private final Map<String, Object> metadata;
+    private final boolean enabled;
+
+    @Nullable private final String fullName;
+    @Nullable private final String email;
+
+    public User(String username, String... roles) {
+        this(username, roles, null, null, null, true);
+    }
+
+    public User(String username, String[] roles, User authenticatedUser) {
+        this(username, roles, null, null, null, true, authenticatedUser);
+    }
+
+    public User(User user, User authenticatedUser) {
+        this(user.principal(), user.roles(), user.fullName(), user.email(), user.metadata(), user.enabled(), authenticatedUser);
+    }
+
+    public User(String username, String[] roles, String fullName, String email, Map<String, Object> metadata, boolean enabled) {
+        this(username, roles, fullName, email, metadata, enabled, null);
+    }
+
+    private User(String username, String[] roles, String fullName, String email, Map<String, Object> metadata, boolean enabled,
+                 User authenticatedUser) {
+        this.username = username;
+        this.roles = roles == null ? Strings.EMPTY_ARRAY : roles;
+        this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap();
+        this.fullName = fullName;
+        this.email = email;
+        this.enabled = enabled;
+        assert (authenticatedUser == null || authenticatedUser.isRunAs() == false) : "the authenticated user should not be a run_as user";
+        this.authenticatedUser = authenticatedUser;
+    }
+
+    /**
+     * @return The principal of this user - effectively serving as the
+     *         unique identity of the user.
+     */
+    public String principal() {
+        return this.username;
+    }
+
+    /**
+     * @return The roles this user is associated with. The roles are
+     *         identified by their unique names and each represents a
+     *         set of permissions
+     */
+    public String[] roles() {
+        return this.roles;
+    }
+
+    /**
+     * @return The metadata that is associated with this user. Can never be {@code null}.
+     */
+    public Map<String, Object> metadata() {
+        return metadata;
+    }
+
+    /**
+     * @return The full name of this user. May be {@code null}.
+     */
+    public String fullName() {
+        return fullName;
+    }
+
+    /**
+     * @return The email of this user. May be {@code null}.
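+     *         Like {@link #fullName()}, this is optional descriptive metadata about the user.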
+     */
+    public String email() {
+        return email;
+    }
+
+    /**
+     * @return whether the user is enabled or not
+     */
+    public boolean enabled() {
+        return enabled;
+    }
+
+    /**
+     * @return The user that was originally authenticated.
+     * This may be the user itself, or a different user which used runAs.
+     */
+    public User authenticatedUser() {
+        return authenticatedUser == null ? this : authenticatedUser;
+    }
+
+    /** Return true if this user was not the originally authenticated user, false otherwise. */
+    public boolean isRunAs() {
+        return authenticatedUser != null;
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("User[username=").append(username);
+        sb.append(",roles=[").append(Strings.arrayToCommaDelimitedString(roles)).append("]");
+        sb.append(",fullName=").append(fullName);
+        sb.append(",email=").append(email);
+        sb.append(",metadata=");
+        sb.append(metadata);
+        if (authenticatedUser != null) {
+            sb.append(",authenticatedUser=[").append(authenticatedUser.toString()).append("]");
+        }
+        sb.append("]");
+        return sb.toString();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o instanceof User == false) return false;
+
+        User user = (User) o;
+
+        if (!username.equals(user.username)) return false;
+        if (!Arrays.equals(roles, user.roles)) return false;
+        if (authenticatedUser != null ? !authenticatedUser.equals(user.authenticatedUser) : user.authenticatedUser != null) return false;
+        if (!metadata.equals(user.metadata)) return false;
+        if (fullName != null ? !fullName.equals(user.fullName) : user.fullName != null) return false;
+        return !(email != null ? !email.equals(user.email) : user.email != null);
+
+    }
+
+    @Override
+    public int hashCode() {
+        int result = username.hashCode();
+        result = 31 * result + Arrays.hashCode(roles);
+        result = 31 * result + (authenticatedUser != null ? authenticatedUser.hashCode() : 0);
+        result = 31 * result + metadata.hashCode();
+        result = 31 * result + (fullName != null ? fullName.hashCode() : 0);
email.hashCode() : 0); + return result; + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.USERNAME.getPreferredName(), principal()); + builder.array(Fields.ROLES.getPreferredName(), roles()); + builder.field(Fields.FULL_NAME.getPreferredName(), fullName()); + builder.field(Fields.EMAIL.getPreferredName(), email()); + builder.field(Fields.METADATA.getPreferredName(), metadata()); + builder.field(Fields.ENABLED.getPreferredName(), enabled()); + return builder.endObject(); + } + + public static User partialReadFrom(String username, StreamInput input) throws IOException { + String[] roles = input.readStringArray(); + Map metadata = input.readMap(); + String fullName = input.readOptionalString(); + String email = input.readOptionalString(); + boolean enabled = input.readBoolean(); + User outerUser = new User(username, roles, fullName, email, metadata, enabled, null); + boolean hasInnerUser = input.readBoolean(); + if (hasInnerUser) { + User innerUser = readFrom(input); + if (input.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: runas user was read first, so reverse outer and inner + return new User(innerUser, outerUser); + } else { + return new User(outerUser, innerUser); + } + } else { + return outerUser; + } + } + + public static User readFrom(StreamInput input) throws IOException { + final boolean isInternalUser = input.readBoolean(); + assert isInternalUser == false: "should always return false. Internal users should use the InternalUserSerializationHelper"; + final String username = input.readString(); + return partialReadFrom(username, input); + } + + public static void writeTo(User user, StreamOutput output) throws IOException { + if (user.authenticatedUser == null) { + // no backcompat necessary, since there is no inner user + writeUser(user, output); + } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: write runas user as the "inner" user + writeUser(user.authenticatedUser, output); + output.writeBoolean(true); + writeUser(user, output); + } else { + writeUser(user, output); + output.writeBoolean(true); + writeUser(user.authenticatedUser, output); + } + output.writeBoolean(false); // last user written, regardless of bwc, does not have an inner user + } + + /** Write just the given {@link User}, but not the inner {@link #authenticatedUser}. 
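+     * The serialized form starts with a boolean flag, always {@code false} here, marking that
+     * this is not an internal/system user (internal users go through a dedicated serialization helper).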
*/ + private static void writeUser(User user, StreamOutput output) throws IOException { + output.writeBoolean(false); // not a system user + output.writeString(user.username); + output.writeStringArray(user.roles); + output.writeMap(user.metadata); + output.writeOptionalString(user.fullName); + output.writeOptionalString(user.email); + output.writeBoolean(user.enabled); + } + + public interface Fields { + ParseField USERNAME = new ParseField("username"); + ParseField PASSWORD = new ParseField("password"); + ParseField PASSWORD_HASH = new ParseField("password_hash"); + ParseField ROLES = new ParseField("roles"); + ParseField FULL_NAME = new ParseField("full_name"); + ParseField EMAIL = new ParseField("email"); + ParseField METADATA = new ParseField("metadata"); + ParseField ENABLED = new ParseField("enabled"); + ParseField TYPE = new ParseField("type"); + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java new file mode 100644 index 0000000000000..ce627b267f31e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Security + * APIs. + */ +package org.elasticsearch.protocol.xpack.security; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java new file mode 100644 index 0000000000000..3480d8485f069 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ValidateActions;
+import org.elasticsearch.action.support.master.MasterNodeRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.unit.TimeValue;
+
+import java.io.IOException;
+
+/**
+ * A delete watch request to delete a watch by name (id)
+ */
+public class DeleteWatchRequest extends MasterNodeRequest<DeleteWatchRequest> {
+
+    private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(10);
+
+    private String id;
+    private long version = Versions.MATCH_ANY;
+
+    public DeleteWatchRequest() {
+        this(null);
+    }
+
+    public DeleteWatchRequest(String id) {
+        this.id = id;
+        masterNodeTimeout(DEFAULT_TIMEOUT);
+    }
+
+    /**
+     * @return The name of the watch to be deleted
+     */
+    public String getId() {
+        return id;
+    }
+
+    /**
+     * Sets the name of the watch to be deleted
+     */
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        ActionRequestValidationException validationException = null;
+        if (id == null) {
+            validationException = ValidateActions.addValidationError("watch id is missing", validationException);
+        } else if (PutWatchRequest.isValidId(id) == false) {
+            validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException);
+        }
+        return validationException;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        id = in.readString();
+        version = in.readLong();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(id);
+        out.writeLong(version);
+    }
+
+    @Override
+    public String toString() {
+        return "delete [" + id + "]";
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java
new file mode 100644
index 0000000000000..39cd5e966fa12
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new); + static { + PARSER.declareString(DeleteWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(DeleteWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(DeleteWatchResponse::setFound, new ParseField("found")); + } + + private String id; + private long version; + private boolean found; + + public DeleteWatchResponse() { + } + + public DeleteWatchResponse(String id, long version, boolean found) { + this.id = id; + this.version = version; + this.found = found; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isFound() { + return found; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setFound(boolean found) { + this.found = found; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DeleteWatchResponse that = (DeleteWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(found, that.found); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, found); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + found = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(found); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("found", found) + .endObject(); + } + + public static DeleteWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java new file mode 100644 index 0000000000000..abc42b149194b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.regex.Pattern; + +/** + * This request class contains the data needed to create a watch along with the name of the watch. + * The name of the watch will become the ID of the indexed document. + */ +public class PutWatchRequest extends MasterNodeRequest { + + private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(10); + private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+"); + + private String id; + private BytesReference source; + private XContentType xContentType = XContentType.JSON; + private boolean active = true; + private long version = Versions.MATCH_ANY; + + public PutWatchRequest() {} + + public PutWatchRequest(StreamInput in) throws IOException { + readFrom(in); + } + + public PutWatchRequest(String id, BytesReference source, XContentType xContentType) { + this.id = id; + this.source = source; + this.xContentType = xContentType; + masterNodeTimeout(DEFAULT_TIMEOUT); + } + + /** + * @return The name that will be the ID of the indexed document + */ + public String getId() { + return id; + } + + /** + * Set the watch name + */ + public void setId(String id) { + this.id = id; + } + + /** + * @return The source of the watch + */ + public BytesReference getSource() { + return source; + } + + /** + * Set the source of the watch + */ + public void setSource(BytesReference source, XContentType xContentType) { + this.source = source; + this.xContentType = xContentType; + } + + /** + * @return The initial active state of the watch (defaults to {@code true}, e.g. 
"active") + */ + public boolean isActive() { + return active; + } + + /** + * Sets the initial active state of the watch + */ + public void setActive(boolean active) { + this.active = active; + } + + /** + * Get the content type for the source + */ + public XContentType xContentType() { + return xContentType; + } + + public long getVersion() { + return version; + } + + public void setVersion(long version) { + this.version = version; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = ValidateActions.addValidationError("watch id is missing", validationException); + } else if (isValidId(id) == false) { + validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); + } + if (source == null) { + validationException = ValidateActions.addValidationError("watch source is missing", validationException); + } + if (xContentType == null) { + validationException = ValidateActions.addValidationError("request body is missing", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + source = in.readBytesReference(); + active = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_5_3_0)) { + xContentType = in.readEnum(XContentType.class); + } else { + xContentType = XContentHelper.xContentType(source); + } + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + version = in.readZLong(); + } else { + version = Versions.MATCH_ANY; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + out.writeBoolean(active); + if (out.getVersion().onOrAfter(Version.V_5_3_0)) { + out.writeEnum(xContentType); + } + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + out.writeZLong(version); + } + } + + public static boolean isValidId(String id) { + return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java new file mode 100644 index 0000000000000..f6e55ff555339 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class PutWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new); + static { + PARSER.declareString(PutWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created")); + } + + private String id; + private long version; + private boolean created; + + public PutWatchResponse() { + } + + public PutWatchResponse(String id, long version, boolean created) { + this.id = id; + this.version = version; + this.created = created; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setCreated(boolean created) { + this.created = created; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isCreated() { + return created; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PutWatchResponse that = (PutWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(created, that.created); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, created); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(created); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + created = in.readBoolean(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("created", created) + .endObject(); + } + + public static PutWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java new file mode 100644 index 0000000000000..0d9edf3b5c035 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Watcher + * APIs. 
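+ * (for example {@link PutWatchRequest} and {@link DeleteWatchResponse}).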
+ */
+package org.elasticsearch.protocol.xpack.watcher;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java
new file mode 100644
index 0000000000000..fac99959c536a
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack;
+
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo;
+import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
+import org.elasticsearch.protocol.xpack.license.LicenseStatus;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.io.IOException;
+
+public class XPackInfoResponseTests extends AbstractStreamableXContentTestCase<XPackInfoResponse> {
+    @Override
+    protected XPackInfoResponse doParseInstance(XContentParser parser) throws IOException {
+        return XPackInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected XPackInfoResponse createBlankInstance() {
+        return new XPackInfoResponse();
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return path -> path.equals("features")
+                || (path.startsWith("features") && path.endsWith("native_code_info"));
+    }
+
+    @Override
+    protected ToXContent.Params getToXContentParams() {
+        Map<String, String> params = new HashMap<>();
+        if (randomBoolean()) {
+            params.put("human", randomBoolean() ? "true" : "false");
+        }
+        if (randomBoolean()) {
+            params.put("categories", "_none");
+        }
+        return new ToXContent.MapParams(params);
+    }
+
+    @Override
+    protected XPackInfoResponse createTestInstance() {
+        return new XPackInfoResponse(
+            randomBoolean() ? null : randomBuildInfo(),
+            randomBoolean() ? null : randomLicenseInfo(),
+            randomBoolean() ? null : randomFeatureSetsInfo());
+    }
+
+    @Override
+    protected XPackInfoResponse mutateInstance(XPackInfoResponse response) {
+        @SuppressWarnings("unchecked")
+        Function<XPackInfoResponse, XPackInfoResponse> mutator = randomFrom(
+            r -> new XPackInfoResponse(
+                    mutateBuildInfo(r.getBuildInfo()),
+                    r.getLicenseInfo(),
+                    r.getFeatureSetsInfo()),
+            r -> new XPackInfoResponse(
+                    r.getBuildInfo(),
+                    mutateLicenseInfo(r.getLicenseInfo()),
+                    r.getFeatureSetsInfo()),
+            r -> new XPackInfoResponse(
+                    r.getBuildInfo(),
+                    r.getLicenseInfo(),
+                    mutateFeatureSetsInfo(r.getFeatureSetsInfo())));
+        return mutator.apply(response);
+    }
+
+    private BuildInfo randomBuildInfo() {
+        return new BuildInfo(
+            randomAlphaOfLength(10),
+            randomAlphaOfLength(15));
+    }
+
+    private BuildInfo mutateBuildInfo(BuildInfo buildInfo) {
+        if (buildInfo == null) {
+            return randomBuildInfo();
+        }
+        return null;
+    }
+
+    private LicenseInfo randomLicenseInfo() {
+        return new LicenseInfo(
+            randomAlphaOfLength(10),
+            randomAlphaOfLength(4),
+            randomAlphaOfLength(5),
+            randomFrom(LicenseStatus.values()),
+            randomLong());
+    }
+
+    private LicenseInfo mutateLicenseInfo(LicenseInfo licenseInfo) {
+        if (licenseInfo == null) {
+            return randomLicenseInfo();
+        }
+        return null;
+    }
+
+    private FeatureSetsInfo randomFeatureSetsInfo() {
+        int size = between(0, 10);
+        Set<FeatureSet> featureSets = new HashSet<>(size);
+        while (featureSets.size() < size) {
+            featureSets.add(randomFeatureSet());
+        }
+        return new FeatureSetsInfo(featureSets);
+    }
+
+    private FeatureSetsInfo mutateFeatureSetsInfo(FeatureSetsInfo featureSetsInfo) {
+        if (featureSetsInfo == null) {
+            return randomFeatureSetsInfo();
+        }
+        return null;
+    }
+
+    private FeatureSet randomFeatureSet() {
+        return new FeatureSet(
+            randomAlphaOfLength(5),
+            randomBoolean() ? null : randomAlphaOfLength(20),
+            randomBoolean(),
+            randomBoolean(),
+            randomNativeCodeInfo());
+    }
+
+    private Map<String, Object> randomNativeCodeInfo() {
+        if (randomBoolean()) {
+            return null;
+        }
+        int size = between(0, 10);
+        Map<String, Object> nativeCodeInfo = new HashMap<>(size);
+        while (nativeCodeInfo.size() < size) {
+            nativeCodeInfo.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
+        }
+        return nativeCodeInfo;
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java
new file mode 100644
index 0000000000000..c4e29d7c23005
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.common;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProtocolUtilsTests extends ESTestCase {
+
+    public void testMapStringEqualsAndHash() {
+        assertTrue(ProtocolUtils.equals(null, null));
+        assertFalse(ProtocolUtils.equals(null, new HashMap<>()));
+        assertFalse(ProtocolUtils.equals(new HashMap<>(), null));
+
+        Map<String, String[]> a = new HashMap<>();
+        a.put("foo", new String[] { "a", "b" });
+        a.put("bar", new String[] { "b", "c" });
+
+        Map<String, String[]> b = new HashMap<>();
+        b.put("foo", new String[] { "a", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "c", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "b", "c" });
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+
+        b.put("baz", new String[] { "b", "c" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        a.put("non", null);
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("non", null);
+        b.remove("baz");
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
new file mode 100644
index 0000000000000..3226198191cef
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.graph;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ShardOperationFailedException;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class GraphExploreResponseTests extends AbstractXContentTestCase<GraphExploreResponse> {
+
+    @Override
+    protected GraphExploreResponse createTestInstance() {
+        return createInstance(0);
+    }
+
+    private static GraphExploreResponse createInstance(int numFailures) {
+        int numItems = randomIntBetween(4, 128);
+        boolean timedOut = randomBoolean();
+        boolean showDetails = randomBoolean();
+        long overallTookInMillis = randomNonNegativeLong();
+        Map<Vertex.VertexId, Vertex> vertices = new HashMap<>();
+        Map<Connection.ConnectionId, Connection> connections = new HashMap<>();
+        ShardOperationFailedException[] failures = new ShardOperationFailedException[numFailures];
+        for (int i = 0; i < failures.length; i++) {
+            failures[i] = new ShardSearchFailure(new ElasticsearchException("an error"));
+        }
+
+        // Create random set of vertices
+        for (int i = 0; i < numItems; i++) {
+            Vertex v = new Vertex("field1", randomAlphaOfLength(5), randomDouble(), 0,
+                    showDetails ? randomIntBetween(100, 200) : 0,
+                    showDetails ? randomIntBetween(1, 100) : 0);
+            vertices.put(v.getId(), v);
+        }
+
+        // Wire up half the vertices randomly
+        Vertex[] vs = vertices.values().toArray(new Vertex[vertices.size()]);
+        for (int i = 0; i < numItems / 2; i++) {
+            Vertex v1 = vs[randomIntBetween(0, vs.length - 1)];
+            Vertex v2 = vs[randomIntBetween(0, vs.length - 1)];
+            if (v1 != v2) {
+                Connection conn = new Connection(v1, v2, randomDouble(), randomLongBetween(1, 10));
+                connections.put(conn.getId(), conn);
+            }
+        }
+        return new GraphExploreResponse(overallTookInMillis, timedOut, failures, vertices, connections, showDetails);
+    }
+
+    @Override
+    protected String[] getShuffleFieldsExceptions() {
+        return new String[]{"vertices"};
+    }
+
+    private static GraphExploreResponse createTestInstanceWithFailures() {
+        return createInstance(randomIntBetween(1, 128));
+    }
+
+    @Override
+    protected GraphExploreResponse doParseInstance(XContentParser parser) throws IOException {
+        return GraphExploreResponse.fromXContext(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() {
+        return field -> field.startsWith("responses");
+    }
+
+    @Override
+    protected void assertEqualInstances(GraphExploreResponse expectedInstance, GraphExploreResponse newInstance) {
+        assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook()));
+        assertThat(newInstance.isTimedOut(), equalTo(expectedInstance.isTimedOut()));
+
+        Connection[] newConns = newInstance.getConnections().toArray(new Connection[0]);
+        Connection[] expectedConns = expectedInstance.getConnections().toArray(new Connection[0]);
+        assertArrayEquals(expectedConns, newConns);
+
+        Vertex[] newVertices = newInstance.getVertices().toArray(new Vertex[0]);
+        Vertex[] expectedVertices = expectedInstance.getVertices().toArray(new Vertex[0]);
+        assertArrayEquals(expectedVertices, newVertices);
+
+        ShardOperationFailedException[] newFailures = newInstance.getShardFailures();
+        ShardOperationFailedException[] expectedFailures = expectedInstance.getShardFailures();
+        assertEquals(expectedFailures.length, newFailures.length);
+    }
+
+    /**
+     * Test parsing {@link GraphExploreResponse} with inner failures as they don't support asserting on xcontent equivalence, given
+     * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()}
+     * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end.
+     */
+    public void testFromXContentWithFailures() throws IOException {
+        Supplier<GraphExploreResponse> instanceSupplier = GraphExploreResponseTests::createTestInstanceWithFailures;
+        // with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata,
+        // but that does not bother our assertions, as we only want to test that we don't break.
+        boolean supportsUnknownFields = true;
+        // exceptions are not of the same type whenever parsed back
+        boolean assertToXContentEquivalence = false;
+        AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY,
+                getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance,
+                this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java
new file mode 100644
index 0000000000000..7149477d00765
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import java.io.IOException;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class LicenseStatusTests extends ESTestCase {
+    public void testSerialization() throws IOException {
+        LicenseStatus status = randomFrom(LicenseStatus.values());
+        assertSame(status, copyWriteable(status, writableRegistry(), LicenseStatus::readFrom));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java
new file mode 100644
index 0000000000000..a09fd6fb99b45
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.function.Predicate;
+
+public class PutLicenseResponseTests extends AbstractStreamableXContentTestCase<PutLicenseResponse> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        // The structure of the response is such that unknown fields inside acknowledge cannot be supported since they
+        // are treated as messages from new services
+        return p -> p.startsWith("acknowledge");
+    }
+
+    @Override
+    protected PutLicenseResponse createTestInstance() {
+        boolean acknowledged = randomBoolean();
+        LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED);
+        String messageHeader;
+        Map<String, String[]> ackMessages;
+        if (randomBoolean()) {
+            messageHeader = randomAlphaOfLength(10);
+            ackMessages = randomAckMessages();
+        } else {
+            messageHeader = null;
+            ackMessages = Collections.emptyMap();
+        }
+
+        return new PutLicenseResponse(acknowledged, status, messageHeader, ackMessages);
+    }
+
+    private static Map<String, String[]> randomAckMessages() {
+        int nFeatures = randomIntBetween(1, 5);
+
+        Map<String, String[]> ackMessages = new HashMap<>();
+
+        for (int i = 0; i < nFeatures; i++) {
+            String feature = randomAlphaOfLengthBetween(9, 15);
+            int nMessages = randomIntBetween(1, 5);
+            String[] messages = new String[nMessages];
+            for (int j = 0; j < nMessages; j++) {
+                messages[j] = randomAlphaOfLengthBetween(10, 30);
+            }
+            ackMessages.put(feature, messages);
+        }
+
+        return ackMessages;
+    }
+
+    @Override
+    protected PutLicenseResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutLicenseResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected PutLicenseResponse createBlankInstance() {
+        return new PutLicenseResponse();
+    }
+
+    @Override
+    protected PutLicenseResponse mutateInstance(PutLicenseResponse response) {
+        @SuppressWarnings("unchecked")
+        Function<PutLicenseResponse, PutLicenseResponse> mutator = randomFrom(
+            r -> new PutLicenseResponse(
+                r.isAcknowledged() == false,
+                r.status(),
+                r.acknowledgeHeader(),
+                r.acknowledgeMessages()),
+            r -> new PutLicenseResponse(
+                r.isAcknowledged(),
+                mutateStatus(r.status()),
+                r.acknowledgeHeader(),
+                r.acknowledgeMessages()),
+            r -> {
+                if (r.acknowledgeMessages().isEmpty()) {
+                    return new PutLicenseResponse(
+                        r.isAcknowledged(),
+                        r.status(),
+                        randomAlphaOfLength(10),
+                        randomAckMessages()
+                    );
+                } else {
+                    return new PutLicenseResponse(r.isAcknowledged(), r.status());
+                }
+            }
+        );
+        return mutator.apply(response);
+    }
+
+    private LicensesStatus mutateStatus(LicensesStatus status) {
+        return randomValueOtherThan(status, () -> randomFrom(LicensesStatus.values()));
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
new file mode 100644
index 0000000000000..ba87cca84ee85
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.test.AbstractStreamableTestCase;
+
+public class IndexUpgradeInfoRequestTests extends AbstractStreamableTestCase<IndexUpgradeInfoRequest> {
+    @Override
+    protected IndexUpgradeInfoRequest createTestInstance() {
+        int indexCount = randomInt(4);
+        String[] indices = new String[indexCount];
+        for (int i = 0; i < indexCount; i++) {
+            indices[i] = randomAlphaOfLength(10);
+        }
+        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(indices);
+        if (randomBoolean()) {
+            request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
+        }
+        return request;
+    }
+
+    public void testNullIndices() {
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[]) null));
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[]) null));
+    }
+
+    @Override
+    protected IndexUpgradeInfoRequest createBlankInstance() {
+        return new IndexUpgradeInfoRequest();
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
new file mode 100644
index 0000000000000..57f01a4454e02
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class IndexUpgradeInfoResponseTests extends AbstractStreamableXContentTestCase<IndexUpgradeInfoResponse> {
+    @Override
+    protected IndexUpgradeInfoResponse doParseInstance(XContentParser parser) {
+        return IndexUpgradeInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createBlankInstance() {
+        return new IndexUpgradeInfoResponse();
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createTestInstance() {
+        return randomIndexUpgradeInfoResponse(randomIntBetween(0, 10));
+    }
+
+    private static IndexUpgradeInfoResponse randomIndexUpgradeInfoResponse(int numIndices) {
+        Map<String, UpgradeActionRequired> actions = new HashMap<>();
+        for (int i = 0; i < numIndices; i++) {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse mutateInstance(IndexUpgradeInfoResponse instance) {
+        if (instance.getActions().size() == 0) {
+            return randomIndexUpgradeInfoResponse(1);
+        }
+        Map<String, UpgradeActionRequired> actions = new HashMap<>(instance.getActions());
+        if (randomBoolean()) {
+            Iterator<Map.Entry<String, UpgradeActionRequired>> iterator = actions.entrySet().iterator();
+            iterator.next();
+            iterator.remove();
+        } else {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
new file mode 100644
index 0000000000000..28a27e639985d
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.security;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.is;
+
+public class UserTests extends ESTestCase {
+
+    public void testUserToString() {
+        User user = new User("u1", "r1");
+        assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={}]"));
+        user = new User("u1", new String[] { "r1", "r2" }, "user1", "user1@domain.com", Collections.singletonMap("key", "val"), true);
+        assertThat(user.toString(), is("User[username=u1,roles=[r1,r2],fullName=user1,email=user1@domain.com,metadata={key=val}]"));
+        user = new User("u1", new String[] {"r1"}, new User("u2", "r2", "r3"));
+        assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={},"
+                + "authenticatedUser=[User[username=u2,roles=[r2,r3],fullName=null,email=null,metadata={}]]]"));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java
new file mode 100644
index 0000000000000..209bc790a8c54
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class DeleteWatchResponseTests extends AbstractXContentTestCase<DeleteWatchResponse> {
+
+    @Override
+    protected DeleteWatchResponse createTestInstance() {
+        String id = randomAlphaOfLength(10);
+        long version = randomLongBetween(1, 10);
+        boolean found = randomBoolean();
+        return new DeleteWatchResponse(id, version, found);
+    }
+
+    @Override
+    protected DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException {
+        return DeleteWatchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java
new file mode 100644
index 0000000000000..1fc2f61b684c7
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class PutWatchResponseTests extends AbstractXContentTestCase<PutWatchResponse> {
+
+    @Override
+    protected PutWatchResponse createTestInstance() {
+        String id = randomAlphaOfLength(10);
+        long version = randomLongBetween(1, 10);
+        boolean created = randomBoolean();
+        return new PutWatchResponse(id, version, created);
+    }
+
+    @Override
+    protected PutWatchResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutWatchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}

From 610a9c8c8251125fa14002ca3037310ac3363e06 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Mon, 27 Aug 2018 11:51:28 -0400
Subject: [PATCH 18/18] Introduce mapping version to index metadata (#33147)

This commit introduces a mapping version to index metadata. This value
is monotonically increasing and is incremented on every mapping update.
It will be useful in cross-cluster replication: a follower can request
mapping updates from the leader only when the mapping has actually
changed, as opposed to today's strategy of requesting a mapping update
any time the index metadata changes. As index metadata updates occur
for many reasons other than mapping updates, the current strategy leads
to unnecessary requests and work in cross-cluster replication.
---
 .../elasticsearch/cluster/ClusterState.java   |  2 +-
 .../cluster/metadata/IndexMetaData.java       | 52 ++++++++++++++++++-
 .../metadata/MetaDataMappingService.java      | 18 +++++--
 .../org/elasticsearch/index/IndexService.java |  4 +-
 .../index/mapper/MapperService.java           | 50 ++++++++++++++++--
 .../cluster/IndicesClusterStateService.java   |  6 +--
 .../snapshots/RestoreService.java             |  1 +
 .../metadata/MetaDataMappingServiceTests.java | 30 +++++++++++
 .../gateway/MetaDataStateFormatTests.java     |  1 +
 .../index/mapper/DynamicMappingTests.java     | 10 ++++
 .../index/mapper/UpdateMappingTests.java      | 29 +++++++++++
 ...actIndicesClusterStateServiceTestCase.java |  2 +-
 12 files changed, 188 insertions(+), 17 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java
index 276e00a2ba3db..f7606d4bb061f 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java
@@ -284,7 +284,7 @@ public String toString() {
         final String TAB = "   ";
         for (IndexMetaData indexMetaData : metaData) {
             sb.append(TAB).append(indexMetaData.getIndex());
-            sb.append(": v[").append(indexMetaData.getVersion()).append("]\n");
+            sb.append(": v[").append(indexMetaData.getVersion()).append("], mv[").append(indexMetaData.getMappingVersion()).append("]\n");
             for (int shard = 0; shard < indexMetaData.getNumberOfShards(); shard++) {
                 sb.append(TAB).append(TAB).append(shard).append(": ");
                 sb.append("p_term [").append(indexMetaData.primaryTerm(shard)).append("], ");
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index 5438e39904cb1..28b391123738a 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -23,6 +23,7 @@
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.elasticsearch.Assertions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -291,6 +292,7 @@ public Iterator> settings() {
 
     public static final String KEY_IN_SYNC_ALLOCATIONS = "in_sync_allocations";
     static final String KEY_VERSION = "version";
+    static final String KEY_MAPPING_VERSION = "mapping_version";
     static final String KEY_ROUTING_NUM_SHARDS = "routing_num_shards";
     static final String KEY_SETTINGS = "settings";
     static final String KEY_STATE = "state";
@@ -309,6 +311,9 @@ public Iterator> settings() {
 
     private final Index index;
     private final long version;
+
+    private final long mappingVersion;
+
     private final long[] primaryTerms;
 
     private final State state;
@@ -336,7 +341,7 @@ public Iterator> settings() {
     private final ActiveShardCount waitForActiveShards;
     private final ImmutableOpenMap<String, RolloverInfo> rolloverInfos;
 
-    private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
+    private IndexMetaData(Index index, long version, long mappingVersion, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
                           ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases,
                           ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds,
                           DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters,
@@ -345,6 +350,8 @@
         this.index = index;
         this.version = version;
+        assert mappingVersion >= 0 : mappingVersion;
+        this.mappingVersion = mappingVersion;
         this.primaryTerms = primaryTerms;
         assert primaryTerms.length == numberOfShards;
         this.state = state;
@@ -394,6 +401,9 @@ public long getVersion() {
         return this.version;
     }
 
+    public long getMappingVersion() {
+        return mappingVersion;
+    }
 
     /**
      * The term of the current selected primary. This is a non-negative number incremented when
@@ -644,6 +654,7 @@ private static class IndexMetaDataDiff implements Diff {
 
         private final String index;
         private final int routingNumShards;
        private final long version;
+        private final long mappingVersion;
         private final long[] primaryTerms;
         private final State state;
         private final Settings settings;
@@ -656,6 +667,7 @@ private static class IndexMetaDataDiff implements Diff {
         IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) {
             index = after.index.getName();
             version = after.version;
+            mappingVersion = after.mappingVersion;
             routingNumShards = after.routingNumShards;
             state = after.state;
             settings = after.settings;
@@ -672,6 +684,11 @@ private static class IndexMetaDataDiff implements Diff {
             index = in.readString();
             routingNumShards = in.readInt();
             version = in.readLong();
+            if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
+                mappingVersion = in.readVLong();
+            } else {
+                mappingVersion = 1;
+            }
             state = State.fromId(in.readByte());
             settings = Settings.readSettingsFromStream(in);
             primaryTerms = in.readVLongArray();
@@ -708,6 +725,9 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeString(index);
             out.writeInt(routingNumShards);
             out.writeLong(version);
+            if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
+                out.writeVLong(mappingVersion);
+            }
             out.writeByte(state.id);
             Settings.writeSettingsToStream(settings, out);
             out.writeVLongArray(primaryTerms);
@@ -724,6 +744,7 @@ public void writeTo(StreamOutput out) throws IOException {
         public IndexMetaData apply(IndexMetaData part) {
             Builder builder = builder(index);
             builder.version(version);
+            builder.mappingVersion(mappingVersion);
             builder.setRoutingNumShards(routingNumShards);
             builder.state(state);
             builder.settings(settings);
@@ -740,6 +761,11 @@ public IndexMetaData apply(IndexMetaData part) {
     public static IndexMetaData readFrom(StreamInput in) throws IOException {
         Builder builder = new Builder(in.readString());
         builder.version(in.readLong());
+        if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
+            builder.mappingVersion(in.readVLong());
+        } else {
+            builder.mappingVersion(1);
+        }
         builder.setRoutingNumShards(in.readInt());
         builder.state(State.fromId(in.readByte()));
         builder.settings(readSettingsFromStream(in));
@@ -779,6 +805,9 @@ public static IndexMetaData readFrom(StreamInput in) throws IOException {
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(index.getName()); // uuid will come as part of settings
         out.writeLong(version);
+        if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
+            out.writeVLong(mappingVersion);
+        }
         out.writeInt(routingNumShards);
         out.writeByte(state.id());
         writeSettingsToStream(settings, out);
@@ -822,6 +851,7 @@ public static class Builder {
 
         private String index;
         private State state = State.OPEN;
         private long version = 1;
+        private long mappingVersion = 1;
         private long[] primaryTerms = null;
         private Settings settings = Settings.Builder.EMPTY_SETTINGS;
         private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings;
@@ -844,6 +874,7 @@ public Builder(IndexMetaData indexMetaData) {
             this.index = indexMetaData.getIndex().getName();
             this.state = indexMetaData.state;
             this.version = indexMetaData.version;
+            this.mappingVersion = indexMetaData.mappingVersion;
             this.settings = indexMetaData.getSettings();
             this.primaryTerms = indexMetaData.primaryTerms.clone();
             this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings);
@@ -1010,6 +1041,15 @@ public Builder version(long version) {
             return this;
         }
 
+        public long mappingVersion() {
+            return mappingVersion;
+ } + + public Builder mappingVersion(final long mappingVersion) { + this.mappingVersion = mappingVersion; + return this; + } + /** * returns the primary term for the given shard. * See {@link IndexMetaData#primaryTerm(int)} for more information. @@ -1137,7 +1177,7 @@ public IndexMetaData build() { final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); - return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), + return new IndexMetaData(new Index(index, uuid), version, mappingVersion, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards, rolloverInfos.build()); } @@ -1146,6 +1186,7 @@ public static void toXContent(IndexMetaData indexMetaData, XContentBuilder build builder.startObject(indexMetaData.getIndex().getName()); builder.field(KEY_VERSION, indexMetaData.getVersion()); + builder.field(KEY_MAPPING_VERSION, indexMetaData.getMappingVersion()); builder.field(KEY_ROUTING_NUM_SHARDS, indexMetaData.getRoutingNumShards()); builder.field(KEY_STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); @@ -1219,6 +1260,7 @@ public static IndexMetaData fromXContent(XContentParser parser) throws IOExcepti if (token != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("expected object but got a " + token); } + boolean mappingVersion = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -1317,6 +1359,9 @@ public static IndexMetaData fromXContent(XContentParser parser) throws IOExcepti builder.state(State.fromString(parser.text())); } else if (KEY_VERSION.equals(currentFieldName)) { builder.version(parser.longValue()); + } else if (KEY_MAPPING_VERSION.equals(currentFieldName)) { + mappingVersion = true; + builder.mappingVersion(parser.longValue()); } else if (KEY_ROUTING_NUM_SHARDS.equals(currentFieldName)) { builder.setRoutingNumShards(parser.intValue()); } else { @@ -1326,6 +1371,9 @@ public static IndexMetaData fromXContent(XContentParser parser) throws IOExcepti throw new IllegalArgumentException("Unexpected token " + token); } } + if (Assertions.ENABLED && Version.indexCreated(builder.settings).onOrAfter(Version.V_6_5_0)) { + assert mappingVersion : "mapping version should be present for indices created on or after 6.5.0"; + } return builder.build(); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 70b6f7472ca99..cd1203b6dcd71 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -21,13 +21,12 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateTaskListener; -import 
org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -38,6 +37,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; @@ -300,6 +300,7 @@ private ClusterState applyRequest(ClusterState currentState, PutMappingClusterSt MetaData.Builder builder = MetaData.builder(metaData); boolean updated = false; for (IndexMetaData indexMetaData : updateList) { + boolean updatedMapping = false; // do the actual merge here on the master, and update the mapping source // we use the exact same indexService and metadata we used to validate above here to actually apply the update final Index index = indexMetaData.getIndex(); @@ -316,7 +317,7 @@ private ClusterState applyRequest(ClusterState currentState, PutMappingClusterSt if (existingSource.equals(updatedSource)) { // same source, no changes, ignore it } else { - updated = true; + updatedMapping = true; // use the merged mapping source if (logger.isDebugEnabled()) { logger.debug("{} update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource); @@ -326,7 +327,7 @@ private ClusterState applyRequest(ClusterState currentState, PutMappingClusterSt } } else { - updated = true; + updatedMapping = true; if (logger.isDebugEnabled()) { logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource); } else if (logger.isInfoEnabled()) { @@ -340,7 +341,16 @@ private ClusterState applyRequest(ClusterState currentState, PutMappingClusterSt for (DocumentMapper mapper : mapperService.docMappers(true)) { indexMetaDataBuilder.putMapping(new MappingMetaData(mapper.mappingSource())); } + if (updatedMapping) { + indexMetaDataBuilder.mappingVersion(1 + indexMetaDataBuilder.mappingVersion()); + } + /* + * This implicitly increments the index metadata version and builds the index metadata. This means that we need to have + * already incremented the mapping version if necessary. Therefore, the mapping version increment must remain before this + * statement. 
+             */
             builder.put(indexMetaDataBuilder);
+            updated |= updatedMapping;
         }
         if (updated) {
             return ClusterState.builder(currentState).metaData(builder).build();
diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java
index 3b559a23bb860..8a2b98d7a8200 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexService.java
@@ -521,8 +521,8 @@ List getSearchOperationListener() { // pkg private for
     }
 
     @Override
-    public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
-        return mapperService().updateMapping(indexMetaData);
+    public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
+        return mapperService().updateMapping(currentIndexMetaData, newIndexMetaData);
     }
 
     private class StoreCloseListener implements Store.OnClose {
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index b0a0493ab6f63..ece6e944744e7 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -25,6 +25,7 @@
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.apache.lucene.index.Term;
+import org.elasticsearch.Assertions;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -225,14 +226,14 @@ public static Map parseMapping(NamedXContentRegistry xContentReg
     /**
      * Update mapping by only merging the metadata that is different between received and stored entries
     */
-    public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
-        assert indexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + indexMetaData.getIndex();
+    public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
+        assert newIndexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + newIndexMetaData.getIndex();
         // go over and add the relevant mappings (or update them)
         final Set<String> existingMappers = new HashSet<>(mappers.keySet());
         final Map<String, DocumentMapper> updatedEntries;
         try {
             // only update entries if needed
-            updatedEntries = internalMerge(indexMetaData, MergeReason.MAPPING_RECOVERY, true, true);
+            updatedEntries = internalMerge(newIndexMetaData, MergeReason.MAPPING_RECOVERY, true, true);
         } catch (Exception e) {
             logger.warn(() -> new ParameterizedMessage("[{}] failed to apply mappings", index()), e);
             throw e;
@@ -240,9 +241,11 @@ public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
 
         boolean requireRefresh = false;
 
+        assertMappingVersion(currentIndexMetaData, newIndexMetaData, updatedEntries);
+
         for (DocumentMapper documentMapper : updatedEntries.values()) {
             String mappingType = documentMapper.type();
-            CompressedXContent incomingMappingSource = indexMetaData.mapping(mappingType).source();
+            CompressedXContent incomingMappingSource = newIndexMetaData.mapping(mappingType).source();
 
             String op = existingMappers.contains(mappingType) ? "updated" : "added";
 
             if (logger.isDebugEnabled() && incomingMappingSource.compressed().length < 512) {
@@ -267,6 +270,45 @@ public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
         return requireRefresh;
     }
 
+    private void assertMappingVersion(
+            final IndexMetaData currentIndexMetaData,
+            final IndexMetaData newIndexMetaData,
+            final Map<String, DocumentMapper> updatedEntries) {
+        if (Assertions.ENABLED
+                && currentIndexMetaData != null
+                && currentIndexMetaData.getCreationVersion().onOrAfter(Version.V_6_5_0)) {
+            if (currentIndexMetaData.getMappingVersion() == newIndexMetaData.getMappingVersion()) {
+                // if the mapping version is unchanged, then there should not be any updates and all mappings should be the same
+                assert updatedEntries.isEmpty() : updatedEntries;
+                for (final ObjectCursor<MappingMetaData> mapping : newIndexMetaData.getMappings().values()) {
+                    final CompressedXContent currentSource = currentIndexMetaData.mapping(mapping.value.type()).source();
+                    final CompressedXContent newSource = mapping.value.source();
+                    assert currentSource.equals(newSource) :
+                            "expected current mapping [" + currentSource + "] for type [" + mapping.value.type() + "] "
+                                    + "to be the same as new mapping [" + newSource + "]";
+                }
+            } else {
+                // if the mapping version is changed, it should increase, there should be updates, and the mapping should be different
+                final long currentMappingVersion = currentIndexMetaData.getMappingVersion();
+                final long newMappingVersion = newIndexMetaData.getMappingVersion();
+                assert currentMappingVersion < newMappingVersion :
+                        "expected current mapping version [" + currentMappingVersion + "] "
+                                + "to be less than new mapping version [" + newMappingVersion + "]";
+                assert updatedEntries.isEmpty() == false;
+                for (final DocumentMapper documentMapper : updatedEntries.values()) {
+                    final MappingMetaData currentMapping = currentIndexMetaData.mapping(documentMapper.type());
+                    if (currentMapping != null) {
+                        final CompressedXContent currentSource = currentMapping.source();
+                        final CompressedXContent newSource = documentMapper.mappingSource();
+                        assert currentSource.equals(newSource) == false :
+                                "expected current mapping [" + currentSource + "] for type [" + documentMapper.type() + "] "
+                                        + "to be different than new mapping";
+                    }
+                }
+            }
+        }
+    }
+
     public void merge(Map<String, Map<String, Object>> mappings, MergeReason reason, boolean updateAllTypes) {
         Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size());
         for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
index e6a86d47f55c0..692010119dc2d 100644
--- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
+++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
@@ -456,7 +456,7 @@ private void createIndices(final ClusterState state) {
             AllocatedIndex<? extends Shard> indexService = null;
             try {
                 indexService = indicesService.createIndex(indexMetaData, buildInIndexListener);
-                if (indexService.updateMapping(indexMetaData) && sendRefreshMapping) {
+                if (indexService.updateMapping(null, indexMetaData) && sendRefreshMapping) {
                     nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(),
                         new NodeMappingRefreshAction.NodeMappingRefreshRequest(indexMetaData.getIndex().getName(),
                             indexMetaData.getIndexUUID(), state.nodes().getLocalNodeId())
@@ -490,7 +490,7 @@ private void updateIndices(ClusterChangedEvent event) {
             if (ClusterChangedEvent.indexMetaDataChanged(currentIndexMetaData, newIndexMetaData)) {
                 indexService.updateMetaData(newIndexMetaData);
                 try {
-                    if (indexService.updateMapping(newIndexMetaData) && sendRefreshMapping) {
+                    if (indexService.updateMapping(currentIndexMetaData, newIndexMetaData) && sendRefreshMapping) {
                         nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(),
                             new NodeMappingRefreshAction.NodeMappingRefreshRequest(newIndexMetaData.getIndex().getName(),
                                 newIndexMetaData.getIndexUUID(), state.nodes().getLocalNodeId())
@@ -778,7 +778,7 @@ public interface AllocatedIndex extends Iterable, IndexCompo
         /**
         * Checks if index requires refresh from master.
         */
-        boolean updateMapping(IndexMetaData indexMetaData) throws IOException;
+        boolean updateMapping(IndexMetaData currentIndexMetaData, IndexMetaData newIndexMetaData) throws IOException;
 
         /**
         * Returns shard with given id.
diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
index a7df9bdfdfd87..702d63d0d9401 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java
@@ -292,6 +292,7 @@ public ClusterState execute(ClusterState currentState) {
                         // Index exists and it's closed - open it in metadata and start recovery
                         IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN);
                         indexMdBuilder.version(Math.max(snapshotIndexMetaData.getVersion(), currentIndexMetaData.getVersion() + 1));
+                        indexMdBuilder.mappingVersion(Math.max(snapshotIndexMetaData.getMappingVersion(), currentIndexMetaData.getMappingVersion() + 1));
                         if (!request.includeAliases()) {
                             // Remove all snapshot aliases
                             if (!snapshotIndexMetaData.getAliases().isEmpty()) {
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
index a3fecc7eae0b9..62b454419141e 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java
@@ -135,4 +135,34 @@ public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception
 
         assertSame(result, result2);
     }
 
+    public void testMappingVersion() throws Exception {
+        final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+        final long previousVersion = indexService.getMetaData().getMappingVersion();
+        final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class);
+        final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+        final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
+        request.indices(new Index[] {indexService.index()});
+        request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}");
+        final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result =
+                mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+        assertThat(result.executionResults.size(), equalTo(1));
+        assertTrue(result.executionResults.values().iterator().next().isSuccess());
+        assertThat(result.resultingState.metaData().index("test").getMappingVersion(), equalTo(1 + previousVersion));
+    }
+
+    public void testMappingVersionUnchanged() throws Exception {
+        final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+        final long previousVersion = indexService.getMetaData().getMappingVersion();
+        final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class);
+        final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+        final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
+        request.indices(new Index[] {indexService.index()});
+        request.source("{ \"properties\": {}}");
+        final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result =
+                mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+        assertThat(result.executionResults.size(), equalTo(1));
+        assertTrue(result.executionResults.values().iterator().next().isSuccess());
+        assertThat(result.resultingState.metaData().index("test").getMappingVersion(), equalTo(previousVersion));
+    }
+
 }
diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
index d236d01f049dd..0bf80e5239874 100644
--- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
+++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
@@ -267,6 +267,7 @@ public void testLoadState() throws IOException {
             IndexMetaData deserialized = indices.get(original.getIndex().getName());
             assertThat(deserialized, notNullValue());
             assertThat(deserialized.getVersion(), equalTo(original.getVersion()));
+            assertThat(deserialized.getMappingVersion(), equalTo(original.getMappingVersion()));
             assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas()));
             assertThat(deserialized.getNumberOfShards(), equalTo(original.getNumberOfShards()));
         }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 721695e87f6ac..c5c99926610f1 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -786,4 +787,13 @@ public void testDynamicTemplateOrder() throws IOException {
         client().prepareIndex("test", "type", "1").setSource("foo", "abc").get();
         assertThat(index.mapperService().fullName("foo"), instanceOf(KeywordFieldMapper.KeywordFieldType.class));
     }
+
+    public void testMappingVersionAfterDynamicMappingUpdate() {
+        createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+        final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+        final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+        client().prepareIndex("test", "type", "1").setSource("field", "text").get();
+        assertThat(clusterService.state().metaData().index("test").getMappingVersion(), equalTo(1 + previousVersion));
+    }
+
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java
index 25b328637f18b..c51171da990f3 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.Version;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -31,6 +33,7 @@
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
+import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -265,4 +268,30 @@ public void testRejectFieldDefinedTwiceInSameType() throws IOException {
                 () -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE, false));
         assertThat(e.getMessage(), equalTo("mapper [foo] of different type, current_type [long], merged_type [ObjectMapper]"));
     }
+
+    public void testMappingVersion() {
+        createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+        final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+        {
+            final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+            final PutMappingRequest request = new PutMappingRequest();
+            request.indices("test");
+            request.type("type");
+            request.source("field", "type=text");
+            client().admin().indices().putMapping(request).actionGet();
+            assertThat(clusterService.state().metaData().index("test").getMappingVersion(), Matchers.equalTo(1 + previousVersion));
+        }
+
+        {
+            final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+            final PutMappingRequest request = new PutMappingRequest();
+            request.indices("test");
+            request.type("type");
+            request.source("field", "type=text");
+            client().admin().indices().putMapping(request).actionGet();
+            // the version should be unchanged after putting the same mapping again
+            assertThat(clusterService.state().metaData().index("test").getMappingVersion(), Matchers.equalTo(previousVersion));
+        }
+    }
+
 }
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
index 580696264bdd4..c68e4870aaeb0 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java
@@ -273,7 +273,7 @@ public IndexSettings getIndexSettings() {
         }
 
         @Override
-        public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
+        public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
            failRandomly();
            return false;
        }
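
Note on intended use: the commit message above motivates the mapping version with
cross-cluster replication. As a minimal sketch of that consumption pattern (the
follower-side tracker below is hypothetical and not part of this patch; only
IndexMetaData#getMappingVersion() is introduced here), a follower could compare the
leader's mapping version against the last version it applied and skip the mapping
fetch entirely when nothing has changed:

    import org.elasticsearch.cluster.metadata.IndexMetaData;

    /**
     * Hypothetical follower-side tracker. Because the mapping version is
     * monotonically increasing and bumped only on mapping updates, a simple
     * numeric comparison distinguishes mapping changes from the many other
     * index metadata changes (settings, aliases, in-sync allocation ids, ...).
     */
    final class MappingVersionTracker {

        // versions start at 1, so the first check always requests the initial mapping
        private long lastAppliedMappingVersion = 0;

        boolean needsMappingUpdate(IndexMetaData leaderIndexMetaData) {
            return leaderIndexMetaData.getMappingVersion() > lastAppliedMappingVersion;
        }

        void markApplied(IndexMetaData leaderIndexMetaData) {
            lastAppliedMappingVersion = leaderIndexMetaData.getMappingVersion();
        }
    }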