diff --git a/docs/java-api/query-dsl/geo-distance-query.asciidoc b/docs/java-api/query-dsl/geo-distance-query.asciidoc
index 7927dff440be6..cc8c89ca61eea 100644
--- a/docs/java-api/query-dsl/geo-distance-query.asciidoc
+++ b/docs/java-api/query-dsl/geo-distance-query.asciidoc
@@ -5,7 +5,7 @@ See {ref}/query-dsl-geo-distance-query.html[Geo Distance Query]
 
 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{query-dsl-test}[geo_bounding_box]
+include-tagged::{query-dsl-test}[geo_distance]
 --------------------------------------------------
 <1> field
 <2> center point
diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
index 2e4b9a1101108..58de8c5a3e142 100644
--- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
@@ -224,8 +224,40 @@ Time values can also be specified via abbreviations supported by <<time-units,t
 
+====== Format
+
+Internally, a date is represented as a 64 bit number representing a timestamp in milliseconds-since-the-epoch.
+These timestamps are returned as the bucket keys. It is possible to return a formatted date string instead using
+the format specified with the format parameter:
+
+[source,js]
+--------------------------------------------------
+GET /_search
+{
+    "aggs" : {
+        "my_buckets": {
+            "composite" : {
+                "sources" : [
+                    {
+                        "date": {
+                            "date_histogram" : {
+                                "field": "timestamp",
+                                "interval": "1d",
+                                "format": "yyyy-MM-dd" <1>
+                            }
+                        }
+                    }
+                ]
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> Supports expressive date <<date-format-pattern,format pattern>>
+
 ====== Time Zone
 
 Date-times are stored in Elasticsearch in UTC. By default, all bucketing and
 rounding is also done in UTC. The `time_zone` parameter can be used to indicate
@@ -362,6 +394,10 @@ GET /_search
     ...
     "aggregations": {
         "my_buckets": {
+            "after_key": { <1>
+                "date": 1494288000000,
+                "product": "mad max"
+            },
             "buckets": [
                 {
                     "key": {
@@ -371,7 +407,7 @@ GET /_search
                     "doc_count": 1
                 },
                 {
-                    "key": { <1>
+                    "key": {
                         "date": 1494288000000,
                         "product": "mad max"
                     },
@@ -386,9 +422,14 @@ GET /_search
 
 <1> The last composite bucket returned by the query.
 
+NOTE: The `after_key` is equal to the last bucket returned in the response before
+any filtering that could be done by <<search-aggregations-pipeline,pipeline aggregations>>.
+If all buckets are filtered/removed by a pipeline aggregation, the `after_key` will contain
+the last bucket before filtering.
+
 The `after` parameter can be used to retrieve the composite buckets that are
 **after** the last composite buckets returned in a previous round.
-For the example below the last bucket is `"key": [1494288000000, "mad max"]` so the next
+For the example below the last bucket can be found in `after_key` and the next
 round of result can be retrieved with:
 
 [source,js]
 --------------------------------------------------
@@ -453,6 +494,10 @@ GET /_search
     ...
     "aggregations": {
         "my_buckets": {
+            "after_key": {
+                "date": 1494201600000,
+                "product": "rocky"
+            },
             "buckets": [
                 {
                     "key": {
diff --git a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc
index 8797e6041d5f3..f45629b14e746 100644
--- a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc
@@ -93,7 +93,7 @@ GET /issues/_search
 // TEST[s/_search/_search\?filter_path=aggregations/]
 
 As you can see above, the `reverse_nested` aggregation is put in to a `nested` aggregation as this is the only place
-in the dsl where the `reversed_nested` aggregation can be used. Its sole purpose is to join back to a parent doc higher
+in the dsl where the `reverse_nested` aggregation can be used. Its sole purpose is to join back to a parent doc higher
 up in the nested structure.
 
 <1> A `reverse_nested` aggregation that joins back to the root / main document level, because no `path` has been defined.
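The documentation above drives `after_key` paging through the REST layer. The same loop can be written against the Java API that this change touches; the sketch below is illustrative rather than part of the patch — the `client` wiring, the `test` index, and the `timestamp` field are assumptions, and it relies on the `format` option and `afterKey()` accessor introduced here:

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public final class CompositePagingExample {
    /** Pages over every composite bucket, one search request per page. */
    static void printAllBuckets(Client client) {
        DateHistogramValuesSourceBuilder dateSource = new DateHistogramValuesSourceBuilder("date")
                .field("timestamp")                                   // assumed field name
                .dateHistogramInterval(DateHistogramInterval.days(1))
                .format("yyyy-MM-dd");                                // formatted keys, added by this change
        CompositeAggregationBuilder composite =
                new CompositeAggregationBuilder("my_buckets", Arrays.asList(dateSource));

        while (true) {
            SearchResponse response = client.prepareSearch("test")   // assumed index name
                    .setSize(0)                                       // only the aggregation is needed
                    .addAggregation(composite)
                    .get();
            CompositeAggregation agg = response.getAggregations().get("my_buckets");
            for (CompositeAggregation.Bucket bucket : agg.getBuckets()) {
                System.out.println(bucket.getKey() + " -> " + bucket.getDocCount());
            }
            // after_key is the key of the last bucket of this page, even when a
            // pipeline aggregation filtered trailing buckets out of the response
            Map<String, Object> afterKey = agg.afterKey();
            if (afterKey == null || agg.getBuckets().isEmpty()) {
                break;                                                // no more pages
            }
            composite.aggregateAfter(afterKey);                       // resume after the last key
        }
    }
}
--------------------------------------------------

Resuming with `aggregateAfter` keeps every round bounded by `size`, which is the same reason the `terms` documentation below points to `composite` for exhaustive pagination.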
diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
index e768cb0b295b8..1c739c40996b2 100644
--- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
@@ -114,6 +114,11 @@ This means that if the number of unique terms is greater than `size`, the return
 (it could be that the term counts are slightly off and it could even be that a term that should have been in the top
 size buckets was not returned).
 
+NOTE: If you want to retrieve **all** terms or all combinations of terms in a nested `terms` aggregation
+      you should use the <<search-aggregations-bucket-composite-aggregation,Composite>> aggregation, which
+      allows you to paginate over all possible terms rather than setting a size greater than the cardinality of the field in the
+      `terms` aggregation. The `terms` aggregation is meant to return the `top` terms and does not allow pagination.
+
 [[search-aggregations-bucket-terms-aggregation-approximate-counts]]
 ==== Document counts are approximate
 
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index 0a6dbd0eb8359..b3156dbc1f414 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -858,7 +858,7 @@ GET /bank/_search
 
 Note that if `size` is not specified, it defaults to 10.
 
-This example does a `match_all` and returns documents 11 through 20:
+This example does a `match_all` and returns documents 10 through 19:
 
 [source,js]
 --------------------------------------------------
diff --git a/docs/reference/migration/migrate_6_0/rest.asciidoc b/docs/reference/migration/migrate_6_0/rest.asciidoc
index 4c6be3173a47e..fbf1f0cc96a11 100644
--- a/docs/reference/migration/migrate_6_0/rest.asciidoc
+++ b/docs/reference/migration/migrate_6_0/rest.asciidoc
@@ -66,9 +66,11 @@ Previously it was possible to execute `GET /_aliases,_mappings` or
 `GET /myindex/_settings,_alias` by separating multiple types of requests with
 commas in order to retrieve multiple types of information about one or more
 indices. This comma-separation for retrieving multiple pieces of information has been
-removed.. `GET /_all` can be used to retrieve all aliases, settings, and
-mappings for all indices. In order to retrieve only the mappings for an index,
-`GET /myindex/_mappings` (or `_aliases`, or `_settings`).
+removed. `GET /_all` can be used to retrieve all aliases, settings, and
+mappings for all indices.
+
+In order to retrieve only the mapping for an index use:
+`GET /myindex/_mapping` (or `_alias` for a list of aliases, or `_settings` for the settings).
 
 ==== Requests to existing endpoints with incorrect HTTP verb now return 405 responses
 
diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc
index 302fa475a0983..974c141b03faa 100644
--- a/docs/reference/search/request/highlighting.asciidoc
+++ b/docs/reference/search/request/highlighting.asciidoc
@@ -145,8 +145,9 @@ You can specify the locale to use with `boundary_scanner_locale`.
 
 boundary_scanner_locale:: Controls which locale is used to search for sentence
 and word boundaries.
 
-encoder:: Indicates if the highlighted text should be HTML encoded:
-`default` (no encoding) or `html` (escapes HTML highlighting tags).
+encoder:: Indicates if the snippet should be HTML encoded:
+`default` (no encoding) or `html` (HTML-escape the snippet text and then
+insert the highlighting tags).
 
 fields:: Specifies the fields to retrieve highlights for. You can use wildcards
 to specify fields. For example, you could specify `comment_*` to
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index 7925856656e15..5167f7d1434de 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -81,7 +81,7 @@ public final class MethodWriter extends GeneratorAdapter {
     private final BitSet statements;
     private final CompilerSettings settings;
 
-    private final Deque<List<org.objectweb.asm.Type>> stringConcatArgs =
+    private final Deque<List<Type>> stringConcatArgs =
         (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE == null) ? null : new ArrayDeque<>();
 
     public MethodWriter(int access, Method method, ClassVisitor cw, BitSet statements, CompilerSettings settings) {
@@ -200,7 +200,7 @@ private void writeCast(Class<?> from, Class<?> to) {
      * Proxy the box method to use valueOf instead to ensure that the modern boxing methods are used.
      */
     @Override
-    public void box(org.objectweb.asm.Type type) {
+    public void box(Type type) {
         valueOf(type);
     }
 
@@ -252,10 +252,10 @@ public int writeNewStrings() {
         }
     }
 
-    public void writeAppendStrings(final Definition.Type type) {
+    public void writeAppendStrings(Class<?> clazz) {
         if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
             // Java 9+: record type information
-            stringConcatArgs.peek().add(type.type);
+            stringConcatArgs.peek().add(getType(clazz));
             // prevent too many concat args.
             // If there are too many, do the actual concat:
             if (stringConcatArgs.peek().size() >= MAX_INDY_STRING_CONCAT_ARGS) {
@@ -266,24 +266,24 @@ public void writeAppendStrings(final Definition.Type type) {
             }
         } else {
             // Java 8: push a StringBuilder append
-            if (type.clazz == boolean.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN);
-            else if (type.clazz == char.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR);
-            else if (type.clazz == byte.class ||
-                     type.clazz == short.class ||
-                     type.clazz == int.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT);
-            else if (type.clazz == long.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG);
-            else if (type.clazz == float.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT);
-            else if (type.clazz == double.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE);
-            else if (type.clazz == String.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING);
-            else invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT);
+            if (clazz == boolean.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN);
+            else if (clazz == char.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR);
+            else if (clazz == byte.class ||
+                     clazz == short.class ||
+                     clazz == int.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT);
+            else if (clazz == long.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG);
+            else if (clazz == float.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT);
+            else if (clazz == double.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE);
+            else if (clazz == String.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING);
+            else invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT);
         }
     }
 
     public void writeToStrings() {
         if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
             // Java 9+: use type information and push invokeDynamic
-            final String desc = org.objectweb.asm.Type.getMethodDescriptor(STRING_TYPE,
-                stringConcatArgs.pop().stream().toArray(org.objectweb.asm.Type[]::new));
+            final String desc = Type.getMethodDescriptor(STRING_TYPE,
+                stringConcatArgs.pop().stream().toArray(Type[]::new));
             invokeDynamic("concat", desc, INDY_STRING_CONCAT_BOOTSTRAP_HANDLE);
         } else {
             // Java 8: call toString() on StringBuilder
@@ -292,9 +292,9 @@ public void writeToStrings() {
     }
 
     /** Writes a dynamic binary instruction: returnType, lhs, and rhs can be different */
-    public void writeDynamicBinaryInstruction(Location location, Definition.Type returnType, Definition.Type lhs, Definition.Type rhs,
+    public void writeDynamicBinaryInstruction(Location location, Class<?> returnType, Class<?> lhs, Class<?> rhs,
                                               Operation operation, int flags) {
-        org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(returnType.type, lhs.type, rhs.type);
+        Type methodType = Type.getMethodType(getType(returnType), getType(lhs), getType(rhs));
 
         switch (operation) {
             case MUL:
@@ -310,7 +310,7 @@ public void writeDynamicBinaryInstruction(Location location, Definition.Type ret
                 // if either side is primitive, then the + operator should always throw NPE on null,
                 // so we don't need a special NPE guard.
                 // otherwise, we need to allow nulls for possible string concatenation.
-                boolean hasPrimitiveArg = lhs.clazz.isPrimitive() || rhs.clazz.isPrimitive();
+                boolean hasPrimitiveArg = lhs.isPrimitive() || rhs.isPrimitive();
                 if (!hasPrimitiveArg) {
                     flags |= DefBootstrap.OPERATOR_ALLOWS_NULL;
                 }
@@ -343,8 +343,8 @@ public void writeDynamicBinaryInstruction(Location location, Definition.Type ret
     }
 
     /** Writes a static binary instruction */
-    public void writeBinaryInstruction(Location location, Definition.Type type, Operation operation) {
-        if ((type.clazz == float.class || type.clazz == double.class) &&
+    public void writeBinaryInstruction(Location location, Class<?> clazz, Operation operation) {
+        if ((clazz == float.class || clazz == double.class) &&
             (operation == Operation.LSH || operation == Operation.USH ||
              operation == Operation.RSH || operation == Operation.BWAND ||
              operation == Operation.XOR || operation == Operation.BWOR)) {
@@ -352,17 +352,17 @@ public void writeBinaryInstruction(Location location, Definition.Type type, Oper
         }
 
         switch (operation) {
-            case MUL:   math(GeneratorAdapter.MUL,   type.type); break;
-            case DIV:   math(GeneratorAdapter.DIV,   type.type); break;
-            case REM:   math(GeneratorAdapter.REM,   type.type); break;
-            case ADD:   math(GeneratorAdapter.ADD,   type.type); break;
-            case SUB:   math(GeneratorAdapter.SUB,   type.type); break;
-            case LSH:   math(GeneratorAdapter.SHL,   type.type); break;
-            case USH:   math(GeneratorAdapter.USHR,  type.type); break;
-            case RSH:   math(GeneratorAdapter.SHR,   type.type); break;
-            case BWAND: math(GeneratorAdapter.AND,   type.type); break;
-            case XOR:   math(GeneratorAdapter.XOR,   type.type); break;
-            case BWOR:  math(GeneratorAdapter.OR,    type.type); break;
+            case MUL:   math(GeneratorAdapter.MUL,   getType(clazz)); break;
+            case DIV:   math(GeneratorAdapter.DIV,   getType(clazz)); break;
+            case REM:   math(GeneratorAdapter.REM,   getType(clazz)); break;
+            case ADD:   math(GeneratorAdapter.ADD,   getType(clazz)); break;
+            case SUB:   math(GeneratorAdapter.SUB,   getType(clazz)); break;
+            case LSH:   math(GeneratorAdapter.SHL,   getType(clazz)); break;
+            case USH:   math(GeneratorAdapter.USHR,  getType(clazz)); break;
+            case RSH:   math(GeneratorAdapter.SHR,   getType(clazz)); break;
+            case BWAND: math(GeneratorAdapter.AND,   getType(clazz)); break;
+            case XOR:   math(GeneratorAdapter.XOR,   getType(clazz)); break;
+            case BWOR:  math(GeneratorAdapter.OR,    getType(clazz)); break;
             default:
                 throw location.createError(new IllegalStateException("Illegal tree structure."));
         }
@@ -416,7 +416,7 @@ public void visitEnd() {
      * @param flavor type of call
      * @param params flavor-specific parameters
      */
-    public void invokeDefCall(String name, org.objectweb.asm.Type methodType, int flavor, Object... params) {
+    public void invokeDefCall(String name, Type methodType, int flavor, Object... params) {
         Object[] args = new Object[params.length + 2];
         args[0] = settings.getInitialCallSiteDepth();
         args[1] = flavor;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
index 45ca4601e963d..3715c5802bb75 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Cast;
 import org.elasticsearch.painless.Definition.Type;
+import org.elasticsearch.painless.Definition.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
@@ -274,12 +275,12 @@ void write(MethodWriter writer, Globals globals) {
             writer.writeDup(lhs.accessElementCount(), catElementStackSize); // dup the top element and insert it
                                                                             // before concat helper on stack
             lhs.load(writer, globals);                                      // read the current lhs's value
-            writer.writeAppendStrings(lhs.actual);                          // append the lhs's value using the StringBuilder
+            writer.writeAppendStrings(Definition.TypeToClass(lhs.actual)); // append the lhs's value using the StringBuilder
 
             rhs.write(writer, globals);                                     // write the bytecode for the rhs
 
-            if (!(rhs instanceof EBinary) || !((EBinary)rhs).cat) {         // check to see if the rhs has already done a concatenation
-                writer.writeAppendStrings(rhs.actual);                      // append the rhs's value since it's hasn't already
+            if (!(rhs instanceof EBinary) || !((EBinary)rhs).cat) {        // check to see if the rhs has already done a concatenation
+                writer.writeAppendStrings(Definition.TypeToClass(rhs.actual)); // append the rhs's value since it hasn't already
             }
 
             writer.writeToStrings(); // put the value for string concat onto the stack
@@ -313,9 +314,9 @@ void write(MethodWriter writer, Globals globals) {
             // write the operation instruction for compound assignment
             if (promote.dynamic) {
                 writer.writeDynamicBinaryInstruction(
-                    location, promote, DefType, DefType, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT);
+                    location, Definition.TypeToClass(promote), def.class, def.class, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT);
             } else {
-                writer.writeBinaryInstruction(location, promote, operation);
+                writer.writeBinaryInstruction(location, Definition.TypeToClass(promote), operation);
             }
 
             writer.writeCast(back); // if necessary cast the promotion type value back to the lhs's type
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
index 55c2145acd8cd..b0ad92d3fc422 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
@@ -649,13 +649,13 @@ void write(MethodWriter writer, Globals globals) {
             left.write(writer, globals);
 
             if (!(left instanceof EBinary) || !((EBinary)left).cat) {
-                writer.writeAppendStrings(left.actual);
+                writer.writeAppendStrings(Definition.TypeToClass(left.actual));
             }
 
             right.write(writer, globals);
 
             if (!(right instanceof EBinary) || !((EBinary)right).cat) {
-                writer.writeAppendStrings(right.actual);
+                writer.writeAppendStrings(Definition.TypeToClass(right.actual));
             }
 
             if (!cat) {
@@ -684,9 +684,10 @@ void write(MethodWriter writer, Globals globals) {
                 if (originallyExplicit) {
                     flags |= DefBootstrap.OPERATOR_EXPLICIT_CAST;
                 }
-                writer.writeDynamicBinaryInstruction(location, actual, left.actual, right.actual, operation, flags);
+                writer.writeDynamicBinaryInstruction(location, Definition.TypeToClass(actual),
+                    Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), operation, flags);
             } else {
-                writer.writeBinaryInstruction(location, actual, operation);
+                writer.writeBinaryInstruction(location, Definition.TypeToClass(actual), operation);
             }
         }
     }
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java
index 39562eca0fefc..7233af2e0e740 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java
@@ -76,7 +76,7 @@ public void testDeleteByQueryWithWorkers() throws Exception {
     private void testCase(AbstractBulkByScrollRequestBuilder<?, ?> request, String actionName) throws Exception {
         logger.info("Starting test for [{}] with [{}] slices", actionName, request.request().getSlices());
         /* Add ten documents per slice so most slices will have many documents to process, having to go to multiple batches.
-         * we can't rely on all of them doing so, but
+         * We can't rely on the slices being evenly sized but 10 means we have some pretty big slices.
          */
 
         createIndex("test");
@@ -168,6 +168,8 @@ private void testCase(AbstractBulkByScrollRequestBuilder<?, ?> request, String a
 
         // Now the response should come back quickly because we've rethrottled the request
         BulkByScrollResponse response = responseListener.get();
+
+        // It'd be bad if the entire request completed in a single batch. The test wouldn't be testing anything.
         assertThat("Entire request completed in a single batch.
This may invalidate the test as throttling is done between batches.", response.getBatches(), greaterThanOrEqualTo(numSlices)); } @@ -187,8 +189,9 @@ private ListTasksResponse rethrottleTask(TaskId taskToRethrottle, float newReque assertThat(rethrottleResponse.getTasks(), hasSize(1)); response.set(rethrottleResponse); } catch (ElasticsearchException e) { - // if it's the error we're expecting, rethrow as AssertionError so awaitBusy doesn't exit early if (e.getCause() instanceof IllegalArgumentException) { + // We want to retry in this case so we throw an assertion error + logger.info("caught unprepared task, retrying until prepared"); throw new AssertionError("Rethrottle request for task [" + taskToRethrottle.getId() + "] failed", e); } else { throw e; @@ -204,14 +207,32 @@ private TaskGroup findTaskToRethrottle(String actionName, int sliceCount) { do { ListTasksResponse tasks = client().admin().cluster().prepareListTasks().setActions(actionName).setDetailed(true).get(); tasks.rethrowFailures("Finding tasks to rethrottle"); - assertThat(tasks.getTaskGroups(), hasSize(lessThan(2))); + assertThat("tasks are left over from the last execution of this test", + tasks.getTaskGroups(), hasSize(lessThan(2))); if (0 == tasks.getTaskGroups().size()) { + // The parent task hasn't started yet continue; } TaskGroup taskGroup = tasks.getTaskGroups().get(0); - if (sliceCount != 1 && taskGroup.getChildTasks().size() == 0) { - // If there are child tasks wait for at least one to start - continue; + if (sliceCount != 1) { + BulkByScrollTask.Status status = (BulkByScrollTask.Status) taskGroup.getTaskInfo().getStatus(); + /* + * If there are child tasks wait for all of them to start. It + * is possible that we'll end up with some very small slices + * (maybe even empty!) that complete super fast so we have to + * count them too. + */ + long finishedChildStatuses = status.getSliceStatuses().stream() + .filter(n -> n != null) + .count(); + logger.info("Expected [{}] total children, [{}] are running and [{}] are finished\n{}", + sliceCount, taskGroup.getChildTasks().size(), finishedChildStatuses, status.getSliceStatuses()); + if (sliceCount == finishedChildStatuses) { + fail("all slices finished:\n" + status); + } + if (sliceCount != taskGroup.getChildTasks().size() + finishedChildStatuses) { + continue; + } } return taskGroup; } while (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(10)); diff --git a/qa/vagrant/src/test/resources/packaging/utils/plugins.bash b/qa/vagrant/src/test/resources/packaging/utils/plugins.bash index 4d7e100ba9f9e..eda3038ee93d3 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/plugins.bash @@ -30,7 +30,7 @@ # specific language governing permissions and limitations # under the License. -# Install a plugin an run all the common post installation tests. 
+# Install a plugin install_plugin() { local name=$1 local path="$2" @@ -52,8 +52,6 @@ install_plugin() { sudo -E -u $ESPLUGIN_COMMAND_USER bash -c "umask $umask && \"$ESHOME/bin/elasticsearch-plugin\" install -batch \"file://$path\"" fi - assert_file_exist "$ESPLUGINS/$name" - assert_file_exist "$ESPLUGINS/$name/plugin-descriptor.properties" #check we did not accidentially create a log file as root as /usr/share/elasticsearch assert_file_not_exist "/usr/share/elasticsearch/logs" @@ -66,13 +64,6 @@ install_plugin() { fi } -install_jvm_plugin() { - local name=$1 - local path="$2" - install_plugin $name "$path" $3 - assert_file_exist "$ESPLUGINS/$name/$name"*".jar" -} - # Remove a plugin and make sure its plugin directory is removed. remove_plugin() { local name=$1 @@ -95,7 +86,7 @@ remove_plugin() { # placements for non-site plugins. install_jvm_example() { local relativePath=${1:-$(readlink -m jvm-example-*.zip)} - install_jvm_plugin jvm-example "$relativePath" $2 + install_plugin jvm-example "$relativePath" $2 bin_user=$(find "$ESHOME/bin" -maxdepth 0 -printf "%u") bin_owner=$(find "$ESHOME/bin" -maxdepth 0 -printf "%g") @@ -156,9 +147,11 @@ install_and_check_plugin() { local full_name="$prefix-$name" fi - install_jvm_plugin $full_name "$(readlink -m $full_name-*.zip)" + install_plugin $full_name "$(readlink -m $full_name-*.zip)" assert_module_or_plugin_directory "$ESPLUGINS/$full_name" + assert_file_exist "$ESPLUGINS/$full_name/plugin-descriptor.properties" + assert_file_exist "$ESPLUGINS/$full_name/$full_name"*".jar" # analysis plugins have a corresponding analyzers jar if [ $prefix == 'analysis' ]; then @@ -176,6 +169,17 @@ install_and_check_plugin() { done } +# Install a meta plugin +# $1 - the plugin name +# $@ - all remaining arguments are jars that must exist in the plugin's +# installation directory +install_meta_plugin() { + local name=$1 + + install_plugin $name "$(readlink -m $name-*.zip)" + assert_module_or_plugin_directory "$ESPLUGINS/$name" +} + # Compare a list of plugin names to the plugins in the plugins pom and see if they are the same # $1 the file containing the list of plugins we want to compare to # $2 description of the source of the plugin list diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 755e77d10dd1b..b8c89517ec119 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -7,6 +7,8 @@ setup: mappings: doc: properties: + date: + type: date keyword: type: keyword long: @@ -40,6 +42,20 @@ setup: id: 4 body: { "keyword": "bar", "long": [1000, 0] } + - do: + index: + index: test + type: doc + id: 5 + body: { "date": "2017-10-20T03:08:45" } + + - do: + index: + index: test + type: doc + id: 6 + body: { "date": "2017-10-21T07:00:00" } + - do: indices.refresh: index: [test] @@ -66,7 +82,7 @@ setup: } ] - - match: {hits.total: 4} + - match: {hits.total: 6} - length: { aggregations.test.buckets: 2 } - match: { aggregations.test.buckets.0.key.kw: "bar" } - match: { aggregations.test.buckets.0.doc_count: 3 } @@ -104,7 +120,7 @@ setup: } ] - - match: {hits.total: 4} + - match: {hits.total: 6} - length: { aggregations.test.buckets: 5 } - match: { aggregations.test.buckets.0.key.long: 0} - match: { aggregations.test.buckets.0.key.kw: "bar" } @@ -154,7 +170,7 @@ setup: ] after: { "long": 
20, "kw": "foo" } - - match: {hits.total: 4} + - match: {hits.total: 6} - length: { aggregations.test.buckets: 2 } - match: { aggregations.test.buckets.0.key.long: 100 } - match: { aggregations.test.buckets.0.key.kw: "bar" } @@ -188,7 +204,7 @@ setup: ] after: { "kw": "delta" } - - match: {hits.total: 4} + - match: {hits.total: 6} - length: { aggregations.test.buckets: 1 } - match: { aggregations.test.buckets.0.key.kw: "foo" } - match: { aggregations.test.buckets.0.doc_count: 2 } @@ -220,3 +236,90 @@ setup: } } ] + +--- +"Composite aggregation with format": + - skip: + version: " - 6.2.99" + reason: this uses a new option (format) added in 6.3.0 + + - do: + search: + index: test + body: + aggregations: + test: + composite: + sources: [ + { + "date": { + "date_histogram": { + "field": "date", + "interval": "1d", + "format": "yyyy-MM-dd" + } + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 2 } + - match: { aggregations.test.buckets.0.key.date: "2017-10-20" } + - match: { aggregations.test.buckets.0.doc_count: 1 } + - match: { aggregations.test.buckets.1.key.date: "2017-10-21" } + - match: { aggregations.test.buckets.1.doc_count: 1 } + + - do: + search: + index: test + body: + aggregations: + test: + composite: + after: { + date: "2017-10-20" + } + sources: [ + { + "date": { + "date_histogram": { + "field": "date", + "interval": "1d", + "format": "yyyy-MM-dd" + } + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 1 } + - match: { aggregations.test.buckets.0.key.date: "2017-10-21" } + - match: { aggregations.test.buckets.0.doc_count: 1 } + +--- +"Composite aggregation with after_key in the response": + - skip: + version: " - 6.2.99" + reason: starting in 6.3.0 after_key is returned in the response + + - do: + search: + index: test + body: + aggregations: + test: + composite: + sources: [ + { + "keyword": { + "terms": { + "field": "keyword", + } + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 2 } + - length: { aggregations.test.after_key: 1 } + - match: { aggregations.test.after_key.keyword: "foo" } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 8c11377523387..3b4476574b2bc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -440,7 +440,9 @@ public boolean updateAllTypes() { return updateAllTypes; } - /** See {@link #updateAllTypes()} */ + /** See {@link #updateAllTypes()} + * @deprecated useless with 6.x indices which may only have one type */ + @Deprecated public CreateIndexRequest updateAllTypes(boolean updateAllTypes) { this.updateAllTypes = updateAllTypes; return this; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index fabe269124e9e..0d0dd7eb3c09e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -239,7 +239,9 @@ public CreateIndexRequestBuilder setSource(XContentBuilder source) { return this; } - /** True if all fields that span multiple types should be updated, 
false otherwise */
+    /** True if all fields that span multiple types should be updated, false otherwise
+     * @deprecated useless with 6.x indices which may only have one type */
+    @Deprecated
     public CreateIndexRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
         request.updateAllTypes(updateAllTypes);
         return this;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java
index 01a61cab7854e..62c1ad68578c1 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java
@@ -296,7 +296,11 @@ public boolean updateAllTypes() {
         return updateAllTypes;
     }
 
-    /** See {@link #updateAllTypes()} */
+    /**
+     * True if all fields that span multiple types should be updated, false otherwise.
+     * @deprecated useless with 6.x indices which may only have one type
+     */
+    @Deprecated
     public PutMappingRequest updateAllTypes(boolean updateAllTypes) {
         this.updateAllTypes = updateAllTypes;
         return this;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java
index 43bfe78c4871b..e6fcd841130ef 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java
@@ -98,7 +98,9 @@ public PutMappingRequestBuilder setSource(Object... source) {
         return this;
     }
 
-    /** True if all fields that span multiple types should be updated, false otherwise */
+    /** True if all fields that span multiple types should be updated, false otherwise
+     * @deprecated useless with 6.x indices which may only have one type */
+    @Deprecated
     public PutMappingRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
         request.updateAllTypes(updateAllTypes);
         return this;
diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
index e2f4d7697b62d..c3c6de5355af4 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
@@ -194,6 +194,16 @@ public synchronized <T> void addSettingsUpdateConsumer(Setting<T> setting, Consu
         addSettingsUpdater(setting.newUpdater(consumer, logger, validator));
     }
 
+    /**
+     * Adds a settings consumer that is only executed if any setting in the supplied list of settings is changed. In that case all the
+     * settings specified in the argument are returned.
+     *
+     * Also automatically adds empty consumers for all settings in order to activate logging.
+     */
+    public synchronized void addSettingsUpdateConsumer(Consumer<Settings> consumer, List<? extends Setting<?>> settings) {
+        addSettingsUpdater(Setting.groupedSettingsUpdater(consumer, logger, settings));
+    }
+
     /**
      * Adds a settings consumer for affix settings. Affix settings have a namespace associated to it that needs to be available to the
      * consumer in order to be processed correctly.
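The grouped overload added to `AbstractScopedSettings` above registers a single callback for a whole list of settings. From a component's point of view the wiring looks roughly like the sketch below; the setting names, the `ClusterSettings` handed in at construction time, and the `rebuild` logic are assumptions for illustration, not part of this patch:

[source,java]
--------------------------------------------------
import java.util.Arrays;

import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;

public class RemoteConnectionStrategy {
    // Two related dynamic settings that should always be applied together.
    // The keys are made up for this example; real settings must also be
    // registered (e.g. via Plugin#getSettings) so ClusterSettings knows them.
    static final Setting<Integer> MAX_CONNECTIONS =
            Setting.intSetting("example.remote.max_connections", 3, Property.Dynamic, Property.NodeScope);
    static final Setting<Integer> MAX_RETRIES =
            Setting.intSetting("example.remote.max_retries", 5, Property.Dynamic, Property.NodeScope);

    private volatile int maxConnections;
    private volatile int maxRetries;

    RemoteConnectionStrategy(Settings settings, ClusterSettings clusterSettings) {
        this.maxConnections = MAX_CONNECTIONS.get(settings);
        this.maxRetries = MAX_RETRIES.get(settings);
        // One consumer for both settings: it fires once per update that touches
        // either key and receives a Settings object filtered down to the listed keys.
        clusterSettings.addSettingsUpdateConsumer(this::rebuild, Arrays.asList(MAX_CONNECTIONS, MAX_RETRIES));
    }

    private void rebuild(Settings changed) {
        // Read both values from the same snapshot so they stay consistent.
        this.maxConnections = MAX_CONNECTIONS.get(changed);
        this.maxRetries = MAX_RETRIES.get(changed);
        // ... tear down and reopen connections using the new values ...
    }
}
--------------------------------------------------

Compared with registering two independent consumers, the grouped updater applies both values in one callback, so an update that changes both settings cannot be observed half-applied.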
diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java
index fd91a8a7601c6..f7f67e424cc8d 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -509,10 +509,10 @@ public Tuple<A, B> getValue(Settings current, Settings previous) {
             @Override
             public void apply(Tuple<A, B> value, Settings current, Settings previous) {
                 if (aSettingUpdater.hasChanged(current, previous)) {
-                    logger.info("updating [{}] from [{}] to [{}]", aSetting.key, aSetting.getRaw(previous), aSetting.getRaw(current));
+                    logSettingUpdate(aSetting, current, previous, logger);
                 }
                 if (bSettingUpdater.hasChanged(current, previous)) {
-                    logger.info("updating [{}] from [{}] to [{}]", bSetting.key, bSetting.getRaw(previous), bSetting.getRaw(current));
+                    logSettingUpdate(bSetting, current, previous, logger);
                 }
                 consumer.accept(value.v1(), value.v2());
             }
@@ -524,6 +524,46 @@ public String toString() {
         };
     }
 
+    static AbstractScopedSettings.SettingUpdater<Settings> groupedSettingsUpdater(Consumer<Settings> consumer, Logger logger,
+                                                                                  final List<? extends Setting<?>> configuredSettings) {
+
+        return new AbstractScopedSettings.SettingUpdater<Settings>() {
+
+            private Settings get(Settings settings) {
+                return settings.filter(s -> {
+                    for (Setting<?> setting : configuredSettings) {
+                        if (setting.key.match(s)) {
+                            return true;
+                        }
+                    }
+                    return false;
+                });
+            }
+
+            @Override
+            public boolean hasChanged(Settings current, Settings previous) {
+                Settings currentSettings = get(current);
+                Settings previousSettings = get(previous);
+                return currentSettings.equals(previousSettings) == false;
+            }
+
+            @Override
+            public Settings getValue(Settings current, Settings previous) {
+                return get(current);
+            }
+
+            @Override
+            public void apply(Settings value, Settings current, Settings previous) {
+                consumer.accept(value);
+            }
+
+            @Override
+            public String toString() {
+                return "Updater grouped: " + configuredSettings.stream().map(Setting::getKey).collect(Collectors.joining(", "));
+            }
+        };
+    }
+
     public static class AffixSetting<T> extends Setting<T> {
         private final AffixKey key;
         private final Function<String, Setting<T>> delegateFactory;
@@ -541,7 +581,7 @@ boolean isGroupSetting() {
         }
 
         private Stream<String> matchStream(Settings settings) {
-            return settings.keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey));
+            return settings.keySet().stream().filter(this::match).map(key::getConcreteString);
         }
 
         public Set<String> getSettingsDependencies(String settingsKey) {
@@ -812,9 +852,7 @@ public Settings getValue(Settings current, Settings previous) {
 
             @Override
             public void apply(Settings value, Settings current, Settings previous) {
-                if (logger.isInfoEnabled()) { // getRaw can create quite some objects
-                    logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current));
-                }
+                Setting.logSettingUpdate(GroupSetting.this, current, previous, logger);
                 consumer.accept(value);
             }
@@ -902,7 +940,7 @@ public T getValue(Settings current, Settings previous) {
 
         @Override
         public void apply(T value, Settings current, Settings previous) {
-            logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current));
+            logSettingUpdate(Setting.this, current, previous, logger);
             consumer.accept(value);
         }
     }
@@ -1138,6 +1176,16 @@ private static String arrayToParsableString(List<String> array) {
         }
     }
 
+    static void logSettingUpdate(Setting setting, Settings current, Settings previous, Logger logger) {
+        if (logger.isInfoEnabled()) {
+            if (setting.isFiltered()) {
+                logger.info("updating [{}]", setting.key);
+            } else {
+                logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getRaw(previous), setting.getRaw(current));
+            }
+        }
+    }
+
     public static Setting<Settings> groupSetting(String key, Property... properties) {
         return groupSetting(key, (s) -> {}, properties);
     }
@@ -1308,8 +1356,8 @@ public static final class AffixKey implements Key {
         if (suffix == null) {
             pattern = Pattern.compile("(" + Pattern.quote(prefix) + "((?:[-\\w]+[.])*[-\\w]+$))");
         } else {
-            // the last part of this regexp is for lists since they are represented as x.${namespace}.y.1, x.${namespace}.y.2
-            pattern = Pattern.compile("(" + Pattern.quote(prefix) + "([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\.\\d+)?");
+            // the last part of this regexp is to support both list and group keys
+            pattern = Pattern.compile("(" + Pattern.quote(prefix) + "([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\..*)?");
         }
     }
diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ReleasableLock.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ReleasableLock.java
index 9c90b3bbde313..9cc5cf7bd8188 100644
--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ReleasableLock.java
+++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ReleasableLock.java
@@ -74,7 +74,7 @@ private boolean removeCurrentThread() {
         return true;
     }
 
-    public Boolean isHeldByCurrentThread() {
+    public boolean isHeldByCurrentThread() {
         if (holdingThreads == null) {
             throw new UnsupportedOperationException("asserts must be enabled");
         }
diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index b11ecacd977aa..8d7737344a1f4 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -1310,8 +1310,11 @@ public void openIndexAndCreateTranslog(boolean forceNewHistoryUUID, long globalC
             assert commitInfo.localCheckpoint >= globalCheckpoint :
                 "trying to create a shard whose local checkpoint [" + commitInfo.localCheckpoint
                     + "] is < global checkpoint [" + globalCheckpoint + "]";
-            final List<IndexCommit> existingCommits = DirectoryReader.listCommits(store.directory());
-            assert existingCommits.size() == 1 : "Open index create translog should have one commit, commits[" + existingCommits + "]";
+            // This assertion is only guaranteed if all nodes are on 6.2+.
+ if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) { + final List existingCommits = DirectoryReader.listCommits(store.directory()); + assert existingCommits.size() == 1 : "Open index create translog should have one commit, commits[" + existingCommits + "]"; + } } globalCheckpointTracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "opening index with a new translog"); innerOpenEngineAndTranslog(EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG, forceNewHistoryUUID); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 6a741fd3951d3..77c397f98556e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -33,6 +35,9 @@ import java.io.IOException; public class RestCreateIndexAction extends BaseRestHandler { + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RestCreateIndexAction.class)); + public RestCreateIndexAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.PUT, "/{index}", this); @@ -49,6 +54,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (request.hasContent()) { createIndexRequest.source(request.content(), request.getXContentType()); } + if (request.hasParam("update_all_types")) { + DEPRECATION_LOGGER.deprecated("[update_all_types] is deprecated since indices may not have more than one type anymore"); + } createIndexRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false)); createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout())); createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index 8d7e4a9e6c836..473d8ba005436 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -36,6 +38,9 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { + + private static final DeprecationLogger DEPRECATION_LOGGER = new 
DeprecationLogger(Loggers.getLogger(RestPutMappingAction.class)); + public RestPutMappingAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(PUT, "/{index}/_mapping/", this); @@ -70,6 +75,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); putMappingRequest.type(request.param("type")); putMappingRequest.source(request.requiredContent(), request.getXContentType()); + if (request.hasParam("update_all_types")) { + DEPRECATION_LOGGER.deprecated("[update_all_types] is deprecated since indices may not have more than one type anymore"); + } putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false)); putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout())); putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout())); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java index 9a22b2e378140..8147f94487f9b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregation.java @@ -52,6 +52,9 @@ static XContentBuilder bucketToXContent(CompositeAggregation.Bucket bucket, } static XContentBuilder toXContentFragment(CompositeAggregation aggregation, XContentBuilder builder, Params params) throws IOException { + if (aggregation.afterKey() != null) { + buildCompositeMap("after_key", aggregation.afterKey(), builder); + } builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (CompositeAggregation.Bucket bucket : aggregation.getBuckets()) { bucketToXContent(bucket, builder, params); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 5b36063e17ac0..58a15bbb36684 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -147,17 +147,15 @@ protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory< Sort sort = indexSortConfig.buildIndexSort(shardContext::fieldMapper, shardContext::getForField); System.arraycopy(sort.getSort(), 0, sortFields, 0, sortFields.length); } - List sourceNames = new ArrayList<>(); for (int i = 0; i < configs.length; i++) { configs[i] = sources.get(i).build(context, i, configs.length, sortFields[i]); - sourceNames.add(sources.get(i).name()); if (configs[i].valuesSource().needsScores()) { throw new IllegalArgumentException("[sources] cannot access _score"); } } final CompositeKey afterKey; if (after != null) { - if (after.size() != sources.size()) { + if (after.size() != configs.length) { throw new IllegalArgumentException("[after] has " + after.size() + " value(s) but [sources] has " + sources.size()); } @@ -179,7 +177,7 @@ protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory< } else { afterKey = null; } - return new CompositeAggregationFactory(name, context, 
parent, subfactoriesBuilder, metaData, size, configs, sourceNames, afterKey); + return new CompositeAggregationFactory(name, context, parent, subfactoriesBuilder, metaData, size, configs, afterKey); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java index c0aeb5304a580..2b2fa4fb7e3eb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationFactory.java @@ -32,17 +32,14 @@ class CompositeAggregationFactory extends AggregatorFactory { private final int size; private final CompositeValuesSourceConfig[] sources; - private final List sourceNames; private final CompositeKey afterKey; CompositeAggregationFactory(String name, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData, - int size, CompositeValuesSourceConfig[] sources, - List sourceNames, CompositeKey afterKey) throws IOException { + int size, CompositeValuesSourceConfig[] sources, CompositeKey afterKey) throws IOException { super(name, context, parent, subFactoriesBuilder, metaData); this.size = size; this.sources = sources; - this.sourceNames = sourceNames; this.afterKey = afterKey; } @@ -50,6 +47,6 @@ class CompositeAggregationFactory extends AggregatorFactory pipelineAggregators, Map metaData) throws IOException { return new CompositeAggregator(name, factories, context, parent, pipelineAggregators, metaData, - size, sources, sourceNames, afterKey); + size, sources, afterKey); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 3467aaf318baf..830aba3bcf1e1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.RoaringDocIdSet; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -43,11 +44,13 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; +import java.util.stream.Collectors; final class CompositeAggregator extends BucketsAggregator { private final int size; private final CompositeValuesSourceConfig[] sources; private final List sourceNames; + private final List formats; private final boolean canEarlyTerminate; private final TreeMap keys; @@ -59,12 +62,12 @@ final class CompositeAggregator extends BucketsAggregator { CompositeAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData, - int size, CompositeValuesSourceConfig[] sources, List sourceNames, - CompositeKey rawAfterKey) throws IOException { + int size, CompositeValuesSourceConfig[] sources, CompositeKey rawAfterKey) throws IOException { super(name, factories, context, parent, pipelineAggregators, metaData); this.size = size; 
this.sources = sources; - this.sourceNames = sourceNames; + this.sourceNames = Arrays.stream(sources).map(CompositeValuesSourceConfig::name).collect(Collectors.toList()); + this.formats = Arrays.stream(sources).map(CompositeValuesSourceConfig::format).collect(Collectors.toList()); // we use slot 0 to fill the current document (size+1). this.array = new CompositeValuesComparator(context.searcher().getIndexReader(), sources, size+1); if (rawAfterKey != null) { @@ -131,15 +134,18 @@ public InternalAggregation buildAggregation(long zeroBucket) throws IOException CompositeKey key = array.toCompositeKey(slot); InternalAggregations aggs = bucketAggregations(slot); int docCount = bucketDocCount(slot); - buckets[pos++] = new InternalComposite.InternalBucket(sourceNames, key, reverseMuls, docCount, aggs); + buckets[pos++] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs); } - return new InternalComposite(name, size, sourceNames, Arrays.asList(buckets), reverseMuls, pipelineAggregators(), metaData()); + CompositeKey lastBucket = num > 0 ? buckets[num-1].getRawKey() : null; + return new InternalComposite(name, size, sourceNames, formats, Arrays.asList(buckets), lastBucket, reverseMuls, + pipelineAggregators(), metaData()); } @Override public InternalAggregation buildEmptyAggregation() { final int[] reverseMuls = getReverseMuls(); - return new InternalComposite(name, size, sourceNames, Collections.emptyList(), reverseMuls, pipelineAggregators(), metaData()); + return new InternalComposite(name, size, sourceNames, formats, Collections.emptyList(), null, reverseMuls, + pipelineAggregators(), metaData()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java index 6f3aacc9f8250..51c5a7c5a887f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java @@ -19,18 +19,38 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; import java.util.Arrays; /** * A key that is composed of multiple {@link Comparable} values. */ -class CompositeKey { +class CompositeKey implements Writeable { private final Comparable[] values; CompositeKey(Comparable... 
values) { this.values = values; } + CompositeKey(StreamInput in) throws IOException { + values = new Comparable[in.readVInt()]; + for (int i = 0; i < values.length; i++) { + values[i] = (Comparable) in.readGenericValue(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(values.length); + for (int i = 0; i < values.length; i++) { + out.writeGenericValue(values[i]); + } + } + Comparable[] values() { return values; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesComparator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesComparator.java index 849fe2c513e9b..0ce87460a5429 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesComparator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesComparator.java @@ -56,7 +56,7 @@ final class CompositeValuesComparator { if (vs.isFloatingPoint()) { arrays[i] = CompositeValuesSource.wrapDouble(vs, size, reverseMul); } else { - arrays[i] = CompositeValuesSource.wrapLong(vs, size, reverseMul); + arrays[i] = CompositeValuesSource.wrapLong(vs, sources[i].format(), size, reverseMul); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSource.java index 88d54744777e0..2d0368dfd4d28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSource.java @@ -23,8 +23,10 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.sort.SortOrder; @@ -96,8 +98,9 @@ interface Collector { /** * Creates a {@link CompositeValuesSource} that generates long values. 
*/ - static CompositeValuesSource wrapLong(ValuesSource.Numeric vs, int size, int reverseMul) { - return new LongValuesSource(vs, size, reverseMul); + static CompositeValuesSource wrapLong(ValuesSource.Numeric vs, DocValueFormat format, + int size, int reverseMul) { + return new LongValuesSource(vs, format, size, reverseMul); } /** @@ -273,9 +276,12 @@ Collector getLeafCollector(LeafReaderContext context, Collector next) throws IOE */ private static class LongValuesSource extends CompositeValuesSource { private final long[] values; + // handles "format" for date histogram source + private final DocValueFormat format; - LongValuesSource(ValuesSource.Numeric vs, int size, int reverseMul) { + LongValuesSource(ValuesSource.Numeric vs, DocValueFormat format, int size, int reverseMul) { super(vs, size, reverseMul); + this.format = format; this.values = new long[size]; } @@ -304,7 +310,11 @@ void setTop(Comparable value) { if (value instanceof Number) { topValue = ((Number) value).longValue(); } else { - topValue = Long.parseLong(value.toString()); + // for date histogram source with "format", the after value is formatted + // as a string so we need to retrieve the original value in milliseconds. + topValue = format.parseLong(value.toString(), false, () -> { + throw new IllegalArgumentException("now() is not supported in [after] key"); + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java index 2652d90f8c3e7..2e06d7c9fe30b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.SortField; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -51,6 +52,7 @@ public abstract class CompositeValuesSourceBuilder config = ValuesSourceConfig.resolve(context.getQueryShardContext(), - valueType, field, script, missing, null, null); + valueType, field, script, missing, null, format); return innerBuild(context, config, pos, numPos, sortField); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java index 4d5c1c8c84683..ee70d3f39a550 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java @@ -19,30 +19,47 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.sort.SortOrder; class CompositeValuesSourceConfig { private final String name; private final ValuesSource vs; + private final DocValueFormat format; private final int reverseMul; private final boolean canEarlyTerminate; - CompositeValuesSourceConfig(String name, ValuesSource vs, 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java index 4d5c1c8c84683..ee70d3f39a550 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java @@ -19,30 +19,47 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.sort.SortOrder; class CompositeValuesSourceConfig { private final String name; private final ValuesSource vs; + private final DocValueFormat format; private final int reverseMul; private final boolean canEarlyTerminate; - CompositeValuesSourceConfig(String name, ValuesSource vs, SortOrder order, boolean canEarlyTerminate) { + CompositeValuesSourceConfig(String name, ValuesSource vs, DocValueFormat format, SortOrder order, boolean canEarlyTerminate) { this.name = name; this.vs = vs; + this.format = format; this.canEarlyTerminate = canEarlyTerminate; this.reverseMul = order == SortOrder.ASC ? 1 : -1; } + /** + * Returns the name associated with this configuration. + */ String name() { return name; } + /** + * Returns the {@link ValuesSource} for this configuration. + */ ValuesSource valuesSource() { return vs; } + /** + * The {@link DocValueFormat} to use for formatting the keys. + * {@link DocValueFormat#RAW} means no formatting. + */ + DocValueFormat format() { + return format; + } /** * The sort order for the values source (e.g. -1 for descending and 1 for ascending). */ @@ -51,6 +68,9 @@ int reverseMul() { return reverseMul; } + /** + * Returns whether this {@link ValuesSource} is used to sort the index. + */ boolean canEarlyTerminate() { return canEarlyTerminate; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 0094da5069fd7..b7abf82a58ea3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -30,6 +30,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.support.FieldContext; @@ -46,8 +48,8 @@ import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS; /** - * A {@link CompositeValuesSourceBuilder} that that builds a {@link RoundingValuesSource} from a {@link Script} or - * a field name. + * A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a {@link Script} or + * a field name using the provided interval. */ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuilder&lt;DateHistogramValuesSourceBuilder&gt; { static final String TYPE = "date_histogram"; private static final ObjectParser&lt;DateHistogramValuesSourceBuilder, Void&gt; PARSER; static { PARSER = new ObjectParser&lt;&gt;(DateHistogramValuesSourceBuilder.TYPE); + PARSER.declareString(DateHistogramValuesSourceBuilder::format, new ParseField("format")); PARSER.declareField((histogram, interval) -> { if (interval instanceof Long) { histogram.interval((long) interval); @@ -235,7 +238,11 @@ protected CompositeValuesSourceConfig innerBuild(SearchContext context, canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(), fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField); } - return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate); + // dates are returned as timestamps in milliseconds-since-the-epoch unless a specific date format + // is specified in the builder.
+ final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format(); + return new CompositeValuesSourceConfig(name, vs, docValueFormat, + order(), canEarlyTerminate); } else { throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java index dd5eb1b52d04c..83ada5dbbc3c3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java @@ -37,7 +37,7 @@ import java.util.Objects; /** - * A {@link CompositeValuesSourceBuilder} that that builds a {@link HistogramValuesSource} from another numeric values source + * A {@link CompositeValuesSourceBuilder} that builds a {@link HistogramValuesSource} from another numeric values source * using the provided interval. */ public class HistogramValuesSourceBuilder extends CompositeValuesSourceBuilder&lt;HistogramValuesSourceBuilder&gt; { @@ -128,7 +128,7 @@ protected CompositeValuesSourceConfig innerBuild(SearchContext context, canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(), fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField); } - return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate); + return new CompositeValuesSourceConfig(name, vs, config.format(), order(), canEarlyTerminate); } else { throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 824250948d740..c9cb320d80d99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -20,9 +20,11 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -47,14 +49,19 @@ public class InternalComposite extends InternalMultiBucketAggregation&lt;InternalComposite, InternalComposite.InternalBucket&gt; implements CompositeAggregation { private final int size; private final List&lt;InternalBucket&gt; buckets; + private final CompositeKey afterKey; private final int[] reverseMuls; private final List&lt;String&gt; sourceNames; + private final List&lt;DocValueFormat&gt; formats; - InternalComposite(String name, int size, List&lt;String&gt; sourceNames, List&lt;InternalBucket&gt; buckets, int[] reverseMuls, + InternalComposite(String name, int size, List&lt;String&gt; sourceNames, List&lt;DocValueFormat&gt; formats, + List&lt;InternalBucket&gt; buckets, CompositeKey afterKey, int[] reverseMuls, List&lt;PipelineAggregator&gt; pipelineAggregators, Map&lt;String, Object&gt; metaData) { super(name, pipelineAggregators, metaData); this.sourceNames = sourceNames; + this.formats = formats; this.buckets = buckets; + this.afterKey = afterKey; this.size = size; this.reverseMuls = reverseMuls; } @@ -63,16 +70,40 @@
public InternalComposite(StreamInput in) throws IOException { super(in); this.size = in.readVInt(); this.sourceNames = in.readList(StreamInput::readString); + this.formats = new ArrayList&lt;&gt;(sourceNames.size()); + for (int i = 0; i &lt; sourceNames.size(); i++) { + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + formats.add(in.readNamedWriteable(DocValueFormat.class)); + } else { + formats.add(DocValueFormat.RAW); + } + } this.reverseMuls = in.readIntArray(); - this.buckets = in.readList((input) -> new InternalBucket(input, sourceNames, reverseMuls)); + this.buckets = in.readList((input) -> new InternalBucket(input, sourceNames, formats, reverseMuls)); + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + this.afterKey = in.readBoolean() ? new CompositeKey(in) : null; + } else { + this.afterKey = buckets.size() > 0 ? buckets.get(buckets.size()-1).key : null; + } } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeVInt(size); out.writeStringList(sourceNames); + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + for (DocValueFormat format : formats) { + out.writeNamedWriteable(format); + } + } out.writeIntArray(reverseMuls); out.writeList(buckets); + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + out.writeBoolean(afterKey != null); + if (afterKey != null) { + afterKey.writeTo(out); + } + } } @Override @@ -86,13 +117,20 @@ public String getWriteableName() { } @Override - public InternalComposite create(List&lt;InternalBucket&gt; buckets) { - return new InternalComposite(name, size, sourceNames, buckets, reverseMuls, pipelineAggregators(), getMetaData()); + public InternalComposite create(List&lt;InternalBucket&gt; newBuckets) { + /** + * This is used by pipeline aggregations to filter/remove buckets so we + * keep the afterKey of the original aggregation in order + * to be able to retrieve the next page even if all buckets have been filtered. + */ + return new InternalComposite(name, size, sourceNames, formats, newBuckets, afterKey, + reverseMuls, pipelineAggregators(), getMetaData()); } @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.sourceNames, prototype.key, prototype.reverseMuls, prototype.docCount, aggregations); + return new InternalBucket(prototype.sourceNames, prototype.formats, prototype.key, prototype.reverseMuls, + prototype.docCount, aggregations); } public int getSize() { @@ -106,7 +144,10 @@ public List&lt;InternalBucket&gt; getBuckets() { @Override public Map&lt;String, Object&gt; afterKey() { - return buckets.size() > 0 ? buckets.get(buckets.size()-1).getKey() : null; + if (afterKey != null) { + return new ArrayMap(sourceNames, formats, afterKey.values()); + } + return null; } // Visible for tests @@ -149,7 +190,8 @@ public InternalAggregation doReduce(List&lt;InternalAggregation&gt; aggregations, Redu reduceContext.consumeBucketsAndMaybeBreak(1); result.add(reduceBucket); } - return new InternalComposite(name, size, sourceNames, result, reverseMuls, pipelineAggregators(), metaData);
+ final CompositeKey lastKey = result.size() > 0 ? result.get(result.size()-1).getRawKey() : null; + return new InternalComposite(name, size, sourceNames, formats, result, lastKey, reverseMuls, pipelineAggregators(), metaData); } @Override @@ -157,12 +199,13 @@ protected boolean doEquals(Object obj) { InternalComposite that = (InternalComposite) obj; return Objects.equals(size, that.size) && Objects.equals(buckets, that.buckets) && + Objects.equals(afterKey, that.afterKey) && Arrays.equals(reverseMuls, that.reverseMuls); } @Override protected int doHashCode() { - return Objects.hash(size, buckets, Arrays.hashCode(reverseMuls)); + return Objects.hash(size, buckets, afterKey, Arrays.hashCode(reverseMuls)); } private static class BucketIterator implements Comparable&lt;BucketIterator&gt; { @@ -191,35 +234,32 @@ static class InternalBucket extends InternalMultiBucketAggregation.InternalBucke private final InternalAggregations aggregations; private final transient int[] reverseMuls; private final transient List&lt;String&gt; sourceNames; + private final transient List&lt;DocValueFormat&gt; formats; - InternalBucket(List&lt;String&gt; sourceNames, CompositeKey key, int[] reverseMuls, long docCount, InternalAggregations aggregations) { + InternalBucket(List&lt;String&gt; sourceNames, List&lt;DocValueFormat&gt; formats, CompositeKey key, int[] reverseMuls, long docCount, + InternalAggregations aggregations) { this.key = key; this.docCount = docCount; this.aggregations = aggregations; this.reverseMuls = reverseMuls; this.sourceNames = sourceNames; + this.formats = formats; } @SuppressWarnings("unchecked") - InternalBucket(StreamInput in, List&lt;String&gt; sourceNames, int[] reverseMuls) throws IOException { - final Comparable[] values = new Comparable[in.readVInt()]; - for (int i = 0; i &lt; values.length; i++) { - values[i] = (Comparable) in.readGenericValue(); - } - this.key = new CompositeKey(values); + InternalBucket(StreamInput in, List&lt;String&gt; sourceNames, List&lt;DocValueFormat&gt; formats, int[] reverseMuls) throws IOException { + this.key = new CompositeKey(in); this.docCount = in.readVLong(); this.aggregations = InternalAggregations.readAggregations(in); this.reverseMuls = reverseMuls; this.sourceNames = sourceNames; + this.formats = formats; } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(key.size()); - for (int i = 0; i &lt; key.size(); i++) { - out.writeGenericValue(key.get(i)); - } + key.writeTo(out); out.writeVLong(docCount); aggregations.writeTo(out); } @@ -242,9 +282,11 @@ public boolean equals(Object obj) { @Override public Map&lt;String, Object&gt; getKey() { - return new ArrayMap(sourceNames, key.values()); + // returns the formatted key in a map + return new ArrayMap(sourceNames, formats, key.values()); } + // get the raw key (without formatting), preserving the natural order.
// visible for testing CompositeKey getRawKey() { return key; @@ -260,7 +302,7 @@ public String getKeyAsString() { } builder.append(sourceNames.get(i)); builder.append('='); - builder.append(formatObject(key.get(i))); + builder.append(formatObject(key.get(i), formats.get(i))); } builder.append('}'); return builder.toString(); @@ -284,7 +326,7 @@ InternalBucket reduce(List&lt;InternalBucket&gt; buckets, ReduceContext reduceContext) aggregations.add(bucket.aggregations); } InternalAggregations aggs = InternalAggregations.reduce(aggregations, reduceContext); - return new InternalBucket(sourceNames, key, reverseMuls, docCount, aggs); + return new InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs); } @Override @@ -303,26 +345,52 @@ public int compareKey(InternalBucket other) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { /** * See {@link CompositeAggregation#bucketToXContent} */ throw new UnsupportedOperationException("not implemented"); } } - static Object formatObject(Object obj) { - if (obj instanceof BytesRef) { - return ((BytesRef) obj).utf8ToString(); + /** + * Formats obj using the provided {@link DocValueFormat}. + * If the format equals {@link DocValueFormat#RAW}, numbers are returned as is + * and {@link BytesRef}s are returned as utf8 strings. + */ + static Object formatObject(Object obj, DocValueFormat format) { + if (obj.getClass() == BytesRef.class) { + BytesRef value = (BytesRef) obj; + if (format == DocValueFormat.RAW) { + return value.utf8ToString(); + } else { + return format.format((BytesRef) obj); + } + } else if (obj.getClass() == Long.class) { + Long value = (Long) obj; + if (format == DocValueFormat.RAW) { + return value; + } else { + return format.format(value); + } + } else if (obj.getClass() == Double.class) { + Double value = (Double) obj; + if (format == DocValueFormat.RAW) { + return value; + } else { + return format.format((Double) obj); + } } return obj; } private static class ArrayMap extends AbstractMap&lt;String, Object&gt; { final List&lt;String&gt; keys; + final List&lt;DocValueFormat&gt; formats; final Object[] values; - ArrayMap(List&lt;String&gt; keys, Object[] values) { - assert keys.size() == values.length; + ArrayMap(List&lt;String&gt; keys, List&lt;DocValueFormat&gt; formats, Object[] values) { + assert keys.size() == values.length && keys.size() == formats.size(); this.keys = keys; + this.formats = formats; this.values = values; } @@ -335,7 +403,7 @@ public int size() { public Object get(Object key) { for (int i = 0; i &lt; keys.size(); i++) { if (key.equals(keys.get(i))) { - return formatObject(values[i]); + return formatObject(values[i], formats.get(i)); } } return null; @@ -356,7 +424,7 @@ public boolean hasNext() { @Override public Entry&lt;String, Object&gt; next() { SimpleEntry&lt;String, Object&gt; entry = - new SimpleEntry&lt;&gt;(keys.get(pos), formatObject(values[pos])); + new SimpleEntry&lt;&gt;(keys.get(pos), formatObject(values[pos], formats.get(pos))); ++ pos; return entry; }
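To make the split between the formatted key (`getKey()`, `getKeyAsString()`) and the raw key (`getRawKey()`) concrete, here is a small sketch of what `formatObject` produces. Note that `formatObject` is package-private, so this assumes a caller in the composite package; the class name is illustrative:

[source,java]
--------------------------------------------------
package org.elasticsearch.search.aggregations.bucket.composite;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;

class FormatObjectExample {
    static void example() {
        // With RAW, numbers stay numbers and BytesRefs become utf8 strings.
        Object rawLong = InternalComposite.formatObject(1508457600000L, DocValueFormat.RAW);      // -> 1508457600000
        Object rawTerm = InternalComposite.formatObject(new BytesRef("foo"), DocValueFormat.RAW); // -> "foo"
        // With a date format, the millis are rendered for display only; sorting and
        // the "after" cursor keep using the raw long value.
        DocValueFormat date = new DocValueFormat.DateTime(Joda.forPattern("yyyy-MM-dd"), DateTimeZone.UTC);
        Object formatted = InternalComposite.formatObject(1508457600000L, date);                  // -> "2017-10-20"
    }
}
--------------------------------------------------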
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java index a6c3fd3fb6f08..e7d6f775f1d87 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/ParsedComposite.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -33,15 +34,26 @@ public class ParsedComposite extends ParsedMultiBucketAggregation&lt;ParsedComposite.ParsedBucket&gt; implements CompositeAggregation { private static ObjectParser&lt;ParsedComposite, Void&gt; PARSER = new ObjectParser&lt;&gt;(ParsedComposite.class.getSimpleName(), true, ParsedComposite::new); static { + PARSER.declareField(ParsedComposite::setAfterKey, (p, c) -> p.mapOrdered(), new ParseField("after_key"), + ObjectParser.ValueType.OBJECT); declareMultiBucketAggregationFields(PARSER, parser -> ParsedComposite.ParsedBucket.fromXContent(parser), parser -> null ); } + private Map&lt;String, Object&gt; afterKey; + public static ParsedComposite fromXContent(XContentParser parser, String name) throws IOException { ParsedComposite aggregation = PARSER.parse(parser, null); aggregation.setName(name); + if (aggregation.afterKey == null && aggregation.getBuckets().size() > 0) { + /** + * Previous versions (&lt; 6.3) don't send the afterKey + * in the response, so we set it to the key of the last returned bucket. + */ + aggregation.setAfterKey(aggregation.getBuckets().get(aggregation.getBuckets().size()-1).key); + } return aggregation; } @@ -57,9 +69,16 @@ public List&lt;ParsedBucket&gt; getBuckets() { @Override public Map&lt;String, Object&gt; afterKey() { + if (afterKey != null) { + return afterKey; + } return buckets.size() > 0 ? buckets.get(buckets.size()-1).getKey() : null; } + private void setAfterKey(Map&lt;String, Object&gt; afterKey) { + this.afterKey = afterKey; + } + @Override protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { return CompositeAggregation.toXContentFragment(this, builder, params); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java index 481c14a37f504..6ca5cdbcb6230 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java @@ -95,6 +95,6 @@ protected CompositeValuesSourceConfig innerBuild(SearchContext context, canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(), fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField); } - return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate); + return new CompositeValuesSourceConfig(name, vs, config.format(), order(), canEarlyTerminate); } }
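All three source types now carry a `DocValueFormat` into `CompositeValuesSourceConfig`, and the user-visible effect is easiest to see from the builders. A request-side sketch, mirroring the test code later in this patch (the class and variable names are illustrative):

[source,java]
--------------------------------------------------
import java.util.Collections;

import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class CompositeFormatUsage {
    static CompositeAggregationBuilder example() {
        // Keys of this source are rendered as "yyyy-MM-dd" strings in the buckets and in after_key...
        DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
            .field("date")
            .dateHistogramInterval(DateHistogramInterval.days(1))
            .format("yyyy-MM-dd");
        // ...and the "after" cursor accepts the same formatted representation back.
        return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
            .aggregateAfter(Collections.singletonMap("date", "2016-09-20"));
    }
}
--------------------------------------------------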
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 4a4beb2e0e3ef..180f11730dfed 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -712,4 +713,79 @@ public void testTimeValue() { assertThat(setting.get(Settings.EMPTY).getMillis(), equalTo(random.getMillis() * factor)); } + public void testSettingsGroupUpdater() { + Setting&lt;Integer&gt; intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic); + Setting&lt;Integer&gt; intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic); + AbstractScopedSettings.SettingUpdater&lt;Settings&gt; updater = Setting.groupedSettingsUpdater(s -> {}, logger, + Arrays.asList(intSetting, intSetting2)); + + Settings current = Settings.builder().put("prefix.foo", 123).put("prefix.same", 5555).build(); + Settings previous = Settings.builder().put("prefix.foo", 321).put("prefix.same", 5555).build(); + assertTrue(updater.apply(current, previous)); + } + + public void testSettingsGroupUpdaterRemoval() { + Setting&lt;Integer&gt; intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic); + Setting&lt;Integer&gt; intSetting2 = Setting.intSetting("prefix.same", 1, Property.NodeScope, Property.Dynamic); + AbstractScopedSettings.SettingUpdater&lt;Settings&gt; updater = Setting.groupedSettingsUpdater(s -> {}, logger, + Arrays.asList(intSetting, intSetting2)); + + Settings current = Settings.builder().put("prefix.same", 5555).build(); + Settings previous = Settings.builder().put("prefix.foo", 321).put("prefix.same", 5555).build(); + assertTrue(updater.apply(current, previous)); + } + + public void testSettingsGroupUpdaterWithAffixSetting() { + Setting&lt;Integer&gt; intSetting = Setting.intSetting("prefix.foo", 1, Property.NodeScope, Property.Dynamic); + Setting.AffixSetting&lt;String&gt; prefixKeySetting = + Setting.prefixKeySetting("prefix.foo.bar.", key -> Setting.simpleString(key, Property.NodeScope, Property.Dynamic)); + Setting.AffixSetting&lt;String&gt; affixSetting = + Setting.affixKeySetting("prefix.foo.", "suffix", key -> Setting.simpleString(key, Property.NodeScope, Property.Dynamic)); + + AbstractScopedSettings.SettingUpdater&lt;Settings&gt; updater = Setting.groupedSettingsUpdater(s -> {}, logger, + Arrays.asList(intSetting, prefixKeySetting, affixSetting)); + + Settings.Builder currentSettingsBuilder = Settings.builder() + .put("prefix.foo.bar.baz", "foo") + .put("prefix.foo.infix.suffix", "foo"); + Settings.Builder previousSettingsBuilder = Settings.builder() + .put("prefix.foo.bar.baz", "foo") + .put("prefix.foo.infix.suffix", "foo"); + boolean removePrefixKeySetting = randomBoolean(); + boolean changePrefixKeySetting = randomBoolean(); + boolean removeAffixKeySetting = randomBoolean(); + boolean changeAffixKeySetting = randomBoolean(); + boolean removeAffixNamespace = randomBoolean(); + + if (removePrefixKeySetting) { + previousSettingsBuilder.remove("prefix.foo.bar.baz"); + }
+ if (changePrefixKeySetting) { + currentSettingsBuilder.put("prefix.foo.bar.baz", "bar"); + } + if (removeAffixKeySetting) { + previousSettingsBuilder.remove("prefix.foo.infix.suffix"); + } + if (changeAffixKeySetting) { + currentSettingsBuilder.put("prefix.foo.infix.suffix", "bar"); + } + if (removeAffixKeySetting == false && changeAffixKeySetting == false && removeAffixNamespace) { + currentSettingsBuilder.remove("prefix.foo.infix.suffix"); + currentSettingsBuilder.put("prefix.foo.infix2.suffix", "bar"); + previousSettingsBuilder.put("prefix.foo.infix2.suffix", "bar"); + } + + boolean expectedChange = removeAffixKeySetting || removePrefixKeySetting || changeAffixKeySetting || changePrefixKeySetting + || removeAffixNamespace; + assertThat(updater.apply(currentSettingsBuilder.build(), previousSettingsBuilder.build()), is(expectedChange)); + } + + public void testAffixNamespacesWithGroupSetting() { + final Setting.AffixSetting&lt;Settings&gt; affixSetting = + Setting.affixKeySetting("prefix.", "suffix", + (key) -> Setting.groupSetting(key + ".", Setting.Property.Dynamic, Setting.Property.NodeScope)); + + assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix", "anything").build()), hasSize(1)); + assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix.anything", "anything").build()), hasSize(1)); + } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 9e6d4be7095f0..dfece2d9d459c 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -18,16 +18,22 @@ */ package org.elasticsearch.common.settings; -import org.elasticsearch.common.Strings; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.logging.ServerLoggers; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; +import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.equalTo; @@ -100,7 +106,43 @@ public void testSettingsFiltering() throws IOException { .build(), "a.b.*.d" ); + } + + public void testFilteredSettingIsNotLogged() throws Exception { + Settings oldSettings = Settings.builder().put("key", "old").build(); + Settings newSettings = Settings.builder().put("key", "new").build(); + + Setting&lt;String&gt; filteredSetting = Setting.simpleString("key", Property.Filtered); + assertExpectedLogMessages((testLogger) -> Setting.logSettingUpdate(filteredSetting, newSettings, oldSettings, testLogger), + new MockLogAppender.SeenEventExpectation("secure logging", "org.elasticsearch.test", Level.INFO, "updating [key]"), + new MockLogAppender.UnseenEventExpectation("unwanted old setting name", "org.elasticsearch.test", Level.INFO, "*old*"), + new MockLogAppender.UnseenEventExpectation("unwanted new setting name", "org.elasticsearch.test", Level.INFO, "*new*") + ); + }
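These two tests pin down the logging contract of `Setting.logSettingUpdate`: a `Property.Filtered` setting logs only that it changed, never its values. A hedged usage sketch (`logSettingUpdate` is visible to callers in the same package, and `logger` stands for any log4j `Logger`):

[source,java]
--------------------------------------------------
package org.elasticsearch.common.settings; // logSettingUpdate is package-visible

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Setting.Property;

class FilteredLoggingSketch {
    static void example(Logger logger) {
        Settings oldSettings = Settings.builder().put("key", "old").build();
        Settings newSettings = Settings.builder().put("key", "new").build();
        // Filtered: logs "updating [key]" and omits both values.
        Setting.logSettingUpdate(Setting.simpleString("key", Property.Filtered), newSettings, oldSettings, logger);
        // Regular: logs "updating [key] from [old] to [new]".
        Setting.logSettingUpdate(Setting.simpleString("key"), newSettings, oldSettings, logger);
    }
}
--------------------------------------------------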
+ + public void testRegularSettingUpdateIsFullyLogged() throws Exception { + Settings oldSettings = Settings.builder().put("key", "old").build(); + Settings newSettings = Settings.builder().put("key", "new").build(); + + Setting&lt;String&gt; regularSetting = Setting.simpleString("key"); + assertExpectedLogMessages((testLogger) -> Setting.logSettingUpdate(regularSetting, newSettings, oldSettings, testLogger), + new MockLogAppender.SeenEventExpectation("regular logging", "org.elasticsearch.test", Level.INFO, + "updating [key] from [old] to [new]")); + } + private void assertExpectedLogMessages(Consumer&lt;Logger&gt; consumer, + MockLogAppender.LoggingExpectation ... expectations) throws IllegalAccessException { + Logger testLogger = Loggers.getLogger("org.elasticsearch.test"); + MockLogAppender appender = new MockLogAppender(); + ServerLoggers.addAppender(testLogger, appender); + try { + appender.start(); + Arrays.stream(expectations).forEach(appender::addExpectation); + consumer.accept(testLogger); + appender.assertAllExpectationsMatched(); + } finally { + ServerLoggers.removeAppender(testLogger, appender); + } } private void testFiltering(Settings source, Settings filtered, String... patterns) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 172aebbc0e5dc..094457a8bf4f6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -39,6 +39,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -68,6 +69,9 @@ import java.util.function.Consumer; import java.util.function.Supplier; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + public class CompositeAggregatorTests extends AggregatorTestCase { private static MappedFieldType[] FIELD_TYPES; @@ -125,6 +129,7 @@ public void testWithKeyword() throws Exception { return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); @@ -142,6 +147,7 @@ public void testWithKeyword() throws Exception { .aggregateAfter(Collections.singletonMap("keyword", "a")); }, (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=d}", result.afterKey().toString()); assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); @@ -170,6 +176,7 @@ public void testWithKeywordMissingAfter() throws Exception { return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); }, (result) -> { assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=zoo}", result.afterKey().toString()); assertEquals("{keyword=bar}", result.getBuckets().get(0).getKeyAsString());
assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); @@ -189,6 +196,7 @@ public void testWithKeywordMissingAfter() throws Exception { .aggregateAfter(Collections.singletonMap("keyword", "car")); }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=zoo}", result.afterKey().toString()); assertEquals("{keyword=delta}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=foo}", result.getBuckets().get(1).getKeyAsString()); @@ -206,6 +214,7 @@ public void testWithKeywordMissingAfter() throws Exception { .aggregateAfter(Collections.singletonMap("keyword", "mar")); }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=bar}", result.afterKey().toString()); assertEquals("{keyword=foo}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=delta}", result.getBuckets().get(1).getKeyAsString()); @@ -236,6 +245,7 @@ public void testWithKeywordDesc() throws Exception { return new CompositeAggregationBuilder("name", Collections.singletonList(terms)); }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); assertEquals("{keyword=a}", result.getBuckets().get(2).getKeyAsString()); assertEquals(2L, result.getBuckets().get(2).getDocCount()); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); @@ -254,6 +264,7 @@ public void testWithKeywordDesc() throws Exception { .aggregateAfter(Collections.singletonMap("keyword", "c")); }, (result) -> { + assertEquals("{keyword=a}", result.afterKey().toString()); assertEquals(1, result.getBuckets().size()); assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); @@ -281,6 +293,7 @@ public void testMultiValuedWithKeyword() throws Exception { }, (result) -> { assertEquals(5, result.getBuckets().size()); + assertEquals("{keyword=z}", result.afterKey().toString()); assertEquals("{keyword=a}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=b}", result.getBuckets().get(1).getKeyAsString()); @@ -303,6 +316,7 @@ public void testMultiValuedWithKeyword() throws Exception { }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=z}", result.afterKey().toString()); assertEquals("{keyword=c}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); @@ -334,6 +348,7 @@ public void testMultiValuedWithKeywordDesc() throws Exception { }, (result) -> { assertEquals(5, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); assertEquals("{keyword=a}", result.getBuckets().get(4).getKeyAsString()); assertEquals(2L, result.getBuckets().get(4).getDocCount()); assertEquals("{keyword=b}", result.getBuckets().get(3).getKeyAsString()); @@ -357,6 +372,7 @@ public void testMultiValuedWithKeywordDesc() throws Exception { }, (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=a}", result.afterKey().toString()); assertEquals("{keyword=a}", result.getBuckets().get(1).getKeyAsString());
assertEquals(2L, result.getBuckets().get(1).getDocCount()); assertEquals("{keyword=b}", result.getBuckets().get(0).getKeyAsString()); @@ -391,6 +407,7 @@ public void testWithKeywordAndLong() throws Exception { ), (result) -> { assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=d, long=10}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(1).getKeyAsString()); @@ -412,6 +429,7 @@ public void testWithKeywordAndLong() throws Exception { ), (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=d, long=10}", result.afterKey().toString()); assertEquals("{keyword=c, long=100}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d, long=10}", result.getBuckets().get(1).getKeyAsString()); @@ -447,6 +465,7 @@ public void testWithKeywordAndLongDesc() throws Exception { ), (result) -> { assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=a, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(3).getKeyAsString()); assertEquals(1L, result.getBuckets().get(3).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(2).getKeyAsString()); @@ -467,6 +486,7 @@ public void testWithKeywordAndLongDesc() throws Exception { )).aggregateAfter(createAfterKey("keyword", "d", "long", 10L) ), (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{keyword=a, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(1).getKeyAsString()); @@ -499,6 +519,7 @@ public void testMultiValuedWithKeywordAndLong() throws Exception { )) , (result) -> { assertEquals(10, result.getBuckets().size()); + assertEquals("{keyword=z, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(1).getKeyAsString()); @@ -532,6 +553,7 @@ public void testMultiValuedWithKeywordAndLong() throws Exception { ).aggregateAfter(createAfterKey("keyword", "c", "long", 10L)) , (result) -> { assertEquals(6, result.getBuckets().size()); + assertEquals("{keyword=z, long=100}", result.afterKey().toString()); assertEquals("{keyword=c, long=100}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d, long=10}", result.getBuckets().get(1).getKeyAsString()); @@ -573,6 +595,7 @@ public void testMultiValuedWithKeywordAndLongDesc() throws Exception { ), (result) -> { assertEquals(10, result.getBuckets().size()); + assertEquals("{keyword=a, long=0}", result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(9).getKeyAsString()); assertEquals(1L, result.getBuckets().get(9).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(8).getKeyAsString()); @@ -607,6 +630,7 @@ public void testMultiValuedWithKeywordAndLongDesc() throws Exception { ), (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{keyword=a, long=0}", 
result.afterKey().toString()); assertEquals("{keyword=a, long=0}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertEquals("{keyword=a, long=100}", result.getBuckets().get(0).getKeyAsString()); @@ -640,6 +664,7 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { ) , (result) -> { assertEquals(10, result.getBuckets().size()); + assertEquals("{keyword=c, long=100, double=0.4}", result.afterKey().toString()); assertEquals("{keyword=a, long=0, double=0.09}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertEquals("{keyword=a, long=0, double=0.4}", result.getBuckets().get(1).getKeyAsString()); @@ -674,6 +699,7 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { ).aggregateAfter(createAfterKey("keyword", "a", "long", 100L, "double", 0.4d)) ,(result) -> { assertEquals(10, result.getBuckets().size()); + assertEquals("{keyword=z, long=0, double=0.09}", result.afterKey().toString()); assertEquals("{keyword=b, long=100, double=0.4}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=c, long=0, double=0.09}", result.getBuckets().get(1).getKeyAsString()); @@ -708,6 +734,7 @@ public void testMultiValuedWithKeywordLongAndDouble() throws Exception { ).aggregateAfter(createAfterKey("keyword", "z", "long", 100L, "double", 0.4d)) , (result) -> { assertEquals(0, result.getBuckets().size()); + assertNull(result.afterKey()); } ); } @@ -734,6 +761,7 @@ public void testWithDateHistogram() throws IOException { }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{date=1508457600000}", result.afterKey().toString()); assertEquals("{date=1474329600000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508371200000}", result.getBuckets().get(1).getKeyAsString()); @@ -753,6 +781,7 @@ public void testWithDateHistogram() throws IOException { }, (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{date=1508457600000}", result.afterKey().toString()); assertEquals("{date=1508371200000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508457600000}", result.getBuckets().get(1).getKeyAsString()); @@ -761,6 +790,91 @@ ); } + public void testWithDateHistogramAndFormat() throws IOException { + final List&lt;Map&lt;String, List&lt;Object&gt;&gt;&gt; dataset = new ArrayList&lt;&gt;(); + dataset.addAll( + Arrays.asList( + createDocument("date", asLong("2017-10-20T03:08:45")), + createDocument("date", asLong("2016-09-20T09:00:34")), + createDocument("date", asLong("2016-09-20T11:34:00")), + createDocument("date", asLong("2017-10-20T06:09:24")), + createDocument("date", asLong("2017-10-19T06:09:24")), + createDocument("long", 4L) + ) + ); + final Sort sort = new Sort(new SortedNumericSortField("date", SortField.Type.LONG)); + testSearchCase(new MatchAllDocsQuery(), sort, dataset, + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") + .field("date") + .dateHistogramInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); + }, + (result) -> { + assertEquals(3, result.getBuckets().size());
+ assertEquals("{date=2017-10-20}", result.afterKey().toString()); + assertEquals("{date=2016-09-20}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=2017-10-19}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{date=2017-10-20}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + } + ); + + testSearchCase(new MatchAllDocsQuery(), sort, dataset, + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") + .field("date") + .dateHistogramInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) + .aggregateAfter(createAfterKey("date", "2016-09-20")); + + }, (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{date=2017-10-20}", result.afterKey().toString()); + assertEquals("{date=2017-10-19}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=2017-10-20}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + } + ); + } + + public void testThatDateHistogramFailsFormatAfter() throws IOException { + ElasticsearchParseException exc = expectThrows(ElasticsearchParseException.class, + () -> testSearchCase(new MatchAllDocsQuery(), null, Collections.emptyList(), + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") + .field("date") + .dateHistogramInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) + .aggregateAfter(createAfterKey("date", "now")); + }, + (result) -> {} + )); + assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exc.getCause().getMessage(), containsString("now() is not supported in [after] key")); + + exc = expectThrows(ElasticsearchParseException.class, + () -> testSearchCase(new MatchAllDocsQuery(), null, Collections.emptyList(), + () -> { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") + .field("date") + .dateHistogramInterval(DateHistogramInterval.days(1)) + .format("yyyy-MM-dd"); + return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) + .aggregateAfter(createAfterKey("date", "1474329600000")); + }, + (result) -> {} + )); + assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exc.getCause().getMessage(), containsString("Parse failure")); + } + public void testWithDateHistogramAndTimeZone() throws IOException { final List&lt;Map&lt;String, List&lt;Object&gt;&gt;&gt; dataset = new ArrayList&lt;&gt;(); dataset.addAll( @@ -784,6 +898,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException { }, (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{date=1508454000000}", result.afterKey().toString()); assertEquals("{date=1474326000000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508367600000}", result.getBuckets().get(1).getKeyAsString()); @@ -804,6 +919,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException { }, (result) -> { assertEquals(2, result.getBuckets().size()); + assertEquals("{date=1508454000000}", result.afterKey().toString());
assertEquals("{date=1508367600000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508454000000}", result.getBuckets().get(1).getKeyAsString()); @@ -837,6 +953,7 @@ public void testWithDateHistogramAndKeyword() throws IOException { ), (result) -> { assertEquals(7, result.getBuckets().size()); + assertEquals("{date=1508457600000, keyword=d}", result.afterKey().toString()); assertEquals("{date=1474329600000, keyword=b}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1474329600000, keyword=c}", result.getBuckets().get(1).getKeyAsString()); @@ -867,6 +984,7 @@ public void testWithDateHistogramAndKeyword() throws IOException { ).aggregateAfter(createAfterKey("date", 1508371200000L, "keyword", "g")) , (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{date=1508457600000, keyword=d}", result.afterKey().toString()); assertEquals("{date=1508457600000, keyword=a}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508457600000, keyword=c}", result.getBuckets().get(1).getKeyAsString()); @@ -899,6 +1017,7 @@ public void testWithKeywordAndHistogram() throws IOException { ) , (result) -> { assertEquals(7, result.getBuckets().size()); + assertEquals("{keyword=z, price=50.0}", result.afterKey().toString()); assertEquals("{keyword=a, price=100.0}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=b, price=50.0}", result.getBuckets().get(1).getKeyAsString()); @@ -926,6 +1045,7 @@ public void testWithKeywordAndHistogram() throws IOException { ).aggregateAfter(createAfterKey("keyword", "c", "price", 50.0)) , (result) -> { assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=z, price=50.0}", result.afterKey().toString()); assertEquals("{keyword=c, price=100.0}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d, price=100.0}", result.getBuckets().get(1).getKeyAsString()); @@ -965,6 +1085,7 @@ public void testWithHistogramAndKeyword() throws IOException { ) , (result) -> { assertEquals(8, result.getBuckets().size()); + assertEquals("{histo=0.9, keyword=d}", result.afterKey().toString()); assertEquals("{histo=0.4, keyword=a}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{histo=0.4, keyword=b}", result.getBuckets().get(1).getKeyAsString()); @@ -994,6 +1115,7 @@ public void testWithHistogramAndKeyword() throws IOException { ).aggregateAfter(createAfterKey("histo", 0.8d, "keyword", "b")) , (result) -> { assertEquals(3, result.getBuckets().size()); + assertEquals("{histo=0.9, keyword=d}", result.afterKey().toString()); assertEquals("{histo=0.8, keyword=z}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{histo=0.9, keyword=a}", result.getBuckets().get(1).getKeyAsString()); @@ -1027,6 +1149,7 @@ public void testWithKeywordAndDateHistogram() throws IOException { ) , (result) -> { assertEquals(7, result.getBuckets().size()); + assertEquals("{keyword=z, date_histo=1474329600000}", result.afterKey().toString()); assertEquals("{keyword=a, date_histo=1508457600000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, 
result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=b, date_histo=1474329600000}", result.getBuckets().get(1).getKeyAsString()); @@ -1055,6 +1178,7 @@ public void testWithKeywordAndDateHistogram() throws IOException { ).aggregateAfter(createAfterKey("keyword","c", "date_histo", 1474329600000L)) , (result) -> { assertEquals(4, result.getBuckets().size()); + assertEquals("{keyword=z, date_histo=1474329600000}", result.afterKey().toString()); assertEquals("{keyword=c, date_histo=1508457600000}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("{keyword=d, date_histo=1508457600000}", result.getBuckets().get(1).getKeyAsString()); @@ -1220,7 +1344,6 @@ private void addToDocument(Document doc, Map&lt;String, List&lt;Object&gt;&gt; keys) { } } - @SuppressWarnings("unchecked") private static Map&lt;String, Object&gt; createAfterKey(Object... fields) { assert fields.length % 2 == 0;
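InternalCompositeTests below exercises the wire round trip, whose subtle part is the `Version.V_6_3_0` gate added to InternalComposite: pre-6.3 nodes send neither the per-source formats nor the explicit after key. A condensed sketch of the write-side pattern (the helper name is mine, not part of the patch; in the real `doWriteTo` the buckets are written in between, and `CompositeKey` is package-private):

[source,java]
--------------------------------------------------
package org.elasticsearch.search.aggregations.bucket.composite; // CompositeKey lives here

import java.io.IOException;
import java.util.List;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.DocValueFormat;

class AfterKeyWireSketch {
    // Mirrors the version gate in InternalComposite#doWriteTo: only 6.3+ streams
    // carry the formats and the optional after key; readers of an older stream
    // fall back to DocValueFormat.RAW and to the key of the last returned bucket.
    static void writeFormatsAndAfterKey(StreamOutput out, List<DocValueFormat> formats,
                                        CompositeKey afterKey) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
            for (DocValueFormat format : formats) {
                out.writeNamedWriteable(format);
            }
            out.writeBoolean(afterKey != null);
            if (afterKey != null) {
                afterKey.writeTo(out);
            }
        }
    }
}
--------------------------------------------------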
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java index 10cc5b8016dc5..022f5e6abc13c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java @@ -21,12 +21,15 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; +import org.joda.time.DateTimeZone; import org.junit.After; import java.io.IOException; @@ -41,28 +44,45 @@ import java.util.stream.Collectors; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLengthBetween; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomLongBetween; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase&lt;InternalComposite&gt; { private List&lt;String&gt; sourceNames; + private List&lt;DocValueFormat&gt; formats; private int[] reverseMuls; - private int[] formats; + private int[] types; private int size; + private static DocValueFormat randomDocValueFormat(boolean isLong) { + if (isLong) { + // we use a specific format only for a date histogram on a long/date field + if (randomBoolean()) { + return new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1)); + } else { + return DocValueFormat.RAW; + } + } else { + // and the raw format for the other types + return DocValueFormat.RAW; + } + } + @Override public void setUp() throws Exception { super.setUp(); int numFields = randomIntBetween(1, 10); size = randomNumberOfBuckets(); sourceNames = new ArrayList&lt;&gt;(); + formats = new ArrayList&lt;&gt;(); reverseMuls = new int[numFields]; - formats = new int[numFields]; + types = new int[numFields]; for (int i = 0; i &lt; numFields; i++) { sourceNames.add("field_" + i); reverseMuls[i] = randomBoolean() ? 1 : -1; - formats[i] = randomIntBetween(0, 2); + int type = randomIntBetween(0, 2); + types[i] = type; + formats.add(randomDocValueFormat(type == 0)); } } @@ -70,9 +90,10 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { super.tearDown(); - sourceNames= null; - reverseMuls = null; + sourceNames = null; formats = null; + reverseMuls = null; + types = null; } @Override @@ -93,7 +114,7 @@ protected &lt;P extends ParsedAggregation&gt; P parseAndAssert(final InternalAggregati private CompositeKey createCompositeKey() { Comparable&lt;?&gt;[] keys = new Comparable&lt;?&gt;[sourceNames.size()]; for (int j = 0; j &lt; keys.length; j++) { - switch (formats[j]) { + switch (types[j]) { case 0: keys[j] = randomLong(); break; @@ -123,19 +144,6 @@ private Comparator&lt;CompositeKey&gt; getKeyComparator() { }; } - @SuppressWarnings("unchecked") - private Comparator&lt;InternalComposite.InternalBucket&gt; getBucketComparator() { - return (o1, o2) -> { - for (int i = 0; i &lt; o1.getRawKey().size(); i++) { - int cmp = ((Comparable) o1.getRawKey().get(i)).compareTo(o2.getRawKey().get(i)) * reverseMuls[i]; - if (cmp != 0) { - return cmp; - } - } - return 0; - }; - } - @Override protected InternalComposite createTestInstance(String name, List&lt;PipelineAggregator&gt; pipelineAggregators, Map&lt;String, Object&gt; metaData, InternalAggregations aggregations) { @@ -149,11 +157,13 @@ protected InternalComposite createTestInstance(String name, List&lt;PipelineAggregator&gt; pipelineAggregators, buckets.sort((o1, o2) -> o1.compareKey(o2)); - return new InternalComposite(name, size, sourceNames, buckets, reverseMuls, Collections.emptyList(), metaData); + CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size()-1).getRawKey() : null; + return new InternalComposite(name, size, sourceNames, formats, buckets, lastBucket, reverseMuls, + Collections.emptyList(), metaData); } @Override @@ -172,7 +182,7 @@ protected InternalComposite mutateInstance(InternalComposite instance) throws IO break; case 1: buckets = new ArrayList&lt;&gt;(buckets); - buckets.add(new InternalComposite.InternalBucket(sourceNames, createCompositeKey(), reverseMuls, + buckets.add(new InternalComposite.InternalBucket(sourceNames, formats, createCompositeKey(), reverseMuls, randomLongBetween(1, 100), InternalAggregations.EMPTY) ); break; @@ -187,7 +197,8 @@ protected InternalComposite mutateInstance(InternalComposite instance) throws IO default: throw new AssertionError("illegal branch"); } - return new InternalComposite(instance.getName(), instance.getSize(), sourceNames, buckets, reverseMuls, + CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size()-1).getRawKey() : null; + return new InternalComposite(instance.getName(), instance.getSize(), sourceNames, formats, buckets, lastBucket, reverseMuls, instance.pipelineAggregators(), metaData); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index b35dc9563ce5c..6e5f919f33fdf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -92,7 +92,7 @@ public void match(LogEvent event) { saw = true; } } else { - if (event.getMessage().toString().contains(message)) { + if (event.getMessage().getFormattedMessage().contains(message)) { saw = true; } }