From fb5b2dff579d9aee4c715493bdf5c9ca29665d45 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Tue, 13 Mar 2018 12:33:37 -0700 Subject: [PATCH 01/89] Correct the way to reference params in painless --- docs/reference/search/request/script-fields.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 24e9c2a017fcb..55623faf2684c 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -21,7 +21,7 @@ GET /_search "test2" : { "script" : { "lang": "painless", - "source": "doc['my_field_name'].value * factor", + "source": "doc['my_field_name'].value * params.factor", "params" : { "factor" : 2.0 } From 6dae955b6a16d3aae7487c6e65d1b96b29dc654c Mon Sep 17 00:00:00 2001 From: Paul Sanwald Date: Tue, 13 Mar 2018 12:58:30 -0700 Subject: [PATCH 02/89] Document and test date_range "missing" support (#28983) * Add a REST integration test that documents date_range support Add a test case that exercises date_range aggregations using the missing option. Addresses #17597 * Test cleanup and correction Adding a document with a null date to exercise `missing` option, update test name to something reasonable. * Update documentation to explain how the "missing" parameter works for date_range aggregations. * Wrap lines at 80 chars in docs. * Change format of test to YAML for readability. --- .../bucket/daterange-aggregation.asciidoc | 93 +++++++++++++++---- .../test/search.aggregation/40_range.yml | 74 +++++++++++++++ 2 files changed, 150 insertions(+), 17 deletions(-) diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index 42c64f23cd335..4b172402da9ec 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -1,8 +1,14 @@ [[search-aggregations-bucket-daterange-aggregation]] === Date Range Aggregation -A range aggregation that is dedicated for date values. The main difference between this aggregation and the normal <> aggregation is that the `from` and `to` values can be expressed in <> expressions, and it is also possible to specify a date format by which the `from` and `to` response fields will be returned. -Note that this aggregation includes the `from` value and excludes the `to` value for each range. +A range aggregation that is dedicated for date values. The main difference +between this aggregation and the normal +<> +aggregation is that the `from` and `to` values can be expressed in +<> expressions, and it is also possible to specify a date +format by which the `from` and `to` response fields will be returned. +Note that this aggregation includes the `from` value and excludes the `to` value +for each range. Example: @@ -30,8 +36,9 @@ POST /sales/_search?size=0 <1> < now minus 10 months, rounded down to the start of the month. <2> >= now minus 10 months, rounded down to the start of the month. 
-In the example above, we created two range buckets, the first will "bucket" all documents dated prior to 10 months ago and -the second will "bucket" all documents dated since 10 months ago +In the example above, we created two range buckets, the first will "bucket" all +documents dated prior to 10 months ago and the second will "bucket" all +documents dated since 10 months ago Response: @@ -61,12 +68,52 @@ Response: -------------------------------------------------- // TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] +==== Missing Values + +The `missing` parameter defines how documents that are missing a value should +be treated. By default they will be ignored but it is also possible to treat +them as if they had a value. This is done by adding a set of fieldname : +value mappings to specify default values per field. + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "missing": "1976/11/30", + "ranges": [ + { + "key": "Older", + "to": "2016/02/01" + }, <1> + { + "key": "Newer", + "from": "2016/02/01", + "to" : "now/d" + } + ] + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> Documents without a value in the `date` field will be added to the "Older" +bucket, as if they had a date value of "1899-12-31". + [[date-format-pattern]] ==== Date Format/Pattern -NOTE: this information was copied from http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate] +NOTE: this information was copied from +http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate] -All ASCII letters are reserved as format pattern letters, which are defined as follows: +All ASCII letters are reserved as format pattern letters, which are defined +as follows: [options="header"] |======= @@ -104,30 +151,41 @@ All ASCII letters are reserved as format pattern letters, which are defined as f The count of pattern letters determine the format. -Text:: If the number of pattern letters is 4 or more, the full form is used; otherwise a short or abbreviated form is used if available. +Text:: If the number of pattern letters is 4 or more, the full form is used; +otherwise a short or abbreviated form is used if available. -Number:: The minimum number of digits. Shorter numbers are zero-padded to this amount. +Number:: The minimum number of digits. Shorter numbers are zero-padded to +this amount. -Year:: Numeric presentation for year and weekyear fields are handled specially. For example, if the count of 'y' is 2, the year will be displayed as the zero-based year of the century, which is two digits. +Year:: Numeric presentation for year and weekyear fields are handled +specially. For example, if the count of 'y' is 2, the year will be displayed +as the zero-based year of the century, which is two digits. Month:: 3 or over, use text, otherwise use number. -Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a colon, 'ZZZ' or more outputs the zone id. +Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a +colon, 'ZZZ' or more outputs the zone id. Zone names:: Time zone names ('z') cannot be parsed. -Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'..'Z'] will be treated as quoted text. For instance, characters like ':', '.', ' ', '#' and '?' 
will appear in the resulting time text even they are not embraced within single quotes. +Any characters in the pattern that are not in the ranges of ['a'..'z'] and +['A'..'Z'] will be treated as quoted text. For instance, characters like ':', + '.', ' ', '#' and '?' will appear in the resulting time text even they are + not embraced within single quotes. [[time-zones]] ==== Time zone in date range aggregations -Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. +Dates can be converted from another time zone to UTC by specifying the +`time_zone` parameter. -Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as one of -the http://www.joda.org/joda-time/timezones.html[time zone ids] from the TZ database. +Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or +-08:00) or as one of the http://www.joda.org/joda-time/timezones.html [time +zone ids] from the TZ database. -The `time_zone` parameter is also applied to rounding in date math expressions. As an example, -to round to the beginning of the day in the CET time zone, you can do the following: +The `time_zone` parameter is also applied to rounding in date math expressions. +As an example, to round to the beginning of the day in the CET time zone, you +can do the following: [source,js] -------------------------------------------------- @@ -156,7 +214,8 @@ POST /sales/_search?size=0 ==== Keyed Response -Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: +Setting the `keyed` flag to `true` will associate a unique string key with each +bucket and return the ranges as a hash rather than an array: [source,js] -------------------------------------------------- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml index fd8a016976d62..366243c78ee7b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml @@ -273,3 +273,77 @@ setup: - match: { aggregations.date_range.buckets.1.from: 3000000 } - match: { aggregations.date_range.buckets.1.to: 4000000 } +--- +"Date Range Missing": + - do: + index: + index: test + type: test + id: 1 + body: { "date" : "28800000000" } + + - do: + index: + index: test + type: test + id: 2 + body: { "date" : "315561600000" } + + - do: + index: + index: test + type: test + id: 3 + body: { "date" : "631180800000" } + + - do: + index: + index: test + type: test + id: 4 + body: { "date" : "-2524492800000" } + + - do: + index: + index: test + type: test + id: 5 + body: { "ip" : "192.168.0.1" } + + - do: + indices.refresh: {} + + - do: + search: + body: + aggs: + age_groups: + date_range: + field: date + missing: "-2240496000000" + ranges: + - key: Generation Y + from: '315561600000' + to: '946713600000' + - key: Generation X + from: "-157737600000" + to: '315561600000' + - key: Other + to: "-2208960000000" + + - match: { hits.total: 5 } + + - length: { aggregations.age_groups.buckets: 3 } + + - match: { aggregations.age_groups.buckets.0.key: "Other" } + + - match: { aggregations.age_groups.buckets.0.doc_count: 2 } + + - match: { aggregations.age_groups.buckets.1.key: "Generation X" } + + - match: { aggregations.age_groups.buckets.1.doc_count: 1 } + + - match: { 
aggregations.age_groups.buckets.2.key: "Generation Y" } + + - match: { aggregations.age_groups.buckets.2.doc_count: 2 } + From 6dadce47613a3c69d928940bcc1b2043e0a0184a Mon Sep 17 00:00:00 2001 From: Robin Neatherway Date: Tue, 13 Mar 2018 20:16:48 +0000 Subject: [PATCH 03/89] Painless: Correct ClassToName string conversion (#28997) A typo of 'dimensions' rather than 'dimension' caused an infinite loop. --- .../src/main/java/org/elasticsearch/painless/Definition.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 6e37e5be0bb0d..95032acabef9b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -565,7 +565,7 @@ public static String ClassToName(Class clazz) { if (component == def.class) { StringBuilder builder = new StringBuilder(def.class.getSimpleName()); - for (int dimension = 0; dimension < dimensions; dimensions++) { + for (int dimension = 0; dimension < dimensions; dimension++) { builder.append("[]"); } From 697b9f8b82a6b0e97e2a343e7483f3b4957b9f4c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 16:30:13 -0400 Subject: [PATCH 04/89] Remove interning from prefix logger (#29031) This interning is completely unnecessary because we look up the marker by the prefix (value, not identity) anyway. This means that regardless of the identity of the prefix, we end up with the same marker. That is all that we really care about here. --- .../java/org/elasticsearch/common/logging/PrefixLogger.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java b/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java index a78330c3e8564..f46d360a3fa5b 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java @@ -76,7 +76,7 @@ public String prefix() { PrefixLogger(final ExtendedLogger logger, final String name, final String prefix) { super(logger, name, null); - final String actualPrefix = (prefix == null ? "" : prefix).intern(); + final String actualPrefix = (prefix == null ? "" : prefix); final Marker actualMarker; // markers is not thread-safe, so we synchronize access synchronized (markers) { @@ -88,6 +88,7 @@ public String prefix() { * those references are held strongly, this would give a strong reference back to the key preventing them from ever being * collected. This also guarantees that no other strong reference can be held to the prefix anywhere. */ + // noinspection RedundantStringConstructorCall markers.put(new String(actualPrefix), actualMarker); } else { actualMarker = maybeMarker; From c8e71327ab264c3a5c89bcb64da3c8abcb5f04d2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 16:31:19 -0400 Subject: [PATCH 05/89] Log template creation and deletion (#29027) These can be seen at the debug level via cluster state update logging but really they should be more visible like index creation and deletion. This commit adds info-level logging for template puts and deletes. 
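For illustration, with this change a template put or delete would be expected to
show up in the server log roughly as follows; the template name, index pattern,
and log-line framing here are assumptions — only the message format comes from
the change below:

[source,text]
----
[INFO ][o.e.c.m.MetaDataIndexTemplateService] adding template [my_template] for index patterns [logs-*]
[INFO ][o.e.c.m.MetaDataIndexTemplateService] removing template [my_template]
----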
---
 .../cluster/metadata/MetaDataIndexTemplateService.java         | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
index de065a4b922f6..507eaf412d5fa 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java
@@ -117,6 +117,7 @@ public ClusterState execute(ClusterState currentState) {
             }
             MetaData.Builder metaData = MetaData.builder(currentState.metaData());
             for (String templateName : templateNames) {
+                logger.info("removing template [{}]", templateName);
                 metaData.removeTemplate(templateName);
             }
             return ClusterState.builder(currentState).metaData(metaData).build();
@@ -185,6 +186,7 @@ public ClusterState execute(ClusterState currentState) throws Exception {
 
                 MetaData.Builder builder = MetaData.builder(currentState.metaData()).put(template);
 
+                logger.info("adding template [{}] for index patterns {}", request.name, request.indexPatterns);
                 return ClusterState.builder(currentState).metaData(builder).build();
             }
 
From 98ad2596d881529778ee407431753282e3ae3f98 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 13 Mar 2018 16:32:16 -0400
Subject: [PATCH 06/89] Put JVM crash logs in the default log directory
 (#29028)

This commit adds a JVM flag to ensure that the JVM fatal error logs land
in the default log directory. Users that wish to use an alternative
location should change the path configured here.

---
 distribution/build.gradle           | 6 ++++++
 distribution/src/config/jvm.options | 3 +++
 2 files changed, 9 insertions(+)

diff --git a/distribution/build.gradle b/distribution/build.gradle
index 5bb9944b4a362..f3fe27168f70e 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -265,6 +265,12 @@ subprojects {
           'def': "#-XX:HeapDumpPath=/heap/dump/path"
         ],
 
+        'error.file': [
+          'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
+          'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
+          'def': "#-XX:ErrorFile=/error/file/path"
+        ],
+
         'stopping.timeout': [
           'rpm': 86400,
         ],

diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options
index eb3d772fa56fd..e862343de8d6b 100644
--- a/distribution/src/config/jvm.options
+++ b/distribution/src/config/jvm.options
@@ -81,6 +81,9 @@
 # ensure the directory exists and has sufficient space
 ${heap.dump.path}
 
+# specify an alternative path for JVM fatal error logs
+${error.file}
+
 ## JDK 8 GC logging
 
 8:-XX:+PrintGCDetails

From 4dc3adad5172f1d7c137b983a1715797a9021190 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 13 Mar 2018 17:32:18 -0400
Subject: [PATCH 07/89] Archive unknown or invalid settings on updates (#28888)

Today we can end up in a situation where the cluster state contains
unknown or invalid settings. This can happen easily during a rolling
upgrade. For example, consider two nodes that are on a version that
considers the setting foo.bar to be known and valid. Assume one of these
nodes is restarted on a higher version that considers foo.bar to now be
either unknown or invalid, and then the second node is restarted too.
Now, both nodes will be on a version that considers foo.bar to be unknown
or invalid, yet this setting will still be contained in the cluster
state.
This means that if a cluster settings update is applied and we validate the settings update with the existing settings then validation will fail. In such a state, the offending setting can not even be removed. This commit helps out with this situation by archiving any settings that are unknown or invalid at the time that a settings update is applied. This allows the setting update to go through, and the archived settings can be removed at a later time. --- .../cluster/settings/SettingsUpdater.java | 83 ++++- .../TransportClusterUpdateSettingsAction.java | 3 +- .../settings/SettingsUpdaterTests.java | 332 +++++++++++++++++- 3 files changed, 398 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java index dc13913652a34..ec72dd949674c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -19,13 +19,20 @@ package org.elasticsearch.action.admin.cluster.settings; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import java.util.Map; + import static org.elasticsearch.cluster.ClusterState.builder; +import static org.elasticsearch.common.settings.AbstractScopedSettings.ARCHIVED_SETTINGS_PREFIX; /** * Updates transient and persistent cluster state settings if there are any changes @@ -48,15 +55,34 @@ synchronized Settings getPersistentUpdate() { return persistentUpdates.build(); } - synchronized ClusterState updateSettings(final ClusterState currentState, Settings transientToApply, Settings persistentToApply) { + synchronized ClusterState updateSettings( + final ClusterState currentState, final Settings transientToApply, final Settings persistentToApply, final Logger logger) { boolean changed = false; - Settings.Builder transientSettings = Settings.builder(); - transientSettings.put(currentState.metaData().transientSettings()); - changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); + /* + * Our cluster state could have unknown or invalid settings that are known and valid in a previous version of Elasticsearch. We can + * end up in this situation during a rolling upgrade where the previous version will infect the current version of Elasticsearch + * with settings that the current version either no longer knows about or now considers to have invalid values. When the current + * version of Elasticsearch becomes infected with a cluster state containing such settings, we need to skip validating such settings + * and instead archive them. 
Consequently, for the current transient and persistent settings in the cluster state we do the + * following: + * - split existing settings instance into two with the known and valid settings in one, and the unknown or invalid in another + * (note that existing archived settings are included in the known and valid settings) + * - validate the incoming settings update combined with the existing known and valid settings + * - merge in the archived unknown or invalid settings + */ + final Tuple partitionedTransientSettings = + partitionKnownAndValidSettings(currentState.metaData().transientSettings(), "transient", logger); + final Settings knownAndValidTransientSettings = partitionedTransientSettings.v1(); + final Settings unknownOrInvalidTransientSettings = partitionedTransientSettings.v2(); + final Settings.Builder transientSettings = Settings.builder().put(knownAndValidTransientSettings); + changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); - Settings.Builder persistentSettings = Settings.builder(); - persistentSettings.put(currentState.metaData().persistentSettings()); + final Tuple partitionedPersistentSettings = + partitionKnownAndValidSettings(currentState.metaData().persistentSettings(), "persistent", logger); + final Settings knownAndValidPersistentSettings = partitionedPersistentSettings.v1(); + final Settings unknownOrInvalidPersistentSettings = partitionedPersistentSettings.v2(); + final Settings.Builder persistentSettings = Settings.builder().put(knownAndValidPersistentSettings); changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent"); final ClusterState clusterState; @@ -69,8 +95,8 @@ synchronized ClusterState updateSettings(final ClusterState currentState, Settin clusterSettings.validate(persistentFinalSettings, true); MetaData.Builder metaData = MetaData.builder(currentState.metaData()) - .persistentSettings(persistentFinalSettings) - .transientSettings(transientFinalSettings); + .transientSettings(Settings.builder().put(transientFinalSettings).put(unknownOrInvalidTransientSettings).build()) + .persistentSettings(Settings.builder().put(persistentFinalSettings).put(unknownOrInvalidPersistentSettings).build()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) @@ -102,5 +128,46 @@ synchronized ClusterState updateSettings(final ClusterState currentState, Settin return clusterState; } + /** + * Partitions the settings into those that are known and valid versus those that are unknown or invalid. The resulting tuple contains + * the known and valid settings in the first component and the unknown or invalid settings in the second component. Note that archived + * settings contained in the settings to partition are included in the first component. 
+ * + * @param settings the settings to partition + * @param settingsType a string to identify the settings (for logging) + * @param logger a logger to sending warnings to + * @return the partitioned settings + */ + private Tuple partitionKnownAndValidSettings( + final Settings settings, final String settingsType, final Logger logger) { + final Settings existingArchivedSettings = settings.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX)); + final Settings settingsExcludingExistingArchivedSettings = + settings.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX) == false); + final Settings settingsWithUnknownOrInvalidArchived = clusterSettings.archiveUnknownOrInvalidSettings( + settingsExcludingExistingArchivedSettings, + e -> logUnknownSetting(settingsType, e, logger), + (e, ex) -> logInvalidSetting(settingsType, e, ex, logger)); + return Tuple.tuple( + Settings.builder() + .put(settingsWithUnknownOrInvalidArchived.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX) == false)) + .put(existingArchivedSettings) + .build(), + settingsWithUnknownOrInvalidArchived.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX))); + } + + private void logUnknownSetting(final String settingType, final Map.Entry e, final Logger logger) { + logger.warn("ignoring existing unknown {} setting: [{}] with value [{}]; archiving", settingType, e.getKey(), e.getValue()); + } + + private void logInvalidSetting( + final String settingType, final Map.Entry e, final IllegalArgumentException ex, final Logger logger) { + logger.warn( + (Supplier) + () -> new ParameterizedMessage("ignoring existing invalid {} setting: [{}] with value [{}]; archiving", + settingType, + e.getKey(), + e.getValue()), + ex); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index dae55b2fc048a..edc30bd3c35fd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -180,7 +180,8 @@ public void onFailure(String source, Exception e) { @Override public ClusterState execute(final ClusterState currentState) { - ClusterState clusterState = updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings()); + ClusterState clusterState = + updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings(), logger); changed = clusterState != currentState; return clusterState; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java index 19dd64e6324ca..d582141898684 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -28,11 +28,20 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; +import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import static 
org.elasticsearch.common.settings.AbstractScopedSettings.ARCHIVED_SETTINGS_PREFIX; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + public class SettingsUpdaterTests extends ESTestCase { @@ -51,7 +60,7 @@ public void testUpdateSetting() { .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); ClusterState build = builder.metaData(metaData).build(); ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5).build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build(), logger); assertNotSame(clusterState, build); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); @@ -59,14 +68,14 @@ public void testUpdateSetting() { assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 4.5, 0.1); clusterState = updater.updateSettings(clusterState, Settings.builder().putNull("cluster.routing.*").build(), - Settings.EMPTY); + Settings.EMPTY, logger); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); clusterState = updater.updateSettings(clusterState, - Settings.EMPTY, Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build()); + Settings.EMPTY, Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build(), logger); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 10.0, 0.1); assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().persistentSettings())); @@ -93,7 +102,7 @@ public void testAllOrNothing() { try { updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); fail("all or nothing"); } catch (IllegalArgumentException ex) { logger.info("", ex); @@ -119,21 +128,21 @@ public void testClusterBlock() { ClusterState build = builder.metaData(metaData).build(); ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), true).build(), - 
Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); assertEquals(clusterState.blocks().global().size(), 1); assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_BLOCK); clusterState = updater.updateSettings(build, Settings.EMPTY, - Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build()); + Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build(), logger); assertEquals(clusterState.blocks().global().size(), 0); clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); assertEquals(clusterState.blocks().global().size(), 1); assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK); clusterState = updater.updateSettings(build, Settings.EMPTY, - Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), false).build()); + Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), false).build(), logger); assertEquals(clusterState.blocks().global().size(), 0); } @@ -151,16 +160,317 @@ public void testDeprecationLogging() { ClusterState.builder(new ClusterName("foo")).metaData(MetaData.builder().persistentSettings(settings).build()).build(); final Settings toApplyDebug = Settings.builder().put("logger.org.elasticsearch", "debug").build(); - final ClusterState afterDebug = settingsUpdater.updateSettings(clusterState, toApplyDebug, Settings.EMPTY); + final ClusterState afterDebug = settingsUpdater.updateSettings(clusterState, toApplyDebug, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); final Settings toApplyUnset = Settings.builder().putNull("logger.org.elasticsearch").build(); - final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY); + final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); // we also check that if no settings are changed, deprecation logging still occurs - settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY); + settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); } + public void testUpdateWithUnknownAndSettings() { + // we will randomly apply some new dynamic persistent and transient settings + final int numberOfDynamicSettings = randomIntBetween(1, 8); + final List> dynamicSettings = new ArrayList<>(numberOfDynamicSettings); + for (int i = 0; i < numberOfDynamicSettings; i++) { + final Setting dynamicSetting = Setting.simpleString("dynamic.setting" + i, Property.Dynamic, Property.NodeScope); + 
dynamicSettings.add(dynamicSetting); + } + + // these are invalid settings that exist as either persistent or transient settings + final int numberOfInvalidSettings = randomIntBetween(0, 7); + final List> invalidSettings = new ArrayList<>(numberOfInvalidSettings); + for (int i = 0; i < numberOfInvalidSettings; i++) { + final Setting invalidSetting = Setting.simpleString( + "invalid.setting" + i, + (value, settings) -> { + throw new IllegalArgumentException("invalid"); + }, + Property.NodeScope); + invalidSettings.add(invalidSetting); + } + + // these are unknown settings that exist as either persistent or transient settings + final int numberOfUnknownSettings = randomIntBetween(0, 7); + final List> unknownSettings = new ArrayList<>(numberOfUnknownSettings); + for (int i = 0; i < numberOfUnknownSettings; i++) { + final Setting unknownSetting = Setting.simpleString("unknown.setting" + i, Property.NodeScope); + unknownSettings.add(unknownSetting); + } + + final Settings.Builder existingPersistentSettings = Settings.builder(); + final Settings.Builder existingTransientSettings = Settings.builder(); + + for (final Setting dynamicSetting : dynamicSettings) { + switch (randomIntBetween(0, 2)) { + case 0: + existingPersistentSettings.put(dynamicSetting.getKey(), "existing_value"); + break; + case 1: + existingTransientSettings.put(dynamicSetting.getKey(), "existing_value"); + break; + case 2: + break; + } + } + + for (final Setting invalidSetting : invalidSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(invalidSetting.getKey(), "value"); + } else { + existingTransientSettings.put(invalidSetting.getKey(), "value"); + } + } + + for (final Setting unknownSetting : unknownSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(unknownSetting.getKey(), "value"); + } else { + existingTransientSettings.put(unknownSetting.getKey(), "value"); + } + } + + // register all the known settings (note that we do not register the unknown settings) + final Set> knownSettings = + Stream.concat( + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), + Stream.concat(dynamicSettings.stream(), invalidSettings.stream())) + .collect(Collectors.toSet()); + final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, knownSettings); + for (final Setting dynamicSetting : dynamicSettings) { + clusterSettings.addSettingsUpdateConsumer(dynamicSetting, s -> {}); + } + final SettingsUpdater settingsUpdater = new SettingsUpdater(clusterSettings); + final MetaData.Builder metaDataBuilder = + MetaData.builder() + .persistentSettings(existingPersistentSettings.build()) + .transientSettings(existingTransientSettings.build()); + final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")).metaData(metaDataBuilder).build(); + + // prepare the dynamic settings update + final Settings.Builder persistentToApply = Settings.builder(); + final Settings.Builder transientToApply = Settings.builder(); + for (final Setting dynamicSetting : dynamicSettings) { + switch (randomIntBetween(0, 2)) { + case 0: + persistentToApply.put(dynamicSetting.getKey(), "new_value"); + break; + case 1: + transientToApply.put(dynamicSetting.getKey(), "new_value"); + break; + case 2: + break; + } + } + + if (transientToApply.keys().isEmpty() && persistentToApply.keys().isEmpty()) { + // force a settings update otherwise our assertions below will fail + if (randomBoolean()) { + persistentToApply.put(dynamicSettings.get(0).getKey(), "new_value"); + } else { + 
transientToApply.put(dynamicSettings.get(0).getKey(), "new_value"); + } + } + + final ClusterState clusterStateAfterUpdate = + settingsUpdater.updateSettings(clusterState, transientToApply.build(), persistentToApply.build(), logger); + + // the invalid settings should be archived and not present in non-archived form + for (final Setting invalidSetting : invalidSettings) { + if (existingPersistentSettings.keys().contains(invalidSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + } + + // the unknown settings should be archived and not present in non-archived form + for (final Setting unknownSetting : unknownSettings) { + if (existingPersistentSettings.keys().contains(unknownSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + } + + // the dynamic settings should be applied + for (final Setting dynamicSetting : dynamicSettings) { + if (persistentToApply.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().get(dynamicSetting.getKey()), equalTo("new_value")); + } else if (transientToApply.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat(clusterStateAfterUpdate.metaData().transientSettings().get(dynamicSetting.getKey()), equalTo("new_value")); + } else { + if (existingPersistentSettings.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().get(dynamicSetting.getKey()), + equalTo("existing_value")); + } else if (existingTransientSettings.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().get(dynamicSetting.getKey()), + equalTo("existing_value")); + } else { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), not(hasItem(dynamicSetting.getKey()))); + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), not(hasItem(dynamicSetting.getKey()))); + } + } + } + } + + public void testRemovingArchivedSettingsDoesNotRemoveNonArchivedInvalidOrUnknownSettings() { + // these are settings that are 
archived in the cluster state as either persistent or transient settings + final int numberOfArchivedSettings = randomIntBetween(1, 8); + final List> archivedSettings = new ArrayList<>(numberOfArchivedSettings); + for (int i = 0; i < numberOfArchivedSettings; i++) { + final Setting archivedSetting = Setting.simpleString("setting", Property.NodeScope); + archivedSettings.add(archivedSetting); + } + + // these are invalid settings that exist as either persistent or transient settings + final int numberOfInvalidSettings = randomIntBetween(0, 7); + final List> invalidSettings = new ArrayList<>(numberOfInvalidSettings); + for (int i = 0; i < numberOfInvalidSettings; i++) { + final Setting invalidSetting = Setting.simpleString( + "invalid.setting" + i, + (value, settings) -> { + throw new IllegalArgumentException("invalid"); + }, + Property.NodeScope); + invalidSettings.add(invalidSetting); + } + + // these are unknown settings that exist as either persistent or transient settings + final int numberOfUnknownSettings = randomIntBetween(0, 7); + final List> unknownSettings = new ArrayList<>(numberOfUnknownSettings); + for (int i = 0; i < numberOfUnknownSettings; i++) { + final Setting unknownSetting = Setting.simpleString("unknown.setting" + i, Property.NodeScope); + unknownSettings.add(unknownSetting); + } + + final Settings.Builder existingPersistentSettings = Settings.builder(); + final Settings.Builder existingTransientSettings = Settings.builder(); + + for (final Setting archivedSetting : archivedSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey(), "value"); + } else { + existingTransientSettings.put(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey(), "value"); + } + } + + for (final Setting invalidSetting : invalidSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(invalidSetting.getKey(), "value"); + } else { + existingTransientSettings.put(invalidSetting.getKey(), "value"); + } + } + + for (final Setting unknownSetting : unknownSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(unknownSetting.getKey(), "value"); + } else { + existingTransientSettings.put(unknownSetting.getKey(), "value"); + } + } + + // register all the known settings (not that we do not register the unknown settings) + final Set> knownSettings = + Stream.concat( + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), + Stream.concat(archivedSettings.stream(), invalidSettings.stream())) + .collect(Collectors.toSet()); + final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, knownSettings); + final SettingsUpdater settingsUpdater = new SettingsUpdater(clusterSettings); + final MetaData.Builder metaDataBuilder = + MetaData.builder() + .persistentSettings(existingPersistentSettings.build()) + .transientSettings(existingTransientSettings.build()); + final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")).metaData(metaDataBuilder).build(); + + final Settings.Builder persistentToApply = Settings.builder().put("archived.*", (String)null); + final Settings.Builder transientToApply = Settings.builder().put("archived.*", (String)null); + + final ClusterState clusterStateAfterUpdate = + settingsUpdater.updateSettings(clusterState, transientToApply.build(), persistentToApply.build(), logger); + + // existing archived settings are removed + for (final Setting archivedSetting : archivedSettings) { + if (existingPersistentSettings.keys().contains(ARCHIVED_SETTINGS_PREFIX + 
archivedSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey()))); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey()))); + } + } + + // the invalid settings should be archived and not present in non-archived form + for (final Setting invalidSetting : invalidSettings) { + if (existingPersistentSettings.keys().contains(invalidSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + } + + // the unknown settings should be archived and not present in non-archived form + for (final Setting unknownSetting : unknownSettings) { + if (existingPersistentSettings.keys().contains(unknownSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + } + } + } From 4faf3cf02ca44f64bf89ac043e57dbdd12107281 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 18:18:01 -0400 Subject: [PATCH 08/89] Add docs for error file configuration (#29032) This commit adds docs for configuring the error file setting for where the JVM writes fatal error logs. --- .../setup/important-settings/error-file.asciidoc | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 docs/reference/setup/important-settings/error-file.asciidoc diff --git a/docs/reference/setup/important-settings/error-file.asciidoc b/docs/reference/setup/important-settings/error-file.asciidoc new file mode 100644 index 0000000000000..37f1d2a0b14ed --- /dev/null +++ b/docs/reference/setup/important-settings/error-file.asciidoc @@ -0,0 +1,16 @@ +[[error-file-path]] +=== JVM fatal error logs + +The <> and <> package distributions default to configuring +the JVM to write fatal error logs to `/var/lib/elasticsearch`; these are logs +produced by the JVM when it encounters a fatal error (e.g., a segmentation +fault). If this path is not suitable for receiving logs, you should modify the +entry `-XX:ErrorFile=/var/lib/elasticsearch/hs_err_pid%p.log` in +<> to an alternate path. + +Note that the archive distributions do not configure the error file path by +default. Instead, the JVM will default to writing to the working directory for +the Elasticsearch process. If you wish to configure an error file path, you +should modify the entry `#-XX:ErrorFile=/error/file/path` in +<> to remove the comment marker `#` and to specify an +actual path. 
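As a minimal sketch of the configuration described above, an
archive-distribution user routing fatal error logs to a custom directory would
edit `jvm.options` along these lines; the chosen directory is an assumption:

[source,text]
----
# route JVM fatal error logs to a dedicated, writable directory
# (the directory must exist; %p expands to the PID of the crashed JVM)
-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log
----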
From bddf9df8b4d1270ac607488c8282ae9f09516b4f Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 13 Mar 2018 18:27:14 -0400
Subject: [PATCH 09/89] Add search slowlog level to docs (#29040)

This commit adds an indication of how to set the search slowlog level to
the docs.

---
 docs/reference/index-modules/slowlog.asciidoc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/reference/index-modules/slowlog.asciidoc b/docs/reference/index-modules/slowlog.asciidoc
index c13b180cd323a..1dc84f380f509 100644
--- a/docs/reference/index-modules/slowlog.asciidoc
+++ b/docs/reference/index-modules/slowlog.asciidoc
@@ -22,6 +22,8 @@ index.search.slowlog.threshold.fetch.warn: 1s
 index.search.slowlog.threshold.fetch.info: 800ms
 index.search.slowlog.threshold.fetch.debug: 500ms
 index.search.slowlog.threshold.fetch.trace: 200ms
+
+index.search.slowlog.level: info
 --------------------------------------------------
 
 All of the above settings are _dynamic_ and are set per-index.

From 4ec179b2ba91a4546fdf2c5bae0d21716ddd6312 Mon Sep 17 00:00:00 2001
From: Chun On Lee
Date: Tue, 13 Mar 2018 07:44:21 -0700
Subject: [PATCH 10/89] Update "account" to "_doc" type for bulk example
 (#28786)

* Change 'account' to '_doc' as types are deprecated

---
 docs/reference/getting-started.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index b3156dbc1f414..af7fc8fa6d69b 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -669,7 +669,7 @@ You can download the sample dataset (accounts.json) from https://github.com/elas
 
 [source,sh]
 --------------------------------------------------
-curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/account/_bulk?pretty&refresh" --data-binary "@accounts.json"
+curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/_doc/_bulk?pretty&refresh" --data-binary "@accounts.json"
 curl "localhost:9200/_cat/indices?v"
 --------------------------------------------------
 // NOTCONSOLE

From 9f2c4df94d98eed886ad283c1348d6528bc2af11 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Tue, 13 Mar 2018 17:00:01 -0700
Subject: [PATCH 11/89] REST tests: Increase version to skip to include 6.3

In order to fix the tests, the correct version needs to be skipped until
the backport is done.
Relates #28943 --- .../rest-api-spec/test/indices.clear_cache/10_basic.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml index b5e98949f03b4..507785a257cab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml @@ -29,7 +29,7 @@ --- "clear_cache with field_data set to true": - skip: - version: " - 6.2.99" + version: " - 6.3.99" reason: field_data was deprecated in 6.3.0 features: "warnings" @@ -42,7 +42,7 @@ --- "clear_cache with fielddata set to true": - skip: - version: " - 6.2.99" + version: " - 6.3.99" reason: fielddata was deprecated before 6.3.0 - do: From 8f0da37c5e17575dd42432c1827eea6d48e5661e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 13 Mar 2018 20:22:32 -0400 Subject: [PATCH 12/89] Revert "REST tests: Increase version to skip to include 6.3" This reverts commit 9f2c4df94d98eed886ad283c1348d6528bc2af11 which isn't needed now that the backport is complete. --- .../rest-api-spec/test/indices.clear_cache/10_basic.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml index 507785a257cab..b5e98949f03b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml @@ -29,7 +29,7 @@ --- "clear_cache with field_data set to true": - skip: - version: " - 6.3.99" + version: " - 6.2.99" reason: field_data was deprecated in 6.3.0 features: "warnings" @@ -42,7 +42,7 @@ --- "clear_cache with fielddata set to true": - skip: - version: " - 6.3.99" + version: " - 6.2.99" reason: fielddata was deprecated before 6.3.0 - do: From 46fcd07153617be145c1b7434e849afc694d640e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 20:40:47 -0400 Subject: [PATCH 13/89] Add total hits to the search slow log (#29034) This commit adds the total hits to the search slow log. 
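For example, with this change a slow search entry would be expected to look
roughly like the following; the shard, timing, and hit-count values are
hypothetical — only the `total_hits` field is new:

[source,text]
----
[index][0] took[5.1s], took_millis[5100], total_hits[42], types[], ...
----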
--- .../main/java/org/elasticsearch/index/SearchSlowLog.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java index a48e3d7bd72c5..f3c5d07f1f2f4 100644 --- a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -149,8 +149,11 @@ static final class SlowLogSearchContextPrinter { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(context.indexShard().shardId()).append(" "); - sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); + sb.append(context.indexShard().shardId()) + .append(" ") + .append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ") + .append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ") + .append("total_hits[").append(context.queryResult().getTotalHits()).append("], "); if (context.getQueryShardContext().getTypes() == null) { sb.append("types[], "); } else { From 647d0a1e9518a02670087003145e40e9ee1620a1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 23:42:16 -0400 Subject: [PATCH 14/89] Do not swallow fail to convert exceptions (#29043) When converting the source for an indexing request to JSON, the conversion can throw an I/O exception which we swallow and proceed with logging to the slow log. The cause of the I/O exception is lost. This commit changes this behavior and chooses to drop the entry from the slow logs and instead lets an exception percolate up to the indexing operation listener loop. Here, the exception will be caught and logged at the warn level. --- .../org/elasticsearch/index/IndexingSlowLog.java | 8 ++++++++ .../elasticsearch/index/IndexingSlowLogTests.java | 12 ++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 94c3892ef361e..b75cda5b6ca70 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -33,6 +33,8 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Locale; import java.util.concurrent.TimeUnit; public final class IndexingSlowLog implements IndexingOperationListener { @@ -194,6 +196,12 @@ public String toString() { sb.append(", source[").append(Strings.cleanTruncate(source, maxSourceCharsToLog)).append("]"); } catch (IOException e) { sb.append(", source[_failed_to_convert_[").append(e.getMessage()).append("]]"); + /* + * We choose to fail to write to the slow log and instead let this percolate up to the post index listener loop where this + * will be logged at the warn level. 
+ */ + final String message = String.format(Locale.ROOT, "failed to convert source for slow log entry [%s]", sb.toString()); + throw new UncheckedIOException(message, e); } return sb.toString(); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 45b0d0aa2475c..ff5166e8f1a14 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import com.fasterxml.jackson.core.JsonParseException; import org.apache.lucene.document.NumericDocValuesField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -34,6 +35,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.io.UncheckedIOException; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasToString; @@ -70,9 +72,15 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { "test", null, null, source, XContentType.JSON, null); p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); - assertThat(p.toString(), containsString("_failed_to_convert_[Unrecognized token 'invalid':" + final UncheckedIOException e = expectThrows(UncheckedIOException.class, p::toString); + assertThat(e, hasToString(containsString("_failed_to_convert_[Unrecognized token 'invalid':" + " was expecting ('true', 'false' or 'null')\n" - + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper")); + + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper"))); + assertNotNull(e.getCause()); + assertThat(e.getCause(), instanceOf(JsonParseException.class)); + assertThat(e.getCause(), hasToString(containsString("Unrecognized token 'invalid':" + + " was expecting ('true', 'false' or 'null')\n" + + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper"))); } public void testReformatSetting() { From 24d10adaab2ac7ee2469d95789ea4d8c8527f041 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 06:36:37 -0400 Subject: [PATCH 15/89] Main response should not have status 503 when okay (#29045) The REST status 503 means "I can not handle the request that you sent me." However today we respond to a main request with a 503 when there are certain cluster blocks despite still responding with an actual main response. This is broken, we should respond with a 200 status. This commit removes this silliness. 
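As a sketch of the intended behavior, a root-endpoint request against a node
under such a cluster block would now be expected to return a 200 along with the
usual main response; the field values here are illustrative:

[source,text]
----
GET /

HTTP/1.1 200 OK
{
  "name" : "node-1",
  "cluster_name" : "my-cluster",
  "version" : { ... },
  "tagline" : "You Know, for Search"
}
----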
--- .../CustomRestHighLevelClientTests.java | 7 +------ .../client/RestHighLevelClientTests.java | 2 +- .../documentation/MainDocumentationIT.java | 1 - .../action/main/MainResponse.java | 20 ++++++++----------- .../action/main/TransportMainAction.java | 2 +- .../rest/action/RestMainAction.java | 4 +--- .../action/main/MainActionTests.java | 4 +--- .../action/main/MainResponseTests.java | 16 ++++++--------- .../rest/action/RestMainActionTests.java | 13 +++++------- 9 files changed, 24 insertions(+), 45 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index 6ca351365db56..42496822090fd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -37,11 +37,6 @@ import org.elasticsearch.action.main.MainRequest; import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseListener; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.xcontent.XContentHelper; @@ -162,7 +157,7 @@ private Response mockPerformRequest(Header httpHeader) throws IOException { ProtocolVersion protocol = new ProtocolVersion("HTTP", 1, 1); when(mockResponse.getStatusLine()).thenReturn(new BasicStatusLine(protocol, 200, "OK")); - MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT, true); + MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT); BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef(); when(mockResponse.getEntity()).thenReturn(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 79b8d8d1503a6..ce932adb2858a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -163,7 +163,7 @@ public void testPingSocketTimeout() throws IOException { public void testInfo() throws IOException { Header[] headers = randomHeaders(random(), "Header"); MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", - Build.CURRENT, true); + Build.CURRENT); mockResponse(testInfo); MainResponse receivedInfo = restHighLevelClient.info(headers); assertEquals(testInfo, receivedInfo); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java index 0558091a76cb4..72986d44f97cd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java @@ -52,7 +52,6 @@ public void testMain() throws IOException { //tag::main-execute MainResponse response = client.info(); //end::main-execute - assertTrue(response.isAvailable()); //tag::main-response ClusterName clusterName = response.getClusterName(); // <1> String clusterUuid = response.getClusterUuid(); // <2> diff --git a/server/src/main/java/org/elasticsearch/action/main/MainResponse.java b/server/src/main/java/org/elasticsearch/action/main/MainResponse.java index 9bed7691374a5..a8bd1acbe4363 100644 --- a/server/src/main/java/org/elasticsearch/action/main/MainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/main/MainResponse.java @@ -41,18 +41,16 @@ public class MainResponse extends ActionResponse implements ToXContentObject { private ClusterName clusterName; private String clusterUuid; private Build build; - boolean available; MainResponse() { } - public MainResponse(String nodeName, Version version, ClusterName clusterName, String clusterUuid, Build build, boolean available) { + public MainResponse(String nodeName, Version version, ClusterName clusterName, String clusterUuid, Build build) { this.nodeName = nodeName; this.version = version; this.clusterName = clusterName; this.clusterUuid = clusterUuid; this.build = build; - this.available = available; } public String getNodeName() { @@ -75,10 +73,6 @@ public Build getBuild() { return build; } - public boolean isAvailable() { - return available; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -87,7 +81,9 @@ public void writeTo(StreamOutput out) throws IOException { clusterName.writeTo(out); out.writeString(clusterUuid); Build.writeBuild(build, out); - out.writeBoolean(available); + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + out.writeBoolean(true); + } } @Override @@ -98,7 +94,9 @@ public void readFrom(StreamInput in) throws IOException { clusterName = new ClusterName(in); clusterUuid = in.readString(); build = Build.readBuild(in); - available = in.readBoolean(); + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + in.readBoolean(); + } } @Override @@ -133,7 +131,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws response.build = new Build((String) value.get("build_hash"), (String) value.get("build_date"), (boolean) value.get("build_snapshot")); response.version = Version.fromString((String) value.get("number")); - response.available = true; }, (parser, context) -> parser.map(), new ParseField("version")); } @@ -154,12 +151,11 @@ public boolean equals(Object o) { Objects.equals(version, other.version) && Objects.equals(clusterUuid, other.clusterUuid) && Objects.equals(build, other.build) && - Objects.equals(available, other.available) && Objects.equals(clusterName, other.clusterName); } @Override public int hashCode() { - return Objects.hash(nodeName, version, clusterUuid, build, clusterName, available); + return Objects.hash(nodeName, version, clusterUuid, build, clusterName); } } diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 368696a9553d9..d560a7ecc11d3 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -53,6 +53,6 @@ protected void doExecute(MainRequest request, 
ActionListener liste final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; listener.onResponse( new MainResponse(Node.NODE_NAME_SETTING.get(settings), Version.CURRENT, clusterState.getClusterName(), - clusterState.metaData().clusterUUID(), Build.CURRENT, available)); + clusterState.metaData().clusterUUID(), Build.CURRENT)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java index c155b76cb810f..250ee209587e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java @@ -60,13 +60,11 @@ public RestResponse buildResponse(MainResponse mainResponse, XContentBuilder bui } static BytesRestResponse convertMainResponse(MainResponse response, RestRequest request, XContentBuilder builder) throws IOException { - RestStatus status = response.isAvailable() ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; - // Default to pretty printing, but allow ?pretty=false to disable if (request.hasParam("pretty") == false) { builder.prettyPrint().lfAtEnd(); } response.toXContent(builder, request); - return new BytesRestResponse(status, builder); + return new BytesRestResponse(RestStatus.OK, builder); } } diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 34f9bc15ecfa6..035cc0be4a5e5 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -48,9 +48,8 @@ public void testMainActionClusterAvailable() { final ClusterService clusterService = mock(ClusterService.class); final ClusterName clusterName = new ClusterName("elasticsearch"); final Settings settings = Settings.builder().put("node.name", "my-node").build(); - final boolean available = randomBoolean(); ClusterBlocks blocks; - if (available) { + if (randomBoolean()) { if (randomBoolean()) { blocks = ClusterBlocks.EMPTY_CLUSTER_BLOCK; } else { @@ -86,7 +85,6 @@ public void onFailure(Exception e) { }); assertNotNull(responseRef.get()); - assertEquals(available, responseRef.get().isAvailable()); verify(clusterService, times(1)).state(); } } diff --git a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 03e75f90ff9e3..552e3801954aa 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -41,7 +41,7 @@ protected MainResponse createTestInstance() { String nodeName = randomAlphaOfLength(10); Build build = new Build(randomAlphaOfLength(8), new Date(randomNonNegativeLong()).toString(), randomBoolean()); Version version = VersionUtils.randomVersion(random()); - return new MainResponse(nodeName, version, clusterName, clusterUuid , build, true); + return new MainResponse(nodeName, version, clusterName, clusterUuid , build); } @Override @@ -58,7 +58,7 @@ public void testToXContent() throws IOException { String clusterUUID = randomAlphaOfLengthBetween(10, 20); Build build = new Build(Build.CURRENT.shortHash(), Build.CURRENT.date(), Build.CURRENT.isSnapshot()); Version version = Version.CURRENT; - MainResponse response = new MainResponse("nodeName", version, new 
ClusterName("clusterName"), clusterUUID, build, true); + MainResponse response = new MainResponse("nodeName", version, new ClusterName("clusterName"), clusterUUID, build); XContentBuilder builder = XContentFactory.jsonBuilder(); response.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{" @@ -80,12 +80,11 @@ public void testToXContent() throws IOException { @Override protected MainResponse mutateInstance(MainResponse mutateInstance) { String clusterUuid = mutateInstance.getClusterUuid(); - boolean available = mutateInstance.isAvailable(); Build build = mutateInstance.getBuild(); Version version = mutateInstance.getVersion(); String nodeName = mutateInstance.getNodeName(); ClusterName clusterName = mutateInstance.getClusterName(); - switch (randomIntBetween(0, 5)) { + switch (randomIntBetween(0, 4)) { case 0: clusterUuid = clusterUuid + randomAlphaOfLength(5); break; @@ -93,19 +92,16 @@ protected MainResponse mutateInstance(MainResponse mutateInstance) { nodeName = nodeName + randomAlphaOfLength(5); break; case 2: - available = !available; - break; - case 3: // toggle the snapshot flag of the original Build parameter build = new Build(build.shortHash(), build.date(), !build.isSnapshot()); break; - case 4: + case 3: version = randomValueOtherThan(version, () -> VersionUtils.randomVersion(random())); break; - case 5: + case 4: clusterName = new ClusterName(clusterName + randomAlphaOfLength(5)); break; } - return new MainResponse(nodeName, version, clusterName, clusterUuid, build, available); + return new MainResponse(nodeName, version, clusterName, clusterUuid, build); } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java index 13e6de063e81e..656b020d53279 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java @@ -36,6 +36,7 @@ import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class RestMainActionTests extends ESTestCase { @@ -44,12 +45,10 @@ public void testHeadResponse() throws Exception { final String nodeName = "node1"; final ClusterName clusterName = new ClusterName("cluster1"); final String clusterUUID = randomAlphaOfLengthBetween(10, 20); - final boolean available = randomBoolean(); - final RestStatus expectedStatus = available ? 
RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; final Version version = Version.CURRENT; final Build build = Build.CURRENT; - final MainResponse mainResponse = new MainResponse(nodeName, version, clusterName, clusterUUID, build, available); + final MainResponse mainResponse = new MainResponse(nodeName, version, clusterName, clusterUUID, build); XContentBuilder builder = JsonXContent.contentBuilder(); RestRequest restRequest = new FakeRestRequest() { @Override @@ -60,7 +59,7 @@ public Method method() { BytesRestResponse response = RestMainAction.convertMainResponse(mainResponse, restRequest, builder); assertNotNull(response); - assertEquals(expectedStatus, response.status()); + assertThat(response.status(), equalTo(RestStatus.OK)); // the empty responses are handled in the HTTP layer so we do // not assert on them here @@ -70,13 +69,11 @@ public void testGetResponse() throws Exception { final String nodeName = "node1"; final ClusterName clusterName = new ClusterName("cluster1"); final String clusterUUID = randomAlphaOfLengthBetween(10, 20); - final boolean available = randomBoolean(); - final RestStatus expectedStatus = available ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; final Version version = Version.CURRENT; final Build build = Build.CURRENT; final boolean prettyPrint = randomBoolean(); - final MainResponse mainResponse = new MainResponse(nodeName, version, clusterName, clusterUUID, build, available); + final MainResponse mainResponse = new MainResponse(nodeName, version, clusterName, clusterUUID, build); XContentBuilder builder = JsonXContent.contentBuilder(); Map params = new HashMap<>(); @@ -87,7 +84,7 @@ public void testGetResponse() throws Exception { BytesRestResponse response = RestMainAction.convertMainResponse(mainResponse, restRequest, builder); assertNotNull(response); - assertEquals(expectedStatus, response.status()); + assertThat(response.status(), equalTo(RestStatus.OK)); assertThat(response.content().length(), greaterThan(0)); XContentBuilder responseBuilder = JsonXContent.contentBuilder(); From 34a264c375fc48e251797c745561dcc9abf24349 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 14 Mar 2018 11:51:22 +0100 Subject: [PATCH 16/89] added docs for `wrapper` query. 
Closes #11591 --- .../QueryDSLDocumentationTests.java | 8 +++++++ .../query-dsl/special-queries.asciidoc | 5 ++++ .../java-api/query-dsl/wrapper-query.asciidoc | 11 +++++++++ .../high-level/query-builders.asciidoc | 1 + .../query-dsl/special-queries.asciidoc | 5 ++++ .../query-dsl/wrapper-query.asciidoc | 24 +++++++++++++++++++ 6 files changed, 54 insertions(+) create mode 100644 docs/java-api/query-dsl/wrapper-query.asciidoc create mode 100644 docs/reference/query-dsl/wrapper-query.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java index 7e84b266f7f03..789d237c5a3bc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java @@ -76,6 +76,7 @@ import static org.elasticsearch.index.query.QueryBuilders.termsQuery; import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; +import static org.elasticsearch.index.query.QueryBuilders.wrapperQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction; @@ -449,4 +450,11 @@ public void testWildcard() { "k?mch*"); // <2> // end::wildcard } + + public void testWrapper() { + // tag::wrapper + String query = "{\"term\": {\"user\": \"kimchy\"}}"; // <1> + wrapperQuery(query); + // end::wrapper + } } diff --git a/docs/java-api/query-dsl/special-queries.asciidoc b/docs/java-api/query-dsl/special-queries.asciidoc index 4e4d59a6d4aa5..bca3bde3b3f62 100644 --- a/docs/java-api/query-dsl/special-queries.asciidoc +++ b/docs/java-api/query-dsl/special-queries.asciidoc @@ -18,9 +18,14 @@ This query allows a script to act as a filter. Also see the This query finds percolator queries based on documents. +<>:: + +A query that accepts other queries as json or yaml string. 
+ include::mlt-query.asciidoc[] include::script-query.asciidoc[] include::percolate-query.asciidoc[] +include::wrapper-query.asciidoc[] diff --git a/docs/java-api/query-dsl/wrapper-query.asciidoc b/docs/java-api/query-dsl/wrapper-query.asciidoc new file mode 100644 index 0000000000000..3bdf3cc69d30a --- /dev/null +++ b/docs/java-api/query-dsl/wrapper-query.asciidoc @@ -0,0 +1,11 @@ +[[java-query-dsl-wrapper-query]] +==== Wrapper Query + +See {ref}/query-dsl-wrapper-query.html[Wrapper Query] + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{query-dsl-test}[wrapper] +-------------------------------------------------- + +<1> query defined as a JSON string diff --git a/docs/java-rest/high-level/query-builders.asciidoc b/docs/java-rest/high-level/query-builders.asciidoc index 88204baa8745d..32a3b06505b1d 100644 --- a/docs/java-rest/high-level/query-builders.asciidoc +++ b/docs/java-rest/high-level/query-builders.asciidoc @@ -82,6 +82,7 @@ This page lists all the available search queries with their corresponding `Query | {ref}/query-dsl-mlt-query.html[More Like This] | {query-ref}/MoreLikeThisQueryBuilder.html[MoreLikeThisQueryBuilder] | {query-ref}/QueryBuilders.html#moreLikeThisQuery-org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item:A-[QueryBuilders.moreLikeThisQuery()] | {ref}/query-dsl-script-query.html[Script] | {query-ref}/ScriptQueryBuilder.html[ScriptQueryBuilder] | {query-ref}/QueryBuilders.html#scriptQuery-org.elasticsearch.script.Script-[QueryBuilders.scriptQuery()] | {ref}/query-dsl-percolate-query.html[Percolate] | {percolate-ref}/PercolateQueryBuilder.html[PercolateQueryBuilder] | +| {ref}/query-dsl-wrapper-query.html[Wrapper] | {query-ref}/WrapperQueryBuilder.html[WrapperQueryBuilder] | {query-ref}/QueryBuilders.html#wrapperQuery-java.lang.String-[QueryBuilders.wrapperQuery()] |====== ==== Span queries diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc index 3e3c140d6f582..a062fa7ddb1fb 100644 --- a/docs/reference/query-dsl/special-queries.asciidoc +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -19,9 +19,14 @@ This query allows a script to act as a filter. Also see the This query finds queries that are stored as documents that match with the specified document. +<>:: + +A query that accepts other queries as json or yaml string. + include::mlt-query.asciidoc[] include::script-query.asciidoc[] include::percolate-query.asciidoc[] +include::wrapper-query.asciidoc[] diff --git a/docs/reference/query-dsl/wrapper-query.asciidoc b/docs/reference/query-dsl/wrapper-query.asciidoc new file mode 100644 index 0000000000000..4ffef5bfc6bcc --- /dev/null +++ b/docs/reference/query-dsl/wrapper-query.asciidoc @@ -0,0 +1,24 @@ +[[query-dsl-wrapper-query]] +=== Wrapper Query + +A query that accepts any other query as a base64 encoded string. + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query" : { + "wrapper": { + "query" : "eyJ0ZXJtIiA6IHsgInVzZXIiIDogIktpbWNoeSIgfX0=" <1> + } + } +} +-------------------------------------------------- +// CONSOLE + +<1> Base64 encoded string: `{"term" : { "user" : "Kimchy" }}` + +This query is more useful in the context of the Java high-level REST client or +transport client to also accept queries as a json formatted string.
+In these cases queries can be specified as a json or yaml formatted string or +as a query builder (which is available in the Java high-level REST client). \ No newline at end of file From d3d7c0452430ea81e391e2af2f996a0c25d48d6b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 07:45:32 -0400 Subject: [PATCH 17/89] Fix description of die with dignity plugin This commit adjusts the description of the die with dignity plugin which was leftover from a previous iteration on this work. --- qa/die-with-dignity/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/die-with-dignity/build.gradle b/qa/die-with-dignity/build.gradle index a3a9bd3da5800..49a4d3c50873e 100644 --- a/qa/die-with-dignity/build.gradle +++ b/qa/die-with-dignity/build.gradle @@ -20,7 +20,7 @@ apply plugin: 'elasticsearch.esplugin' esplugin { - description 'Out of memory plugin' + description 'Die with dignity plugin' classname 'org.elasticsearch.DieWithDignityPlugin' } From 90469123b3bbecb1dca76ab8720f362580962d50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 14 Mar 2018 13:44:12 +0100 Subject: [PATCH 18/89] Fix eclipse compile issues (#29056) Eclipse Oxygen doesn't seem to be able to infer the correct type arguments for Arrays::asList in the given test context. Adding a cast to make this more explicit. --- .../org/elasticsearch/core/internal/io/IOUtilsTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java index c133a9ddc1f3c..ee5af323b5219 100644 --- a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java +++ b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java @@ -53,7 +53,7 @@ public void testCloseArray() throws IOException { } public void testCloseIterable() throws IOException { - runTestClose(Arrays::asList, IOUtils::close); + runTestClose((Function>) Arrays::asList, IOUtils::close); } private void runTestClose(final Function function, final CheckedConsumer close) throws IOException { @@ -74,7 +74,7 @@ public void testCloseArrayWithIOExceptions() throws IOException { } public void testCloseIterableWithIOExceptions() throws IOException { - runTestCloseWithIOExceptions(Arrays::asList, IOUtils::close); + runTestCloseWithIOExceptions((Function>) Arrays::asList, IOUtils::close); } private void runTestCloseWithIOExceptions( @@ -113,7 +113,7 @@ public void testDeleteFilesIgnoringExceptionsArray() throws IOException { } public void testDeleteFilesIgnoringExceptionsIterable() throws IOException { - runDeleteFilesIgnoringExceptionsTest(Arrays::asList, IOUtils::deleteFilesIgnoringExceptions); + runDeleteFilesIgnoringExceptionsTest((Function>) Arrays::asList, IOUtils::deleteFilesIgnoringExceptions); } private void runDeleteFilesIgnoringExceptionsTest( From 42fe66162ee97fa0cd78fa64c4bcdd1b127d20ad Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 14 Mar 2018 07:12:15 -0700 Subject: [PATCH 19/89] Fix Parsing Bug with Update By Query for Stored Scripts (#29039) This changes the parsing logic for stored scripts in update by query to match the parsing logic for scripts in general Elasticsearch.
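To make the distinction this fix enforces concrete, here is a sketch of the two cases using the `Script` constructors that appear in the diff below; the snippet is illustrative rather than part of the patch, and `my_update_script` is simply the stored script id used by the new REST test:

```
import java.util.Collections;
import java.util.Map;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

class ScriptParsingSketch {

    static void buildScripts() {
        Map<String, Object> params = Collections.singletonMap("factor", 2);
        // An inline script carries a language, defaulting to painless.
        Script inline = new Script(ScriptType.INLINE, "painless",
                "ctx._source.count += params.factor", params);
        // A stored script is referenced by id only; after this fix, supplying
        // "lang" alongside a stored script id is rejected instead of ignored.
        Script stored = new Script(ScriptType.STORED, null,
                "my_update_script", null, params);
    }
}
```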
Closes #28002 --- .../reindex/RestUpdateByQueryAction.java | 12 ++- .../test/update_by_query/10_script.yml | 81 +++++++++++++++++++ 2 files changed, 91 insertions(+), 2 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8b898244c0750..8f09afbb17c6c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -86,7 +86,7 @@ private static Script parseScript(Object config) { Map configMap = (Map) config; String script = null; ScriptType type = null; - String lang = DEFAULT_SCRIPT_LANG; + String lang = null; Map params = Collections.emptyMap(); for (Iterator> itr = configMap.entrySet().iterator(); itr.hasNext();) { Map.Entry entry = itr.next(); @@ -126,7 +126,15 @@ private static Script parseScript(Object config) { } assert type != null : "if script is not null, type should definitely not be null"; - return new Script(type, lang, script, params); + if (type == ScriptType.STORED) { + if (lang != null) { + throw new IllegalArgumentException("lang cannot be specified for stored scripts"); + } + + return new Script(type, null, script, null, params); + } else { + return new Script(type, lang == null ? DEFAULT_SCRIPT_LANG : lang, script, params); + } } else { throw new IllegalArgumentException("Script value should be a String or a Map"); } diff --git a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml index aee2b2fb05759..dcd5977fb12bc 100644 --- a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml +++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml @@ -340,3 +340,84 @@ source: if (ctx._source.user == "kimchy") {ctx.op = "index"} else {ctx.op = "junk"} - match: { error.reason: 'Operation type [junk] not allowed, only [noop, index, delete] are allowed' } + +--- +"Update all docs with one deletion and one noop using a stored script": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 3 + body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 4 + body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } + - do: + indices.refresh: {} + - do: + put_script: + id: "my_update_script" + body: { "script": {"lang": "painless", + "source": "int choice = ctx._source.level % 3; + if (choice == 0) { + ctx._source.last_updated = '2016-01-02T00:00:00Z'; + } else if (choice == 1) { + ctx.op = 'noop'; + } else { + ctx.op = 'delete'; + }" } } + - match: { acknowledged: true } + + - do: + update_by_query: + refresh: true + index: twitter + body: + script: + id: "my_update_script" + + - match: {updated: 2} + - match: {deleted: 1} + - match: {noops: 1} + + - do: + search: + index: twitter + body: + query: + match: + last_updated: "2016-01-02T00:00:00Z" + - match: { hits.total: 2 } + + - do: + search: + index: twitter + body: + query: + 
match: + last_updated: "2016-01-01T12:10:30Z" + - match: { hits.total: 1 } + + - do: + search: + index: twitter + body: + query: + term: + level: 11 + - match: { hits.total: 0 } From 3e0e6444cf33d9145d383019e41f2b9188f058ad Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 14 Mar 2018 11:39:35 -0400 Subject: [PATCH 20/89] Mark field_data as deprecated in /_cache/clear It was deprecated in #28943. --- .../main/resources/rest-api-spec/api/indices.clear_cache.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 1f24199fad468..1523c722da31d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -14,7 +14,7 @@ "params": { "field_data": { "type" : "boolean", - "description" : "Clear field data" + "description" : "Clear field data. This is deprecated. Prefer `fielddata`." }, "fielddata": { "type" : "boolean", @@ -22,7 +22,7 @@ }, "fields": { "type" : "list", - "description" : "A comma-separated list of fields to clear when using the `field_data` parameter (default: all)" + "description" : "A comma-separated list of fields to clear when using the `fielddata` parameter (default: all)" }, "query": { "type" : "boolean", From 5bf92ca3b3f5aa4957fb9fb33ae2cc4140f1c10b Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 14 Mar 2018 15:43:53 +0000 Subject: [PATCH 21/89] Enforce that java.io.tmpdir exists on startup (#28217) If the default java.io.tmpdir is used then the startup script creates it, but if a custom java.io.tmpdir is used then the user must ensure it exists before running Elasticsearch. If they forget then it can cause errors that are hard to understand, so this change adds an explicit check early in the bootstrap and reports a clear error if java.io.tmpdir is not an accessible directory. 
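In essence the new check boils down to the following condensed, standalone sketch; the error messages mirror the patch, while the class and method wiring here is illustrative (the real logic lives in `Environment#validateTmpFile` below):

```
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

class TmpDirCheckSketch {

    // Fail fast at startup when java.io.tmpdir is missing or is not a
    // directory, instead of letting obscure errors surface much later.
    static void validateTmpDir() throws IOException {
        Path tmpDir = Paths.get(System.getProperty("java.io.tmpdir"));
        if (Files.exists(tmpDir) == false) {
            throw new FileNotFoundException(
                    "Temporary file directory [" + tmpDir + "] does not exist or is not accessible");
        }
        if (Files.isDirectory(tmpDir) == false) {
            throw new IOException(
                    "Configured temporary file directory [" + tmpDir + "] is not a directory");
        }
    }
}
```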
--- .../bootstrap/Elasticsearch.java | 7 ++++++ .../org/elasticsearch/env/Environment.java | 20 ++++++++++++++- .../elasticsearch/env/EnvironmentTests.java | 25 +++++++++++++++++-- 3 files changed, 49 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 1538f0cdf0003..a0646288b1ad0 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -108,6 +108,13 @@ protected void execute(Terminal terminal, OptionSet options, Environment env) th final Path pidFile = pidfileOption.value(options); final boolean quiet = options.has(quietOption); + // a misconfigured java.io.tmpdir can cause hard-to-diagnose problems later, so reject it immediately + try { + env.validateTmpFile(); + } catch (IOException e) { + throw new UserException(ExitCodes.CONFIG, e.getMessage()); + } + try { init(daemonize, pidFile, quiet, env); } catch (NodeValidationException e) { diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index 2433ccf6e8ede..1f4940007afda 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; @@ -87,9 +88,14 @@ public class Environment { private final Path pidFile; /** Path to the temporary file directory used by the JDK */ - private final Path tmpFile = PathUtils.get(System.getProperty("java.io.tmpdir")); + private final Path tmpFile; public Environment(final Settings settings, final Path configPath) { + this(settings, configPath, PathUtils.get(System.getProperty("java.io.tmpdir"))); + } + + // Should only be called directly by this class's unit tests + Environment(final Settings settings, final Path configPath, final Path tmpPath) { final Path homeFile; if (PATH_HOME_SETTING.exists(settings)) { homeFile = PathUtils.get(PATH_HOME_SETTING.get(settings)).normalize(); @@ -103,6 +109,8 @@ public Environment(final Settings settings, final Path configPath) { configFile = homeFile.resolve("config"); } + tmpFile = Objects.requireNonNull(tmpPath); + pluginsFile = homeFile.resolve("plugins"); List dataPaths = PATH_DATA_SETTING.get(settings); @@ -302,6 +310,16 @@ public Path tmpFile() { return tmpFile; } + /** Ensure the configured temp directory is a valid directory */ + public void validateTmpFile() throws IOException { + if (Files.exists(tmpFile) == false) { + throw new FileNotFoundException("Temporary file directory [" + tmpFile + "] does not exist or is not accessible"); + } + if (Files.isDirectory(tmpFile) == false) { + throw new IOException("Configured temporary file directory [" + tmpFile + "] is not a directory"); + } + } + public static FileStore getFileStore(final Path path) throws IOException { return new ESFileStore(Files.getFileStore(path)); } diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 5ca3f4dc6a591..5ada31b612941 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ 
b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; import java.nio.file.Path; @@ -28,6 +29,7 @@ import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,11 +39,11 @@ * Simple unit-tests for Environment.java */ public class EnvironmentTests extends ESTestCase { - public Environment newEnvironment() throws IOException { + public Environment newEnvironment() { return newEnvironment(Settings.EMPTY); } - public Environment newEnvironment(Settings settings) throws IOException { + public Environment newEnvironment(Settings settings) { Settings build = Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) @@ -146,4 +148,23 @@ public void testNodeDoesNotRequireLocalStorageButHasPathData() { assertThat(e, hasToString(containsString("node does not require local storage yet path.data is set to [" + pathData + "]"))); } + public void testNonExistentTempPathValidation() { + Settings build = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); + Environment environment = new Environment(build, null, createTempDir().resolve("this_does_not_exist")); + FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile); + assertThat(e.getMessage(), startsWith("Temporary file directory [")); + assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible")); + } + + public void testTempPathValidationWhenRegularFile() throws IOException { + Settings build = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); + Environment environment = new Environment(build, null, createTempFile("something", ".test")); + IOException e = expectThrows(IOException.class, environment::validateTmpFile); + assertThat(e.getMessage(), startsWith("Configured temporary file directory [")); + assertThat(e.getMessage(), endsWith(".test] is not a directory")); + } } From cb9d10f971e800eed6f81ed8774bb1a63a39c167 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 14 Mar 2018 15:47:18 +0000 Subject: [PATCH 22/89] Protect against NPE in RestNodesAction (#29059) * Protect against NPE in RestNodesAction --- .../rest/action/cat/RestNodesAction.java | 28 +++++--- .../rest/action/cat/RestNodesActionTests.java | 69 +++++++++++++++++++ 2 files changed, 88 insertions(+), 9 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 0a16193894466..c232ec25322ff 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -238,7 +238,7 @@ protected Table getTableWithHeader(final RestRequest request) { return table; } - private Table buildTable(boolean fullId, RestRequest req, ClusterStateResponse state, 
NodesInfoResponse nodesInfo, + Table buildTable(boolean fullId, RestRequest req, ClusterStateResponse state, NodesInfoResponse nodesInfo, NodesStatsResponse nodesStats) { DiscoveryNodes nodes = state.getState().nodes(); @@ -273,14 +273,24 @@ private Table buildTable(boolean fullId, RestRequest req, ClusterStateResponse s table.addCell(node.getVersion().toString()); table.addCell(info == null ? null : info.getBuild().shortHash()); table.addCell(jvmInfo == null ? null : jvmInfo.version()); - - long diskTotal = fsInfo.getTotal().getTotal().getBytes(); - long diskUsed = diskTotal - fsInfo.getTotal().getAvailable().getBytes(); - double diskUsedRatio = diskTotal == 0 ? 1.0 : (double) diskUsed / diskTotal; - table.addCell(fsInfo == null ? null : fsInfo.getTotal().getTotal()); - table.addCell(fsInfo == null ? null : new ByteSizeValue(diskUsed)); - table.addCell(fsInfo == null ? null : fsInfo.getTotal().getAvailable()); - table.addCell(fsInfo == null ? null : String.format(Locale.ROOT, "%.2f", 100.0 * diskUsedRatio)); + + + ByteSizeValue diskTotal = null; + ByteSizeValue diskUsed = null; + ByteSizeValue diskAvailable = null; + String diskUsedPercent = null; + if (fsInfo != null) { + diskTotal = fsInfo.getTotal().getTotal(); + diskAvailable = fsInfo.getTotal().getAvailable(); + diskUsed = new ByteSizeValue(diskTotal.getBytes() - diskAvailable.getBytes()); + + double diskUsedRatio = diskTotal.getBytes() == 0 ? 1.0 : (double) diskUsed.getBytes() / diskTotal.getBytes(); + diskUsedPercent = String.format(Locale.ROOT, "%.2f", 100.0 * diskUsedRatio); + } + table.addCell(diskTotal); + table.addCell(diskUsed); + table.addCell(diskAvailable); + table.addCell(diskUsedPercent); table.addCell(jvmStats == null ? null : jvmStats.getMem().getHeapUsed()); table.addCell(jvmStats == null ? null : jvmStats.getMem().getHeapUsedPercent()); diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java new file mode 100644 index 0000000000000..32993a6b7c720 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.cat; + +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; +import org.junit.Before; + +import java.util.Collections; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RestNodesActionTests extends ESTestCase { + + private RestNodesAction action; + + @Before + public void setUpAction() { + UsageService usageService = new UsageService(Settings.EMPTY); + action = new RestNodesAction(Settings.EMPTY, + new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); + } + + public void testBuildTableDoesNotThrowGivenNullNodeInfoAndStats() { + ClusterName clusterName = new ClusterName("cluster-1"); + DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + builder.add(new DiscoveryNode("node-1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT)); + DiscoveryNodes discoveryNodes = builder.build(); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.nodes()).thenReturn(discoveryNodes); + + ClusterStateResponse clusterStateResponse = new ClusterStateResponse(clusterName, clusterState, randomNonNegativeLong()); + NodesInfoResponse nodesInfoResponse = new NodesInfoResponse(clusterName, Collections.emptyList(), Collections.emptyList()); + NodesStatsResponse nodesStatsResponse = new NodesStatsResponse(clusterName, Collections.emptyList(), Collections.emptyList()); + + action.buildTable(false, new FakeRestRequest(), clusterStateResponse, nodesInfoResponse, nodesStatsResponse); + } +} From ab2d74c0014b658f2b63e78055616a38e6cc8dd2 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 14 Mar 2018 17:16:48 +0100 Subject: [PATCH 23/89] Removed outdated docs about shading Elasticsearch --- docs/java-api/index.asciidoc | 56 ------------------------------------ 1 file changed, 56 deletions(-) diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index f9b0a7e58b5b9..002804cf6170e 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -130,62 +130,6 @@ and add it as a dependency. As an example, we will use the `slf4j-simple` logger -------------------------------------------------- - -== Dealing with JAR dependency conflicts - -If you want to use Elasticsearch in your Java application, you may have to deal with version conflicts with third party -dependencies like Guava and Joda. For instance, perhaps Elasticsearch uses Joda 2.8, while your code uses Joda 2.1. - -You have two choices: - -* The simplest solution is to upgrade. Newer module versions are likely to have fixed old bugs. -The further behind you fall, the harder it will be to upgrade later. 
Of course, it is possible that you are using a -third party dependency that in turn depends on an outdated version of a package, which prevents you from upgrading. - -* The second option is to relocate the troublesome dependencies and to shade them either with your own application -or with Elasticsearch and any plugins needed by the Elasticsearch client. - -The https://www.elastic.co/blog/to-shade-or-not-to-shade["To shade or not to shade" blog post] describes -all the steps for doing so. - -== Embedding jar with dependencies - -If you want to create a single jar containing your application and all dependencies, you should not -use `maven-assembly-plugin` for that because it can not deal with `META-INF/services` structure which is -required by Lucene jars. - -Instead, you can use `maven-shade-plugin` and configure it as follow: - -[source,xml] --------------------------------------------------- - - org.apache.maven.plugins - maven-shade-plugin - 2.4.1 - - - package - shade - - - - - - - - --------------------------------------------------- - -Note that if you have a `main` class you want to automatically call when running `java -jar yourjar.jar`, just add -it to the `transformers`: - -[source,xml] --------------------------------------------------- - - org.elasticsearch.demo.Generate - --------------------------------------------------- - :client-tests: {docdir}/../../server/src/test/java/org/elasticsearch/client/documentation include::client.asciidoc[] From 29a728526ed11e4681c63b12ad5876d7d1685053 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 13:02:55 -0400 Subject: [PATCH 24/89] Fix typo in terminate after API docs This commit fixes a minor typo in the terminate after Java API docs. Relates #29065 --- docs/java-api/search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index dfc2767520237..78fb4539641d5 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -128,7 +128,7 @@ documentation for details. The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. If set, you will be able to check if the operation terminated early by asking for `isTerminatedEarly()` in the -`SearchResponse` onject: +`SearchResponse` object: [source,java] -------------------------------------------------- From 87553bba16a93d6a9da11c2cceaefd3d526170f7 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 14 Mar 2018 19:07:20 +0100 Subject: [PATCH 25/89] Add ingest-attachment support for per document `indexed_chars` limit (#28977) Today we support a global `indexed_chars` processor parameter. But in some cases, users would like to set this limit depending on the document itself. It used to be supported in the mapper-attachments plugin by extracting the limit value from a meta field in the document sent to the indexing process. We add an option which reads this limit value from the document itself by adding a setting named `indexed_chars_field`. This allows running: ``` PUT _ingest/pipeline/attachment { "description" : "Extract attachment information.
Used to parse pdf and office files", "processors" : [ { "attachment" : { "field" : "data", "indexed_chars_field" : "size" } } ] } ``` Then index either: ``` PUT index/doc/1?pipeline=attachment { "data": "BASE64" } ``` Which will use the default value (or the one defined by `indexed_chars`) Or ``` PUT index/doc/2?pipeline=attachment { "data": "BASE64", "size": 1000 } ``` Closes #28942 --- docs/plugins/ingest-attachment.asciidoc | 122 +++++++++++++++++- .../attachment/AttachmentProcessor.java | 25 +++- .../attachment/AttachmentProcessorTests.java | 62 +++++++-- .../20_attachment_processor.yml | 74 +++++++++++ 4 files changed, 264 insertions(+), 19 deletions(-) diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index 443d1fb578a6d..2f9564294d0b8 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -25,6 +25,7 @@ include::install_remove.asciidoc[] | `field` | yes | - | The field to get the base64 encoded field from | `target_field` | no | attachment | The field that will hold the attachment information | `indexed_chars` | no | 100000 | The number of chars being used for extraction to prevent huge fields. Use `-1` for no limit. +| `indexed_chars_field` | no | `null` | Field name from which you can overwrite the number of chars being used for extraction. See `indexed_chars`. | `properties` | no | all properties | Array of properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language` | `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== @@ -44,11 +45,11 @@ PUT _ingest/pipeline/attachment } ] } -PUT my_index/my_type/my_id?pipeline=attachment +PUT my_index/_doc/my_id?pipeline=attachment { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=" } -GET my_index/my_type/my_id +GET my_index/_doc/my_id -------------------------------------------------- // CONSOLE @@ -59,7 +60,7 @@ Returns this: { "found": true, "_index": "my_index", - "_type": "my_type", + "_type": "_doc", "_id": "my_id", "_version": 1, "_source": { @@ -99,6 +100,115 @@ NOTE: Extracting contents from binary data is a resource intensive operation and consumes a lot of resources. It is highly recommended to run pipelines using this processor in a dedicated ingest node. +[[ingest-attachment-extracted-chars]] +==== Limit the number of extracted chars + +To prevent extracting too many chars and overload the node memory, the number of chars being used for extraction +is limited by default to `100000`. You can change this value by setting `indexed_chars`. Use `-1` for no limit but +ensure when setting this that your node will have enough HEAP to extract the content of very big documents. + +You can also define this limit per document by extracting from a given field the limit to set. If the document +has that field, it will overwrite the `indexed_chars` setting. To set this field, define the `indexed_chars_field` +setting. 
+ +For example: + +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/attachment +{ + "description" : "Extract attachment information", + "processors" : [ + { + "attachment" : { + "field" : "data", + "indexed_chars" : 11, + "indexed_chars_field" : "max_size" + } + } + ] +} +PUT my_index/_doc/my_id?pipeline=attachment +{ + "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=" +} +GET my_index/_doc/my_id +-------------------------------------------------- +// CONSOLE + +Returns this: + +[source,js] +-------------------------------------------------- +{ + "found": true, + "_index": "my_index", + "_type": "_doc", + "_id": "my_id", + "_version": 1, + "_source": { + "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", + "attachment": { + "content_type": "application/rtf", + "language": "sl", + "content": "Lorem ipsum", + "content_length": 11 + } + } +} +-------------------------------------------------- +// TESTRESPONSE + + +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/attachment +{ + "description" : "Extract attachment information", + "processors" : [ + { + "attachment" : { + "field" : "data", + "indexed_chars" : 11, + "indexed_chars_field" : "max_size" + } + } + ] +} +PUT my_index/_doc/my_id_2?pipeline=attachment +{ + "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", + "max_size": 5 +} +GET my_index/_doc/my_id_2 +-------------------------------------------------- +// CONSOLE + +Returns this: + +[source,js] +-------------------------------------------------- +{ + "found": true, + "_index": "my_index", + "_type": "_doc", + "_id": "my_id_2", + "_version": 1, + "_source": { + "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", + "max_size": 5, + "attachment": { + "content_type": "application/rtf", + "language": "ro", + "content": "Lorem", + "content_length": 5 + } + } +} +-------------------------------------------------- +// TESTRESPONSE + + [[ingest-attachment-with-arrays]] ==== Using the Attachment Processor with arrays @@ -150,7 +260,7 @@ PUT _ingest/pipeline/attachment } ] } -PUT my_index/my_type/my_id?pipeline=attachment +PUT my_index/_doc/my_id?pipeline=attachment { "attachments" : [ { @@ -163,7 +273,7 @@ PUT my_index/my_type/my_id?pipeline=attachment } ] } -GET my_index/my_type/my_id +GET my_index/_doc/my_id -------------------------------------------------- // CONSOLE @@ -172,7 +282,7 @@ Returns this: -------------------------------------------------- { "_index" : "my_index", - "_type" : "my_type", + "_type" : "_doc", "_id" : "my_id", "_version" : 1, "found" : true, diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java index b23c627290eb0..beaec88e97d1a 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java @@ -42,6 +42,7 @@ import static org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readIntProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; +import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalStringProperty; import static 
org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; public final class AttachmentProcessor extends AbstractProcessor { @@ -55,15 +56,17 @@ public final class AttachmentProcessor extends AbstractProcessor { private final Set properties; private final int indexedChars; private final boolean ignoreMissing; + private final String indexedCharsField; AttachmentProcessor(String tag, String field, String targetField, Set properties, - int indexedChars, boolean ignoreMissing) throws IOException { + int indexedChars, boolean ignoreMissing, String indexedCharsField) { super(tag); this.field = field; this.targetField = targetField; this.properties = properties; this.indexedChars = indexedChars; this.ignoreMissing = ignoreMissing; + this.indexedCharsField = indexedCharsField; } boolean isIgnoreMissing() { @@ -82,6 +85,17 @@ public void execute(IngestDocument ingestDocument) { throw new IllegalArgumentException("field [" + field + "] is null, cannot parse."); } + Integer indexedChars = this.indexedChars; + + if (indexedCharsField != null) { + // If the user provided the number of characters to be extracted as part of the document, we use it + indexedChars = ingestDocument.getFieldValue(indexedCharsField, Integer.class, true); + if (indexedChars == null) { + // If the field does not exist we fall back to the global limit + indexedChars = this.indexedChars; + } + } + Metadata metadata = new Metadata(); String parsedContent = ""; try { @@ -183,14 +197,15 @@ public AttachmentProcessor create(Map registry, Strin Map config) throws Exception { String field = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment"); - List properyNames = readOptionalList(TYPE, processorTag, config, "properties"); + List propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED); boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); + String indexedCharsField = readOptionalStringProperty(TYPE, processorTag, config, "indexed_chars_field"); final Set properties; - if (properyNames != null) { + if (propertyNames != null) { properties = EnumSet.noneOf(Property.class); - for (String fieldName : properyNames) { + for (String fieldName : propertyNames) { try { properties.add(Property.parse(fieldName)); } catch (Exception e) { @@ -202,7 +217,7 @@ public AttachmentProcessor create(Map registry, Strin properties = DEFAULT_PROPERTIES; } - return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars, ignoreMissing); + return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars, ignoreMissing, indexedCharsField); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 07e369985321a..b7bfd199c70c0 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -54,9 +54,9 @@ public class AttachmentProcessorTests extends ESTestCase { private AttachmentProcessor processor; @Before - public void createStandardProcessor() throws IOException { + public void 
createStandardProcessor() { processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", - "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000, false); + "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000, false, null); } public void testEnglishTextDocument() throws Exception { @@ -89,7 +89,7 @@ public void testHtmlDocumentWithRandomFields() throws Exception { selectedProperties.add(AttachmentProcessor.Property.DATE); } processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", - "target_field", selectedProperties, 10000, false); + "target_field", selectedProperties, 10000, false, null); Map attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor); assertThat(attachmentData.keySet(), hasSize(selectedFieldNames.length)); @@ -242,7 +242,7 @@ public void testNullValueWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true, null); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -250,7 +250,7 @@ public void testNullValueWithIgnoreMissing() throws Exception { public void testNonExistentWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true, null); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -259,7 +259,7 @@ public void testNullWithoutIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false, null); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot parse.")); } @@ -267,14 +267,20 @@ public void testNullWithoutIgnoreMissing() throws Exception { public void testNonExistentWithoutIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false, null); Exception exception = expectThrows(Exception.class, () -> 
processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); } private Map parseDocument(String file, AttachmentProcessor processor) throws Exception { + return parseDocument(file, processor, new HashMap<>()); + } + + private Map parseDocument(String file, AttachmentProcessor processor, Map optionalFields) + throws Exception { Map document = new HashMap<>(); document.put("source_field", getAsBase64(file)); + document.putAll(optionalFields); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -284,7 +290,47 @@ private Map parseDocument(String file, AttachmentProcessor proce return attachmentData; } - protected String getAsBase64(String filename) throws Exception { + public void testIndexedChars() throws Exception { + processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", + "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 19, false, null); + + Map attachmentData = parseDocument("text-in-english.txt", processor); + + assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); + assertThat(attachmentData.get("language"), is("en")); + assertThat(attachmentData.get("content"), is("\"God Save the Queen")); + assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); + assertThat(attachmentData.get("content_length"), is(19L)); + + processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", + "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 19, false, "max_length"); + + attachmentData = parseDocument("text-in-english.txt", processor); + + assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); + assertThat(attachmentData.get("language"), is("en")); + assertThat(attachmentData.get("content"), is("\"God Save the Queen")); + assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); + assertThat(attachmentData.get("content_length"), is(19L)); + + attachmentData = parseDocument("text-in-english.txt", processor, Collections.singletonMap("max_length", 10)); + + assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); + assertThat(attachmentData.get("language"), is("sk")); + assertThat(attachmentData.get("content"), is("\"God Save")); + assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); + assertThat(attachmentData.get("content_length"), is(10L)); + + attachmentData = parseDocument("text-in-english.txt", processor, Collections.singletonMap("max_length", 100)); + + assertThat(attachmentData.keySet(), containsInAnyOrder("language", "content", "content_type", "content_length")); + assertThat(attachmentData.get("language"), is("en")); + assertThat(attachmentData.get("content"), is("\"God Save the Queen\" (alternatively \"God Save the King\"")); + assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); + assertThat(attachmentData.get("content_length"), is(56L)); + } + + private String getAsBase64(String filename) throws Exception { String path = "/org/elasticsearch/ingest/attachment/test/sample-files/" + filename; try (InputStream is = AttachmentProcessorTests.class.getResourceAsStream(path)) { byte bytes[] = IOUtils.toByteArray(is); diff --git 
a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml index cab1bfb591f7d..6a22071ba3829 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml @@ -112,3 +112,77 @@ - match: { _source.attachment.content: "This is an english text to tes" } - match: { _source.attachment.language: "en" } - match: { _source.attachment.content_length: 30 } + +--- +"Test indexed chars are configurable per document": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "attachment" : { + "field" : "field1", + "indexed_chars": 30, + "indexed_chars_field": "max_size" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" } + + - do: + get: + index: test + type: test + id: 1 + - length: { _source.attachment: 4 } + - match: { _source.attachment.content: "This is an english text to tes" } + - match: { _source.attachment.language: "en" } + - match: { _source.attachment.content_length: 30 } + + - do: + index: + index: test + type: test + id: 2 + pipeline: "my_pipeline" + body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 18 } + + - do: + get: + index: test + type: test + id: 2 + - length: { _source.attachment: 4 } + - match: { _source.attachment.content: "This is an english" } + - match: { _source.attachment.language: "en" } + - match: { _source.attachment.content_length: 18 } + + - do: + index: + index: test + type: test + id: 3 + pipeline: "my_pipeline" + body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 100000000 } + + - do: + get: + index: test + type: test + id: 3 + - length: { _source.attachment: 4 } + - match: { _source.attachment.content: "This is an english text to test if the pipeline works" } + - match: { _source.attachment.language: "en" } + - match: { _source.attachment.content_length: 54 } From e312ac610aa805b0d007d6b6856bc0944747332c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 14:25:14 -0400 Subject: [PATCH 26/89] Cleanup exception handling in IOUtils (#29069) When we copied IOUtils into the Elasticsearch codebase from Lucene, we brought with it its handling of throwables which are out of whack with how we handle throwables in our codebase. This commit modifies our copy of IOUtils to be consistent with how we handle throwables today: do not catch them. We take advantage of this cleanup to simplify IOUtils. --- .../core/internal/io/IOUtils.java | 77 ++++--------------- 1 file changed, 15 insertions(+), 62 deletions(-) diff --git a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java index 7507327199baf..eaa4df768cd71 100644 --- a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java @@ -58,23 +58,29 @@ public static void close(final Closeable... 
objects) throws IOException { * @see #close(Closeable...) */ public static void close(final Iterable objects) throws IOException { - Throwable th = null; + Exception ex = null; for (final Closeable object : objects) { try { if (object != null) { object.close(); } - } catch (final Throwable t) { - addSuppressed(th, t); - if (th == null) { - th = t; + } catch (final IOException | RuntimeException e) { + if (ex == null) { + ex = e; + } else { + ex.addSuppressed(e); } } } - if (th != null) { - throw rethrowAlways(th); + if (ex != null) { + if (ex instanceof IOException) { + throw (IOException) ex; + } else { + // since we only assigned an IOException or a RuntimeException to ex above, in this case ex must be a RuntimeException + throw (RuntimeException) ex; + } } } @@ -101,65 +107,12 @@ public static void closeWhileHandlingException(final Iterable - * This method never returns any value, even though it declares a return value of type {@link Error}. The return - * value declaration is very useful to let the compiler know that the code path following the invocation of this method is unreachable. - * So in most cases the invocation of this method will be guarded by an {@code if} and used together with a {@code throw} statement, as - * in: - *
-     *
-     * <pre>
-     * {@code
-     *   if (t != null) throw IOUtils.rethrowAlways(t)
-     * }
-     * </pre>
- * - * @param th the throwable to rethrow; must not be null - * @return this method always results in an exception, it never returns any value; see method documentation for details and usage - * example - * @throws IOException if the argument was an instance of {@link IOException} - * @throws RuntimeException with the {@link RuntimeException#getCause()} set to the argument, if it was not an instance of - * {@link IOException} - */ - private static Error rethrowAlways(final Throwable th) throws IOException, RuntimeException { - if (th == null) { - throw new AssertionError("rethrow argument must not be null."); - } - - if (th instanceof IOException) { - throw (IOException) th; - } - - if (th instanceof RuntimeException) { - throw (RuntimeException) th; - } - - if (th instanceof Error) { - throw (Error) th; - } - - throw new RuntimeException(th); - } - /** * Deletes all given files, suppressing all thrown {@link IOException}s. Some of the files may be null, if so they are ignored. * @@ -180,7 +133,7 @@ public static void deleteFilesIgnoringExceptions(final Collection Date: Wed, 14 Mar 2018 14:27:01 -0400 Subject: [PATCH 27/89] Docs: HighLevelRestClient#ping (#29070) Add documentation for `HighLevelRestClient#ping`. Relates to #28389 --- ...java => MiscellaneousDocumentationIT.java} | 24 +++++++++---------- .../high-level/getting-started.asciidoc | 4 ++-- docs/java-rest/high-level/migration.asciidoc | 6 ++--- .../high-level/miscellaneous/main.asciidoc | 4 ++-- .../high-level/miscellaneous/ping.asciidoc | 13 ++++++++++ .../high-level/supported-apis.asciidoc | 2 ++ 6 files changed, 32 insertions(+), 21 deletions(-) rename client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/{MainDocumentationIT.java => MiscellaneousDocumentationIT.java} (81%) create mode 100644 docs/java-rest/high-level/miscellaneous/ping.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java similarity index 81% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index 72986d44f97cd..504ea797c35f6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -31,20 +31,10 @@ import java.io.IOException; /** - * This class is used to generate the Java Main API documentation. - * You need to wrap your code between two tags like: - * // tag::example[] - * // end::example[] - * - * Where example is your tag name. - * - * Then in the documentation, you can extract what is between tag and end tags with - * ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{doc-tests}/MainDocumentationIT.java[example] - * -------------------------------------------------- + * Documentation for miscellaneous APIs in the high level java client. + * Code wrapped in {@code tag} and {@code end} tags is included in the docs. 
*/ -public class MainDocumentationIT extends ESRestHighLevelClientTestCase { +public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase { public void testMain() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -67,6 +57,14 @@ public void testMain() throws IOException { } } + public void testPing() throws IOException { + RestHighLevelClient client = highLevelClient(); + //tag::ping-execute + boolean response = client.ping(); + //end::ping-execute + assertTrue(response); + } + public void testInitializationFromClientBuilder() throws IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index ba5c7ba273eec..14a5058eb7272 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -126,7 +126,7 @@ to be built as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-init] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-init] -------------------------------------------------- The high-level client will internally create the low-level client used to @@ -139,7 +139,7 @@ method, which will close the internal `RestClient` instance. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-close] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-close] -------------------------------------------------- In the rest of this documentation about the Java High Level Client, the `RestHighLevelClient` instance diff --git a/docs/java-rest/high-level/migration.asciidoc b/docs/java-rest/high-level/migration.asciidoc index 44e895c9c712e..1349ccb35fe3b 100644 --- a/docs/java-rest/high-level/migration.asciidoc +++ b/docs/java-rest/high-level/migration.asciidoc @@ -64,7 +64,7 @@ argument: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-init] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-init] -------------------------------------------------- NOTE: The `RestClient` uses Elasticsearch's HTTP service which is @@ -91,7 +91,7 @@ must be replaced with: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-close] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-close] -------------------------------------------------- === Changing the application's code @@ -351,5 +351,3 @@ body, but any other JSON parser could have been use instead. We love to hear from you! Please give us your feedback about your migration experience and how to improve the Java High Level Rest Client on https://discuss.elastic.co/[our forum]. 
- - diff --git a/docs/java-rest/high-level/miscellaneous/main.asciidoc b/docs/java-rest/high-level/miscellaneous/main.asciidoc index b37e85ee8bd7b..eddbf4a3d0b73 100644 --- a/docs/java-rest/high-level/miscellaneous/main.asciidoc +++ b/docs/java-rest/high-level/miscellaneous/main.asciidoc @@ -8,7 +8,7 @@ Cluster information can be retrieved using the `info()` method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[main-execute] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[main-execute] -------------------------------------------------- [[java-rest-high-main-response]] @@ -18,7 +18,7 @@ The returned `MainResponse` provides various kinds of information about the clus ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[main-response] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[main-response] -------------------------------------------------- <1> Retrieve the name of the cluster as a `ClusterName` <2> Retrieve the unique identifier of the cluster diff --git a/docs/java-rest/high-level/miscellaneous/ping.asciidoc b/docs/java-rest/high-level/miscellaneous/ping.asciidoc new file mode 100644 index 0000000000000..6cff46a62c5eb --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/ping.asciidoc @@ -0,0 +1,13 @@ +[[java-rest-high-ping]] +=== Ping API + +[[java-rest-high-ping-request]] +==== Execution + +The `ping()` method checks if the cluster is up and available to +process requests and returns a boolean: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[ping-execute] +-------------------------------------------------- diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 9fb8bd8c66700..fa2f57069ba93 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -37,8 +37,10 @@ include::search/scroll.asciidoc[] The Java High Level REST Client supports the following Miscellaneous APIs: * <> +* <> include::miscellaneous/main.asciidoc[] +include::miscellaneous/ping.asciidoc[] == Indices APIs From 8e8fdc4f0efc6ddac866009baa22552668d94a12 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 14 Mar 2018 13:47:57 -0600 Subject: [PATCH 28/89] Decouple XContentBuilder from BytesReference (#28972) * Decouple XContentBuilder from BytesReference This commit removes all mentions of `BytesReference` from `XContentBuilder`. This is needed so that we can completely decouple the XContent code and move it into its own dependency. While this change appears large, it is due to two main changes, moving `.bytes()` and `.string()` out of XContentBuilder itself into static methods `BytesReference.bytes` and `Strings.toString` respectively. The rest of the change is code reacting to these changes (the majority of it in tests). 
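As a minimal, standalone sketch of the pattern this migration applies (illustrative only, not part of the patch; the package layout matches the imports visible in the diffs below, and the class name is invented for the example):

["source","java"]
--------------------------------------------------
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Illustrative sketch, not part of the patch; class name invented for the example.
public class XContentMigrationSketch {
    public static void main(String[] args) throws Exception {
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.startObject().field("field", "value").endObject();

            // Before: the builder serialized itself.
            //   BytesReference bytes = builder.bytes();
            //   String json = builder.string();

            // After: static helpers produce the same results, so
            // XContentBuilder itself no longer mentions BytesReference.
            BytesReference bytes = BytesReference.bytes(builder);
            String json = Strings.toString(builder);

            System.out.println(json);           // {"field":"value"}
            System.out.println(bytes.length()); // 17
        }
    }
}
--------------------------------------------------

Moving serialization into static helpers is what makes it possible to pull the XContent code out into its own dependency later.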
Relates to #28504 --- .../org/elasticsearch/client/Request.java | 4 +- .../java/org/elasticsearch/client/CrudIT.java | 3 +- .../client/RestHighLevelClientTests.java | 3 +- .../org/elasticsearch/client/SearchIT.java | 3 +- .../documentation/CRUDDocumentationIT.java | 4 +- .../MigrationDocumentationIT.java | 5 +- .../ingest/common/ScriptProcessor.java | 4 +- .../common/GrokProcessorGetActionTests.java | 3 +- .../ingest/common/JsonProcessorTests.java | 3 +- .../mustache/CustomMustacheFactory.java | 3 +- .../mustache/RestSearchTemplateAction.java | 3 +- .../mustache/SearchTemplateResponse.java | 5 +- .../mustache/MultiSearchTemplateIT.java | 5 +- .../script/mustache/MustacheTests.java | 5 +- .../script/mustache/SearchTemplateIT.java | 4 +- .../mapper/ScaledFloatFieldMapperTests.java | 158 ++--- .../mapper/TokenCountFieldMapperTests.java | 21 +- .../mapper/ParentJoinFieldMapperTests.java | 118 ++-- .../join/query/HasChildQueryBuilderTests.java | 3 +- .../query/HasParentQueryBuilderTests.java | 3 +- .../LegacyHasChildQueryBuilderTests.java | 9 +- .../LegacyHasParentQueryBuilderTests.java | 13 +- .../LegacyParentIdQueryBuilderTests.java | 9 +- .../join/query/ParentChildTestCase.java | 3 +- .../join/query/ParentIdQueryBuilderTests.java | 6 +- .../percolator/PercolateQueryBuilder.java | 4 +- .../percolator/PercolatorFieldMapper.java | 2 +- .../percolator/CandidateQueryTests.java | 9 +- .../PercolateQueryBuilderTests.java | 11 +- .../PercolatorFieldMapperTests.java | 118 ++-- .../percolator/PercolatorQuerySearchIT.java | 88 +-- .../PercolatorQuerySearchTests.java | 14 +- .../index/rankeval/RankEvalResponseTests.java | 2 +- .../index/rankeval/RankEvalSpecTests.java | 5 +- .../AbstractBulkByQueryRestHandler.java | 4 +- .../index/reindex/RestReindexAction.java | 7 +- .../index/reindex/TransportReindexAction.java | 4 +- .../reindex/remote/RemoteRequestBuilders.java | 10 +- .../reindex/remote/RemoteResponseParsers.java | 3 +- .../index/reindex/RestReindexActionTests.java | 4 +- .../rest/Netty4HeadBodyIsEmptyIT.java | 11 +- .../ICUCollationKeywordFieldMapperTests.java | 176 +++--- .../murmur3/Murmur3FieldMapperTests.java | 30 +- .../index/mapper/size/SizeMappingTests.java | 40 +- .../gcs/GoogleCloudStorageTestServer.java | 5 +- ...rossClusterSearchUnavailableClusterIT.java | 3 +- .../upgrades/FullClusterRestartIT.java | 25 +- .../elasticsearch/backwards/IndexingIT.java | 20 +- .../elasticsearch/bwc/QueryBuilderBWCIT.java | 3 +- .../http/ContextAndHeaderTransportIT.java | 5 +- .../elasticsearch/http/DeprecationHttpIT.java | 3 +- .../org/elasticsearch/wildfly/WildflyIT.java | 3 +- .../cluster/node/info/NodesInfoResponse.java | 4 +- .../node/stats/NodesStatsResponse.java | 3 +- .../node/usage/NodesUsageResponse.java | 6 +- .../put/PutRepositoryRequest.java | 3 +- .../ClusterUpdateSettingsRequest.java | 9 +- .../create/CreateSnapshotRequest.java | 2 +- .../restore/RestoreSnapshotRequest.java | 4 +- .../cluster/stats/ClusterStatsResponse.java | 3 +- .../action/admin/indices/alias/Alias.java | 11 +- .../indices/alias/IndicesAliasesRequest.java | 9 +- .../admin/indices/analyze/AnalyzeRequest.java | 2 +- .../indices/create/CreateIndexRequest.java | 22 +- .../mapping/get/GetFieldMappingsResponse.java | 5 +- .../mapping/put/PutMappingRequest.java | 8 +- .../settings/put/UpdateSettingsRequest.java | 3 +- .../indices/stats/IndicesStatsResponse.java | 3 +- .../template/put/PutIndexTemplateRequest.java | 11 +- .../action/index/IndexRequest.java | 5 +- .../action/search/MultiSearchRequest.java | 4 +- 
.../termvectors/TermVectorsRequest.java | 2 +- .../termvectors/TermVectorsResponse.java | 3 +- .../action/update/UpdateHelper.java | 2 +- .../cluster/metadata/AliasMetaData.java | 9 +- .../metadata/IndexTemplateMetaData.java | 5 +- .../cluster/metadata/MappingMetaData.java | 3 +- .../cluster/metadata/MetaData.java | 2 +- .../org/elasticsearch/common/Strings.java | 13 +- .../common/bytes/BytesReference.java | 17 + .../common/document/DocumentField.java | 11 +- .../common/settings/Setting.java | 4 +- .../common/settings/Settings.java | 2 +- .../common/settings/SettingsModule.java | 3 +- .../common/xcontent/AbstractObjectParser.java | 2 +- .../common/xcontent/XContentBuilder.java | 68 +-- .../common/xcontent/XContentHelper.java | 17 +- .../common/xcontent/XContentParser.java | 1 - .../elasticsearch/index/get/GetResult.java | 2 +- .../index/get/ShardGetService.java | 2 +- .../index/mapper/MapperService.java | 3 +- .../elasticsearch/index/mapper/Mapping.java | 3 +- .../index/query/MoreLikeThisQueryBuilder.java | 11 +- .../functionscore/DecayFunctionBuilder.java | 6 +- .../functionscore/DecayFunctionParser.java | 2 +- .../indices/recovery/RecoveryState.java | 3 +- .../ingest/PipelineConfiguration.java | 3 +- .../elasticsearch/rest/BytesRestResponse.java | 4 +- .../java/org/elasticsearch/script/Script.java | 10 +- .../elasticsearch/script/ScriptException.java | 17 +- .../script/StoredScriptSource.java | 5 +- .../org/elasticsearch/search/SearchHit.java | 2 +- .../fetch/subphase/FetchSourceSubPhase.java | 3 +- .../searchafter/SearchAfterBuilder.java | 3 +- .../elasticsearch/search/suggest/Suggest.java | 3 +- .../CompletionSuggestionBuilder.java | 9 +- .../completion/context/ContextMapping.java | 3 +- .../DirectCandidateGeneratorBuilder.java | 3 +- .../org/elasticsearch/tasks/TaskResult.java | 5 +- .../ElasticsearchExceptionTests.java | 16 +- .../action/DocWriteResponseTests.java | 5 +- .../ClusterAllocationExplainActionTests.java | 3 +- .../ClusterAllocationExplainIT.java | 3 +- .../ClusterAllocationExplanationTests.java | 3 +- .../node/tasks/TransportTasksActionTests.java | 6 +- .../reroute/ClusterRerouteRequestTests.java | 3 +- .../reroute/ClusterRerouteResponseTests.java | 7 +- .../ClusterUpdateSettingsRequestTests.java | 4 +- .../indices/alias/AliasActionsTests.java | 3 +- .../create/CreateIndexRequestTests.java | 4 +- .../mapping/put/PutMappingRequestTests.java | 3 +- .../rollover/RolloverRequestTests.java | 2 +- .../IndicesShardStoreResponseTests.java | 4 +- .../MetaDataIndexTemplateServiceTests.java | 7 +- .../put/PutIndexTemplateRequestTests.java | 5 +- .../action/bulk/BulkItemResponseTests.java | 2 +- .../action/bulk/BulkProcessorIT.java | 5 +- .../ingest/WriteableIngestDocumentTests.java | 3 +- .../action/main/MainResponseTests.java | 3 +- .../search/ClearScrollRequestTests.java | 3 +- .../SearchPhaseExecutionExceptionTests.java | 2 +- .../search/SearchScrollRequestTests.java | 3 +- ...ultShardOperationFailedExceptionTests.java | 3 +- .../replication/ReplicationResponseTests.java | 2 +- .../action/update/UpdateRequestTests.java | 3 +- .../metadata/IndexCreationTaskTests.java | 5 +- .../cluster/metadata/IndexGraveyardTests.java | 3 +- .../cluster/metadata/IndexMetaDataTests.java | 3 +- .../metadata/IndexTemplateMetaDataTests.java | 2 +- .../cluster/metadata/MetaDataTests.java | 12 +- .../cluster/routing/AllocationIdTests.java | 2 +- .../common/geo/GeoJsonShapeParserTests.java | 69 +-- .../common/settings/SettingsFilterTests.java | 3 +- .../common/settings/SettingsTests.java | 7 +- 
.../common/xcontent/BaseXContentTestCase.java | 43 +- .../ConstructingObjectParserTests.java | 21 +- .../common/xcontent/ObjectParserTests.java | 10 +- .../common/xcontent/XContentFactoryTests.java | 9 +- .../common/xcontent/XContentParserTests.java | 12 +- .../builder/XContentBuilderTests.java | 58 +- .../cbor/CborXContentParserTests.java | 2 +- .../support/AbstractFilteringTestCase.java | 22 +- .../support/XContentMapValuesTests.java | 31 +- .../AbstractXContentFilteringTestCase.java | 8 +- .../discovery/zen/ZenDiscoveryIT.java | 3 +- .../document/DocumentActionsIT.java | 13 +- .../gateway/RecoveryFromGatewayIT.java | 9 +- .../org/elasticsearch/get/GetActionIT.java | 16 +- .../index/IndexServiceTests.java | 3 +- .../org/elasticsearch/index/IndexTests.java | 3 +- .../index/IndexingSlowLogTests.java | 2 +- .../index/analysis/PreBuiltAnalyzerTests.java | 5 +- .../fielddata/BinaryDVFieldDataTests.java | 14 +- .../index/mapper/AllFieldMapperTests.java | 3 +- .../index/mapper/BinaryFieldMapperTests.java | 22 +- .../index/mapper/BooleanFieldMapperTests.java | 83 +-- .../index/mapper/CamelCaseFieldNameTests.java | 17 +- .../mapper/CompletionFieldMapperTests.java | 254 ++++---- .../mapper/CopyToMapperIntegrationIT.java | 5 +- .../index/mapper/CopyToMapperTests.java | 99 ++-- .../index/mapper/DateFieldMapperTests.java | 194 +++--- .../mapper/DocumentMapperMergeTests.java | 117 ++-- .../mapper/DocumentMapperParserTests.java | 19 +- .../index/mapper/DocumentParserTests.java | 551 +++++++++--------- .../index/mapper/DoubleIndexingDocTests.java | 26 +- .../index/mapper/DynamicMappingTests.java | 230 ++++---- .../mapper/DynamicMappingVersionTests.java | 16 +- .../index/mapper/DynamicTemplateTests.java | 9 +- .../index/mapper/DynamicTemplatesTests.java | 11 +- .../mapper/ExternalFieldMapperTests.java | 48 +- .../mapper/FieldNamesFieldMapperTests.java | 60 +- .../mapper/GeoPointFieldMapperTests.java | 150 ++--- .../mapper/GeoShapeFieldMapperTests.java | 85 +-- .../index/mapper/IdFieldMapperTests.java | 8 +- .../index/mapper/IndexFieldMapperTests.java | 27 +- .../index/mapper/IpFieldMapperTests.java | 124 ++-- .../index/mapper/IpRangeFieldMapperTests.java | 10 +- .../mapper/JavaMultiFieldMergeTests.java | 6 +- .../index/mapper/KeywordFieldMapperTests.java | 180 +++--- .../index/mapper/MapperServiceTests.java | 32 +- .../mapper/MultiFieldCopyToMapperTests.java | 3 +- .../index/mapper/MultiFieldTests.java | 13 +- .../index/mapper/NestedObjectMapperTests.java | 242 ++++---- .../mapper/NullValueObjectMappingTests.java | 44 +- .../index/mapper/NullValueTests.java | 6 +- .../index/mapper/NumberFieldMapperTests.java | 191 +++--- .../index/mapper/ObjectMapperTests.java | 159 +++-- .../index/mapper/ParentFieldMapperTests.java | 46 +- .../index/mapper/RangeFieldMapperTests.java | 100 ++-- ...angeFieldQueryStringQueryBuilderTests.java | 5 +- .../index/mapper/RootObjectMapperTests.java | 49 +- .../index/mapper/RoutingFieldMapperTests.java | 26 +- .../index/mapper/SourceFieldMapperTests.java | 100 ++-- .../mapper/StoredNumericValuesTests.java | 69 ++- .../index/mapper/TextFieldMapperTests.java | 227 ++++---- .../index/mapper/UpdateMappingTests.java | 32 +- .../query/GeoShapeQueryBuilderTests.java | 3 +- .../index/query/MatchQueryBuilderTests.java | 5 +- .../query/MoreLikeThisQueryBuilderTests.java | 3 +- .../index/query/NestedQueryBuilderTests.java | 5 +- .../query/QueryStringQueryBuilderTests.java | 9 +- .../index/query/RangeQueryRewriteTests.java | 9 +- .../query/SpanFirstQueryBuilderTests.java | 5 +- 
.../index/query/SpanNotQueryBuilderTests.java | 9 +- .../index/query/TermsQueryBuilderTests.java | 7 +- .../query/TermsSetQueryBuilderTests.java | 6 +- .../FunctionScoreQueryBuilderTests.java | 13 +- .../index/reindex/BulkByScrollTaskTests.java | 7 +- .../search/geo/GeoPointParsingTests.java | 17 +- .../index/shard/IndexShardTests.java | 7 +- .../shard/PrimaryReplicaSyncerTests.java | 3 +- .../index/similarity/SimilarityTests.java | 37 +- .../snapshots/blobstore/FileInfoTests.java | 4 +- .../index/translog/TranslogTests.java | 3 +- .../mapping/SimpleGetFieldMappingsIT.java | 9 +- .../RandomExceptionCircuitBreakerIT.java | 33 +- .../indices/state/OpenCloseIndexIT.java | 5 +- .../elasticsearch/ingest/IngestClientIT.java | 27 +- ...gestProcessorNotInstalledOnAllNodesIT.java | 4 +- .../ingest/PipelineConfigurationTests.java | 2 +- .../org/elasticsearch/mget/SimpleMgetIT.java | 4 +- .../nodesinfo/NodeInfoStreamingTests.java | 3 +- .../repositories/IndexIdTests.java | 3 +- .../repositories/RepositoryDataTests.java | 3 +- .../rest/BytesRestResponseTests.java | 2 +- .../rest/RestControllerTests.java | 4 +- .../rest/action/RestMainActionTests.java | 2 +- .../script/ScriptMetaDataTests.java | 21 +- .../script/ScriptServiceTests.java | 4 +- .../org/elasticsearch/script/ScriptTests.java | 3 +- .../script/StoredScriptSourceTests.java | 8 +- .../script/StoredScriptTests.java | 39 +- .../search/NestedIdentityTests.java | 5 +- .../elasticsearch/search/SearchHitTests.java | 3 +- .../elasticsearch/search/SearchHitsTests.java | 3 +- .../search/SearchSortValuesTests.java | 3 +- .../aggregations/AggregationsTests.java | 2 +- .../AggregatorFactoriesTests.java | 2 +- .../aggregations/FiltersAggsRewriteIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 3 +- .../SignificantTermsSignificanceScoreIT.java | 3 +- .../bucket/histogram/ExtendedBoundsTests.java | 4 +- .../SignificanceHeuristicTests.java | 3 +- .../metrics/AbstractGeoTestCase.java | 3 +- .../metrics/InternalStatsTests.java | 5 +- .../basic/SearchWithRandomExceptionsIT.java | 5 +- .../basic/SearchWithRandomIOExceptionsIT.java | 5 +- .../builder/SearchSourceBuilderTests.java | 4 +- .../subphase/FetchSourceSubPhaseTests.java | 3 +- .../highlight/HighlightFieldTests.java | 5 +- .../highlight/HighlighterSearchIT.java | 9 +- .../search/fields/SearchFieldsIT.java | 200 +++---- .../elasticsearch/search/geo/GeoFilterIT.java | 13 +- .../search/geo/GeoShapeIntegrationIT.java | 13 +- .../search/geo/GeoShapeQueryTests.java | 30 +- .../ShardSearchTransportRequestTests.java | 2 +- .../search/morelikethis/MoreLikeThisIT.java | 17 +- .../search/profile/ProfileResultTests.java | 9 +- .../profile/query/CollectorResultTests.java | 9 +- .../elasticsearch/search/query/ExistsIT.java | 3 +- .../search/query/SimpleQueryStringIT.java | 10 +- .../search/scroll/SearchScrollIT.java | 3 +- .../searchafter/SearchAfterBuilderTests.java | 3 +- .../search/slice/SearchSliceIT.java | 5 +- .../search/sort/SimpleSortIT.java | 9 +- .../search/sort/SortBuilderTests.java | 3 +- .../suggest/CompletionSuggestSearchIT.java | 23 +- .../search/suggest/SuggestSearchIT.java | 71 +-- .../search/suggest/SuggestTests.java | 2 +- .../CategoryContextMappingTests.java | 241 ++++---- .../completion/GeoContextMappingTests.java | 136 ++--- .../SharedClusterSnapshotRestoreIT.java | 4 +- .../snapshots/SnapshotRequestsTests.java | 4 +- .../elasticsearch/tasks/TaskResultTests.java | 3 +- .../ThreadPoolSerializationTests.java | 5 +- .../RemoteClusterConnectionTests.java | 5 +- 
.../elasticsearch/update/UpdateNoopIT.java | 3 +- .../AbstractNumericFieldMapperTestCase.java | 5 +- .../search/RandomSearchRequestGenerator.java | 3 +- .../test/AbstractQueryTestCase.java | 8 +- .../elasticsearch/test/ESIntegTestCase.java | 3 +- .../org/elasticsearch/test/ESTestCase.java | 8 +- .../org/elasticsearch/test/RandomObjects.java | 9 +- .../elasticsearch/test/XContentTestUtils.java | 6 +- .../test/rest/ESRestTestCase.java | 2 +- .../yaml/ClientYamlTestExecutionContext.java | 3 +- .../rest/yaml/ClientYamlTestResponse.java | 3 +- .../test/XContentTestUtilsTests.java | 23 +- .../ElasticsearchAssertionsTests.java | 17 +- .../test/rest/yaml/ObjectPathTests.java | 27 +- .../test/test/ESTestCaseTests.java | 2 +- 301 files changed, 3563 insertions(+), 3369 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index 53bd6b9ecd77d..fb036bf35faf0 100755 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -328,7 +328,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { } metadata.endObject(); - BytesRef metadataSource = metadata.bytes().toBytesRef(); + BytesRef metadataSource = BytesReference.bytes(metadata).toBytesRef(); content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length); content.write(separator); } @@ -343,7 +343,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) { try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) { builder.copyCurrentStructure(parser); - source = builder.bytes().toBytesRef(); + source = BytesReference.bytes(builder).toBytesRef(); } } } else if (opType == DocWriteRequest.OpType.UPDATE) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 559dded4f4d7e..352a6a5e61d1b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -617,7 +617,8 @@ public void testBulk() throws IOException { bulkRequest.add(deleteRequest); } else { - BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes(); + BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent()) + .startObject().field("id", i).endObject()); if (opType == DocWriteRequest.OpType.INDEX) { IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType); if (erroneous) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index ce932adb2858a..b8315bd59fa43 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -272,7 +273,7 @@ private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, Co builder.startObject(); builder.field("field", "value"); builder.endObject(); - return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType); + return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 342cdf1aad4ff..01ef0598100fb 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; @@ -478,7 +479,7 @@ public void testSearchScroll() throws Exception { for (int i = 0; i < 100; i++) { XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); - HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON); + HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); } client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 16fa4f8d69cfb..95e5364756424 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -266,13 +266,13 @@ public void testUpdate() throws Exception { assertSame(indexResponse.status(), RestStatus.CREATED); XContentType xContentType = XContentType.JSON; - String script = XContentBuilder.builder(xContentType.xContent()) + String script = Strings.toString(XContentBuilder.builder(xContentType.xContent()) .startObject() .startObject("script") .field("lang", "painless") .field("code", "ctx._source.field += params.count") .endObject() - .endObject().string(); + .endObject()); HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType())); Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body); assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java index 1f57ceb4bfb23..650ab882c36d2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -75,7 +76,7 @@ public void testCreateIndex() throws IOException { .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); - String payload = XContentFactory.jsonBuilder() // <2> + String payload = Strings.toString(XContentFactory.jsonBuilder() // <2> .startObject() .startObject("settings") // <3> .value(indexSettings) @@ -89,7 +90,7 @@ public void testCreateIndex() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5> diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 90bb1f3104b0b..ddb284b9c890d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -21,13 +21,13 @@ import com.fasterxml.jackson.core.JsonFactory; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.common.xcontent.json.JsonXContentParser; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; @@ -99,7 +99,7 @@ public Factory(ScriptService scriptService) { public ScriptProcessor create(Map registry, String processorTag, Map config) throws Exception { try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); - InputStream stream = builder.bytes().streamInput(); + InputStream stream = BytesReference.bytes(builder).streamInput(); XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Script script = Script.parse(parser); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java index aa54044f8454c..cc8ca33161be4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -63,7 +64,7 @@ public void testResponseToXContent() throws Exception { 
GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS); try (XContentBuilder builder = JsonXContent.contentBuilder()) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); - Map converted = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); Map patterns = (Map) converted.get("patterns"); assertThat(patterns.size(), equalTo(1)); assertThat(patterns.get("PATTERN"), equalTo("foo")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index ef17935962d0e..245285259b47a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -48,7 +49,7 @@ public void testExecute() throws Exception { Map randomJsonMap = RandomDocumentPicks.randomSource(random()); XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap); - String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON); + String randomJson = XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON); document.put(randomField, randomJson); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 799d378e05fc7..008613311f421 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -30,6 +30,7 @@ import com.github.mustachejava.codes.DefaultMustache; import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.WriteCode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -215,7 +216,7 @@ protected Function createFunction(Object resolved) { // Do not handle as JSON return oh.stringify(resolved); } - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { throw new MustacheException("Failed to convert object to JSON", e); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index c3303cc30b528..7ab9aa6003334 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -63,7 +64,7 @@ public class RestSearchTemplateAction extends BaseRestHandler { if (parser.currentToken() == XContentParser.Token.START_OBJECT) { //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - request.setScript(builder.copyCurrentStructure(parser).string()); + request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); } catch (IOException e) { throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 22d7da774eb7c..792d993915992 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -30,6 +30,7 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.io.InputStream; public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { @@ -83,7 +84,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } else { builder.startObject(); //we can assume the template is always json as we convert it before compiling it - builder.rawField("template_output", source, XContentType.JSON); + try (InputStream stream = source.streamInput()) { + builder.rawField("template_output", stream, XContentType.JSON); + } builder.endObject(); } return builder; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index 91fc4db43dddd..be8be1b9c5480 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptType; @@ -61,13 +62,13 @@ public void testBasic() throws Exception { } indexRandom(true, indexRequestBuilders); - final String template = jsonBuilder().startObject() + final String template = Strings.toString(jsonBuilder().startObject() .startObject("query") .startObject("{{query_type}}") .field("{{field_name}}", "{{field_value}}") .endObject() .endObject() - .endObject().string(); + .endObject()); MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index d54063df8b4bf..ba59e9ccac002 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -30,6 +30,7 @@ import java.util.Set; import com.github.mustachejava.MustacheException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.script.ScriptEngine; @@ -248,7 +249,7 @@ public void testEmbeddedToJSON() throws Exception { .endObject(); Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); + singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")); @@ -290,7 +291,7 @@ public void testEmbeddedArrayJoin() throws Exception { .endObject(); Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); + singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 69739ff2cb8ef..1529b655a5042 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.plugins.Plugin; @@ -317,7 +318,8 @@ public void testIndexedTemplateWithArray() throws Exception { assertAcked( client().admin().cluster().preparePutStoredScript() .setId("4") - .setContent(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes(), XContentType.JSON) + .setContent(BytesReference.bytes(jsonBuilder().startObject().field("template", multiQuery).endObject()), + XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 29e68c85db5e4..d95c9899c89ad 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import 
diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
index 29e68c85db5e4..d95c9899c89ad 100644
--- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
@@ -21,6 +21,8 @@
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -54,20 +56,20 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
     }
 
     public void testDefaults() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", 123)
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", 123)
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -83,9 +85,9 @@ public void testDefaults() throws Exception {
     }
 
     public void testMissingScalingFactor() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> parser.parse("type", new CompressedXContent(mapping)));
@@ -93,10 +95,10 @@ public void testMissingScalingFactor() throws IOException {
     }
 
     public void testIllegalScalingFactor() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", -1).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> parser.parse("type", new CompressedXContent(mapping)));
@@ -104,20 +106,20 @@ public void testIllegalScalingFactor() throws IOException {
     }
 
     public void testNotIndexed() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("index", false).field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", 123)
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", 123)
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -128,20 +130,20 @@ public void testNotIndexed() throws Exception {
     }
 
     public void testNoDocValues() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", 123)
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", 123)
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -152,20 +154,20 @@ public void testNoDocValues() throws Exception {
     }
 
     public void testStore() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("store", true).field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", 123)
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", 123)
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -181,20 +183,20 @@ public void testStore() throws Exception {
     }
 
     public void testCoerce() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "123")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "123")
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -205,20 +207,20 @@ public void testCoerce() throws Exception {
         IndexableField dvField = fields[1];
         assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
 
-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", 10.0).field("coerce", false).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper2.mappingSource().toString());
 
-        ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "123")
-                .endObject()
-                .bytes(),
+        ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "123")
+                        .endObject()),
                 XContentType.JSON));
         MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
         assertThat(e.getCause().getMessage(), containsString("passed as String"));
@@ -234,36 +236,36 @@ public void testIgnoreMalformed() throws Exception {
     }
 
     private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", value)
-                .endObject()
-                .bytes(),
+        ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", value)
+                        .endObject()),
                 XContentType.JSON));
         MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
         assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains));
 
-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "scaled_float")
                 .field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", value)
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", value)
+                        .endObject()),
                 XContentType.JSON));
 
         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -271,7 +273,7 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains
     }
 
     public void testNullValue() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("type")
             .startObject("properties")
             .startObject("field")
@@ -279,20 +281,20 @@ public void testNullValue() throws IOException {
             .field("scaling_factor", 10.0)
             .endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
 
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .nullField("field")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .nullField("field")
+                        .endObject()),
                 XContentType.JSON));
         assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
 
-        mapping = XContentFactory.jsonBuilder().startObject()
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("type")
             .startObject("properties")
             .startObject("field")
@@ -301,16 +303,16 @@ public void testNullValue() throws IOException {
             .field("null_value", 2.5)
             .endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
 
         mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .nullField("field")
-                .endObject()
-                .bytes(),
+        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .nullField("field")
+                        .endObject()),
                 XContentType.JSON));
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
@@ -325,11 +327,11 @@ public void testNullValue() throws IOException {
 
     public void testEmptyName() throws IOException {
         // after 5.x
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("")
                 .field("type", "scaled_float")
                 .field("scaling_factor", 10.0).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> parser.parse("type", new CompressedXContent(mapping))
@@ -341,13 +343,13 @@
      * `index_options` was deprecated and is rejected as of 7.0
      */
     public void testRejectIndexOptions() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "scaled_float")
                 .field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" }))
                 .endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
         assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]"));
     }
diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java
index 13c4e87f95efc..e906f5755de9b 100644
--- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -52,7 +53,7 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
     }
 
     public void testMerge() throws IOException {
-        String stage1Mapping = XContentFactory.jsonBuilder().startObject()
+        String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("person")
                     .startObject("properties")
                         .startObject("tc")
@@ -60,12 +61,12 @@ public void testMerge() throws IOException {
                             .field("analyzer", "keyword")
                         .endObject()
                     .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper stage1 = mapperService.merge("person",
                 new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE);
 
-        String stage2Mapping = XContentFactory.jsonBuilder().startObject()
+        String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("person")
                     .startObject("properties")
                         .startObject("tc")
@@ -73,7 +74,7 @@ public void testMerge() throws IOException {
                             .field("analyzer", "standard")
                         .endObject()
                     .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper stage2 = mapperService.merge("person",
                 new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
 
@@ -131,7 +132,7 @@ public TokenStreamComponents createComponents(String fieldName) {
     public void testEmptyName() throws IOException {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type")
                     .startObject("properties")
                         .startObject("")
@@ -139,7 +140,7 @@ public void testEmptyName() throws IOException {
                             .field("analyzer", "standard")
                         .endObject()
                     .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         // Empty name not allowed in index created after 5.0
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -167,7 +168,7 @@ public void testParseNotNullValue() throws Exception {
     }
 
     private DocumentMapper createIndexWithTokenCountField() throws IOException {
-        final String content = XContentFactory.jsonBuilder().startObject()
+        final String content = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("person")
                     .startObject("properties")
                         .startObject("test")
@@ -180,16 +181,16 @@ private DocumentMapper createIndexWithTokenCountField() throws IOException {
                            .endObject()
                         .endObject()
                     .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
 
         return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content));
     }
 
     private SourceToParse createDocument(String fieldValue) throws Exception {
-        BytesReference request = XContentFactory.jsonBuilder()
+        BytesReference request = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                     .field("test", fieldValue)
-                .endObject().bytes();
+                .endObject());
 
         return SourceToParse.source("test", "person", "1", request, XContentType.JSON);
     }
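The mapper tests above also show the sibling conversion for builders that are needed as a `String`: the instance call `builder.string()` becomes the static `Strings.toString(builder)`. A condensed sketch of that pattern, with the mapping body shortened to a single hypothetical field:

[source,java]
--------------------------------------------------
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
        .startObject()
            .startObject("type")
                .startObject("properties")
                    .startObject("field").field("type", "keyword").endObject()   // stand-in field
                .endObject()
            .endObject()
        .endObject();

// before: String mapping = mappingBuilder.string();
String mapping = Strings.toString(mappingBuilder);
--------------------------------------------------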
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java
index 285e7e80195af..97ca8900ea0e0 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java
@@ -19,6 +19,8 @@
 
 package org.elasticsearch.join.mapper;
 
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -45,7 +47,7 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
     }
 
     public void testSingleLevel() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
@@ -54,7 +56,7 @@ public void testSingleLevel() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().string();
+            .endObject());
         IndexService service = createIndex("test");
         DocumentMapper docMapper = service.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -62,39 +64,39 @@ public void testSingleLevel() throws Exception {
 
         // Doc without join
         ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
-            XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
         assertNull(doc.rootDoc().getBinaryValue("join_field"));
 
         // Doc parent
         doc = docMapper.parse(SourceToParse.source("test", "type", "1",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .field("join_field", "parent")
-                .endObject().bytes(), XContentType.JSON));
+                .endObject()), XContentType.JSON));
         assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
 
         // Doc child
         doc = docMapper.parse(SourceToParse.source("test", "type", "2",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("join_field")
                     .field("name", "child")
                     .field("parent", "1")
                 .endObject()
-                .endObject().bytes(), XContentType.JSON).routing("1"));
+                .endObject()), XContentType.JSON).routing("1"));
         assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
 
         // Unkwnown join name
         MapperException exc = expectThrows(MapperParsingException.class,
             () -> docMapper.parse(SourceToParse.source("test", "type", "1",
-                XContentFactory.jsonBuilder().startObject()
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                     .field("join_field", "unknown")
-                    .endObject().bytes(), XContentType.JSON)));
+                    .endObject()), XContentType.JSON)));
         assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
     }
 
     public void testParentIdSpecifiedAsNumber() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
@@ -103,32 +105,32 @@ public void testParentIdSpecifiedAsNumber() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().string();
+            .endObject());
         IndexService service = createIndex("test");
         DocumentMapper docMapper = service.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
         ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("join_field")
                     .field("name", "child")
                     .field("parent", 1)
                 .endObject()
-                .endObject().bytes(), XContentType.JSON).routing("1"));
+                .endObject()), XContentType.JSON).routing("1"));
         assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
         doc = docMapper.parse(SourceToParse.source("test", "type", "2",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("join_field")
                     .field("name", "child")
                     .field("parent", 1.0)
                 .endObject()
-                .endObject().bytes(), XContentType.JSON).routing("1"));
+                .endObject()), XContentType.JSON).routing("1"));
         assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
     }
 
     public void testMultipleLevels() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
@@ -138,7 +140,7 @@ public void testMultipleLevels() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().string();
+            .endObject());
         IndexService service = createIndex("test");
         DocumentMapper docMapper = service.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -146,26 +148,26 @@ public void testMultipleLevels() throws Exception {
 
         // Doc without join
         ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
-            XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
         assertNull(doc.rootDoc().getBinaryValue("join_field"));
 
         // Doc parent
         doc = docMapper.parse(SourceToParse.source("test", "type", "1",
-            XContentFactory.jsonBuilder()
+            BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                     .field("join_field", "parent")
-                .endObject().bytes(), XContentType.JSON));
+                .endObject()), XContentType.JSON));
         assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
 
         // Doc child
         doc = docMapper.parse(SourceToParse.source("test", "type", "2",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("join_field")
                     .field("name", "child")
                     .field("parent", "1")
                 .endObject()
-                .endObject().bytes(), XContentType.JSON).routing("1"));
+                .endObject()), XContentType.JSON).routing("1"));
         assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
         assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
         assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
@@ -173,44 +175,44 @@ public void testMultipleLevels() throws Exception {
         // Doc child missing parent
         MapperException exc = expectThrows(MapperParsingException.class,
             () -> docMapper.parse(SourceToParse.source("test", "type", "2",
-                XContentFactory.jsonBuilder().startObject()
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                     .field("join_field", "child")
-                    .endObject().bytes(), XContentType.JSON).routing("1")));
+                    .endObject()), XContentType.JSON).routing("1")));
         assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]"));
 
         // Doc child missing routing
         exc = expectThrows(MapperParsingException.class,
             () -> docMapper.parse(SourceToParse.source("test", "type", "2",
-                XContentFactory.jsonBuilder().startObject()
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                     .startObject("join_field")
                         .field("name", "child")
                         .field("parent", "1")
                     .endObject()
-                    .endObject().bytes(), XContentType.JSON)));
+                    .endObject()), XContentType.JSON)));
         assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]"));
 
         // Doc grand_child
         doc = docMapper.parse(SourceToParse.source("test", "type", "3",
-            XContentFactory.jsonBuilder().startObject()
+            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("join_field")
                     .field("name", "grand_child")
                     .field("parent", "2")
                 .endObject()
-                .endObject().bytes(), XContentType.JSON).routing("1"));
+                .endObject()), XContentType.JSON).routing("1"));
         assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
         assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
 
         // Unkwnown join name
         exc = expectThrows(MapperParsingException.class,
             () -> docMapper.parse(SourceToParse.source("test", "type", "1",
-                XContentFactory.jsonBuilder().startObject()
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                     .field("join_field", "unknown")
-                    .endObject().bytes(), XContentType.JSON)));
+                    .endObject()), XContentType.JSON)));
         assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
     }
 
     public void testUpdateRelations() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
             .startObject("join_field")
                 .field("type", "join")
                 .startObject("relations")
@@ -218,21 +220,21 @@ public void testUpdateRelations() throws Exception {
                     .array("child", "grand_child1", "grand_child2")
                 .endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         IndexService indexService = createIndex("test");
         DocumentMapper docMapper = indexService.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
         assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
                         .array("child", "grand_child1", "grand_child2")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             IllegalStateException exc = expectThrows(IllegalStateException.class,
                 () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
                     MapperService.MergeReason.MAPPING_UPDATE));
@@ -240,7 +242,7 @@ public void testUpdateRelations() throws Exception {
         }
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
@@ -248,7 +250,7 @@ public void testUpdateRelations() throws Exception {
                         .field("child", "grand_child1")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             IllegalStateException exc = expectThrows(IllegalStateException.class,
                 () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
                     MapperService.MergeReason.MAPPING_UPDATE));
@@ -256,7 +258,7 @@ public void testUpdateRelations() throws Exception {
         }
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
@@ -265,7 +267,7 @@ public void testUpdateRelations() throws Exception {
                         .array("child", "grand_child1", "grand_child2")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             IllegalStateException exc = expectThrows(IllegalStateException.class,
                 () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
                     MapperService.MergeReason.MAPPING_UPDATE));
@@ -273,7 +275,7 @@ public void testUpdateRelations() throws Exception {
         }
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
@@ -282,7 +284,7 @@ public void testUpdateRelations() throws Exception {
                        .field("grand_child2", "grand_grand_child")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             IllegalStateException exc = expectThrows(IllegalStateException.class,
                 () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
                     MapperService.MergeReason.MAPPING_UPDATE));
@@ -290,7 +292,7 @@ public void testUpdateRelations() throws Exception {
         }
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
@@ -298,7 +300,7 @@ public void testUpdateRelations() throws Exception {
                         .array("child", "grand_child1", "grand_child2")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             docMapper = indexService.mapperService().merge("type",
                 new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
             assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@@ -310,7 +312,7 @@ public void testUpdateRelations() throws Exception {
         }
 
         {
-            final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+            final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
                     .startObject("relations")
@@ -319,7 +321,7 @@ public void testUpdateRelations() throws Exception {
                         .array("other", "child_other1", "child_other2")
                     .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
             docMapper = indexService.mapperService().merge("type",
                 new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
             assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@@ -334,7 +336,7 @@ public void testUpdateRelations() throws Exception {
     }
 
     public void testInvalidJoinFieldInsideObject() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
             .startObject("object")
                 .startObject("properties")
                     .startObject("join_field")
@@ -345,7 +347,7 @@ public void testInvalidJoinFieldInsideObject() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         IndexService indexService = createIndex("test");
         MapperParsingException exc = expectThrows(MapperParsingException.class,
             () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@@ -355,7 +357,7 @@ public void testInvalidJoinFieldInsideObject() throws Exception {
     }
 
     public void testInvalidJoinFieldInsideMultiFields() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
             .startObject("number")
                 .field("type", "integer")
                 .startObject("fields")
@@ -367,7 +369,7 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         IndexService indexService = createIndex("test");
         MapperParsingException exc = expectThrows(MapperParsingException.class,
             () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@@ -379,7 +381,7 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception {
     public void testMultipleJoinFields() throws Exception {
         IndexService indexService = createIndex("test");
         {
-            String mapping = XContentFactory.jsonBuilder().startObject()
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("properties")
                     .startObject("join_field")
                         .field("type", "join")
@@ -395,14 +397,14 @@ public void testMultipleJoinFields() throws Exception {
                        .endObject()
                     .endObject()
                 .endObject()
-                .endObject().string();
+                .endObject());
             IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
                 new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
             assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
         }
 
         {
-            String mapping = XContentFactory.jsonBuilder().startObject()
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("properties")
                     .startObject("join_field")
                         .field("type", "join")
@@ -412,16 +414,16 @@ public void testMultipleJoinFields() throws Exception {
                         .endObject()
                     .endObject()
                 .endObject()
-                .endObject().string();
+                .endObject());
             indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
-            String updateMapping = XContentFactory.jsonBuilder().startObject()
+            String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("properties")
                     .startObject("another_join_field")
                         .field("type", "join")
                     .endObject()
                 .endObject()
-                .endObject().string();
+                .endObject());
             IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
                 new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
             assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
@@ -429,7 +431,7 @@ public void testMultipleJoinFields() throws Exception {
     }
 
     public void testEagerGlobalOrdinals() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
@@ -439,7 +441,7 @@ public void testEagerGlobalOrdinals() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().string();
+            .endObject());
         IndexService service = createIndex("test");
         DocumentMapper docMapper = service.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -450,7 +452,7 @@ public void testEagerGlobalOrdinals() throws Exception {
         assertNotNull(service.mapperService().fullName("join_field#child"));
         assertTrue(service.mapperService().fullName("join_field#child").eagerGlobalOrdinals());
 
-        mapping = XContentFactory.jsonBuilder().startObject()
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("properties")
                 .startObject("join_field")
                     .field("type", "join")
@@ -461,7 +463,7 @@ public void testEagerGlobalOrdinals() throws Exception {
                     .endObject()
                 .endObject()
             .endObject()
-            .endObject().string();
+            .endObject());
         service.mapperService().merge("type",
             new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
         assertFalse(service.mapperService().fullName("join_field").eagerGlobalOrdinals());
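The query-builder tests that follow register their test mappings through MapperService.merge, so the same two helpers appear in combination there: the XContentBuilder (or the builder returned by `PutMappingRequest.buildFromSimplifiedDef`) is first stringified with `Strings.toString` and then wrapped in `CompressedXContent`. A hedged sketch of the combined call shape (type and field names below are placeholders):

[source,java]
--------------------------------------------------
XContentBuilder mapping = XContentFactory.jsonBuilder()
        .startObject().startObject("properties")
            .startObject("some_field").field("type", "text").endObject()   // placeholder field
        .endObject().endObject();

// before: new CompressedXContent(mapping.string())
mapperService.merge("some_type",
        new CompressedXContent(Strings.toString(mapping)),
        MapperService.MergeReason.MAPPING_UPDATE);
--------------------------------------------------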
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
index 0ec6bec977e2e..0dcf5933f4f23 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
@@ -33,6 +33,7 @@
 import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -132,7 +133,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 .endObject().endObject().endObject();
 
         mapperService.merge(TYPE,
-            new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
+            new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     /**
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
index 67b0051358b17..c7ded186c9aee 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -112,7 +113,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 .endObject().endObject().endObject();
 
         mapperService.merge(TYPE,
-            new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
+            new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     /**
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java
index a0883d5090adb..a52cc1db3d088 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.MapperService;
@@ -89,7 +90,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
         similarity = randomFrom("classic", "BM25");
         // TODO: use a single type when inner hits have been changed to work with join field,
         // this test randomly generates queries with inner hits
-        mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
+        mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 STRING_FIELD_NAME_2, "type=keyword",
                 INT_FIELD_NAME, "type=integer",
@@ -97,8 +98,8 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
-        mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
+        mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
                 "_parent", "type=" + PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 "custom_string", "type=text,similarity=" + similarity,
@@ -107,7 +108,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     @Override
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java
index bd2c816b56566..468c1f48ea4f0 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.MapperService;
@@ -80,7 +81,7 @@ protected Settings indexSettings() {
     protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
         // TODO: use a single type when inner hits have been changed to work with join field,
         // this test randomly generates queries with inner hits
-        mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
+        mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 STRING_FIELD_NAME_2, "type=keyword",
                 INT_FIELD_NAME, "type=integer",
@@ -88,8 +89,8 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
-        mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
+        mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
                 "_parent", "type=" + PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 STRING_FIELD_NAME_2, "type=keyword",
@@ -98,9 +99,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
-        mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
+        mapperService.merge("just_a_type", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     /**
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java
index d88f5b944c32d..961ae943475a5 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java
@@ -26,6 +26,7 @@
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.MapperService;
@@ -65,15 +66,15 @@ protected Settings indexSettings() {
 
     @Override
     protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
-        mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
+        mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 INT_FIELD_NAME, "type=integer",
                 DOUBLE_FIELD_NAME, "type=double",
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
-        mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
+        mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
                 "_parent", "type=" + PARENT_TYPE,
                 STRING_FIELD_NAME, "type=text",
                 INT_FIELD_NAME, "type=integer",
@@ -81,7 +82,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
                 OBJECT_FIELD_NAME, "type=object"
-        ).string()), MapperService.MergeReason.MAPPING_UPDATE);
+        ))), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     @Override
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java
index 5d9b03045acfe..2e2cdfb200453 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java
@@ -20,6 +20,7 @@
 
 import org.elasticsearch.Version;
 import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -83,7 +84,7 @@ protected IndexRequestBuilder createIndexRequest(String index, String type, Stri
     protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId,
                                                      XContentBuilder builder) throws IOException {
-        Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.string(), false);
+        Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false);
         return createIndexRequest(index, type, id, parentId, source);
     }
 
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java
index 7c6dea967f344..5a128f4d305fb 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java
@@ -22,18 +22,16 @@
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.DocValuesTermsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.TypeFieldMapper;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.plugins.Plugin;
@@ -104,7 +102,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 .endObject().endObject().endObject();
 
         mapperService.merge(TYPE,
-            new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
+            new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
     }
 
     @Override
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
index 6ba8394e1e598..0c35876ada63d 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
@@ -416,7 +416,7 @@ public static PercolateQueryBuilder fromXContent(XContentParser parser) throws I
                         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                             builder.copyCurrentStructure(parser);
                             builder.flush();
-                            documents.add(builder.bytes());
+                            documents.add(BytesReference.bytes(builder));
                         }
                     } else {
                         throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
@@ -437,7 +437,7 @@ public static PercolateQueryBuilder fromXContent(XContentParser parser) throws I
                         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                             builder.copyCurrentStructure(parser);
                             builder.flush();
-                            documents.add(builder.bytes());
+                            documents.add(BytesReference.bytes(builder));
                         }
                     } else {
                         throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
index 986ccd809fd50..5babcef2e8d65 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
@@ -422,7 +422,7 @@ static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbFi
         try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
             queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
             builder.flush();
-            byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
+            byte[] queryBuilderAsBytes = BytesReference.toBytes(BytesReference.bytes(builder));
             context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType()));
         }
     }
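The two production files above (PercolateQueryBuilder and PercolatorFieldMapper) use the same static helper, and both keep the explicit `flush()` that was already present before the builder's bytes are read. A condensed sketch of the percolator document-copy path as it appears in the hunks above (`parser` and `documents` come from the surrounding method):

[source,java]
--------------------------------------------------
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
    builder.copyCurrentStructure(parser);         // copy the current document from the parser
    builder.flush();                              // ensure everything is written to the builder
    documents.add(BytesReference.bytes(builder)); // before: documents.add(builder.bytes());
}
--------------------------------------------------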
a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 80a6fabeca76c..59f4e091140ea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -75,6 +75,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -140,7 +141,7 @@ public void init() throws Exception { IndexService indexService = createIndex(indexName, Settings.EMPTY); mapperService = indexService.mapperService(); - String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("int_field").field("type", "integer").endObject() .startObject("long_field").field("type", "long").endObject() @@ -149,13 +150,13 @@ public void init() throws Exception { .startObject("double_field").field("type", "double").endObject() .startObject("ip_field").field("type", "ip").endObject() .startObject("field").field("type", "keyword").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); String queryField = "query_field"; - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 428a10b809d68..16be166b0addc 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -96,12 +97,12 @@ protected Collection> getPlugins() { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { queryField = randomAlphaOfLength(4); String docType = "_doc"; - mapperService.merge(docType, new 
CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, + mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, queryField, "type=percolator" - ).string()), MapperService.MergeReason.MAPPING_UPDATE); - mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, + ))), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, STRING_FIELD_NAME, "type=text" - ).string()), MapperService.MergeReason.MAPPING_UPDATE); + ))), MapperService.MergeReason.MAPPING_UPDATE); if (mapperService.getIndexSettings().isSingleType() == false) { PercolateQueryBuilderTests.docType = docType; } @@ -339,7 +340,7 @@ private static BytesReference randomSource(Set usedFields) { XContentBuilder xContent = XContentFactory.jsonBuilder(); xContent.map(source); - return xContent.bytes(); + return BytesReference.bytes(xContent); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 1bd0dff132d1d..b338151c5acd0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -42,7 +42,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.hash.MurmurHash3; @@ -141,7 +143,7 @@ public void init() throws Exception { indexService = createIndex("test"); mapperService = indexService.mapperService(); - String mapper = XContentFactory.jsonBuilder().startObject().startObject("doc") + String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("field").field("type", "text").endObject() @@ -157,15 +159,15 @@ public void init() throws Exception { .startObject("number_field6").field("type", "double").endObject() .startObject("number_field7").field("type", "ip").endObject() .startObject("date_field").field("type", "date").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { fieldName = randomAlphaOfLength(4); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); fieldType = (PercolatorFieldMapper.FieldType) 
mapperService.fullName(fieldName); } @@ -459,12 +461,13 @@ public void testExtractTermsAndRanges_numberFields() throws Exception { public void testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + BytesReference.bytes(XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + .endObject()), + XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); @@ -477,11 +480,11 @@ public void testPercolatorFieldMapper() throws Exception { // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, queryBuilder) - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), @@ -504,9 +507,9 @@ public void testStoringQueries() throws Exception { for (QueryBuilder query : queries) { ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, query) - .endObject().bytes(), + .endObject()), XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, query); @@ -517,12 +520,13 @@ public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + BytesReference.bytes(XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + .endObject()), + XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); QueryShardContext shardContext = indexService.newQueryShardContext( randomInt(20), null, () -> { @@ -537,11 +541,11 @@ public void testQueryWithRewrite() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - 
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, termQuery("unmapped_field", "value")) - .endObject().bytes(), + .endObject()), XContentType.JSON)); }); assertThat(exception.getCause(), instanceOf(QueryShardException.class)); @@ -551,20 +555,20 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .endObject() - .bytes(), + ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference + .bytes(XContentFactory + .jsonBuilder() + .startObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .nullField(fieldName) - .endObject().bytes(), + .endObject()), XContentType.JSON)); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); @@ -576,9 +580,9 @@ public void testAllowNoAdditionalSettings() throws Exception { IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); @@ -587,21 +591,21 @@ public void testAllowNoAdditionalSettings() throws Exception { // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. 
    public void testMultiplePercolatorFields() throws Exception {
         String typeName = "doc";
-        String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
+        String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
             .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
             .startObject("properties")
                 .startObject("query_field1").field("type", "percolator").endObject()
                 .startObject("query_field2").field("type", "percolator").endObject()
             .endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);

         QueryBuilder queryBuilder = matchQuery("field", "value");
         ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
+                BytesReference.bytes(jsonBuilder().startObject()
                     .field("query_field1", queryBuilder)
                     .field("query_field2", queryBuilder)
-                    .endObject().bytes(),
+                    .endObject()),
                 XContentType.JSON));
         assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
         BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
@@ -614,7 +618,7 @@ public void testMultiplePercolatorFields() throws Exception {

     // percolator field can be nested under an object field, but only one query can be specified per document
     public void testNestedPercolatorField() throws Exception {
         String typeName = "doc";
-        String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
+        String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
             .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
             .startObject("properties")
                 .startObject("object_field")
                     .field("type", "object")
                     .startObject("properties")
                         .startObject("query_field").field("type", "percolator").endObject()
                     .endObject()
                 .endObject()
             .endObject()
@@ -624,25 +628,25 @@ public void testNestedPercolatorField() throws Exception {
-            .endObject().endObject().string();
+            .endObject().endObject());
         mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);

         QueryBuilder queryBuilder = matchQuery("field", "value");
         ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject().startObject("object_field")
+                BytesReference.bytes(jsonBuilder().startObject().startObject("object_field")
                     .field("query_field", queryBuilder)
-                    .endObject().endObject().bytes(),
+                    .endObject().endObject()),
                 XContentType.JSON));
         assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
         BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
         assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

         doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
+                BytesReference.bytes(jsonBuilder().startObject()
                     .startArray("object_field")
                         .startObject().field("query_field", queryBuilder).endObject()
                     .endArray()
-                    .endObject().bytes(),
+                    .endObject()),
                 XContentType.JSON));
         assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
         queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
@@ -650,12 +654,12 @@ public void
testNestedPercolatorField() throws Exception {
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
                 mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                        jsonBuilder().startObject()
+                        BytesReference.bytes(jsonBuilder().startObject()
                             .startArray("object_field")
                                 .startObject().field("query_field", queryBuilder).endObject()
                                 .startObject().field("query_field", queryBuilder).endObject()
                             .endArray()
-                            .endObject().bytes(),
+                            .endObject()),
                         XContentType.JSON));
             }
         );
@@ -708,9 +712,9 @@ private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws I
     }

     public void testEmptyName() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
             .startObject("properties").startObject("").field("type", "percolator").endObject().endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         DocumentMapperParser parser = mapperService.documentMapperParser();

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -735,9 +739,9 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
         query.endObject();

         ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
-                XContentFactory.jsonBuilder().startObject()
-                    .rawField(fieldName, new BytesArray(query.string()), query.contentType())
-                    .endObject().bytes(),
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
+                    .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
+                    .endObject()),
                 XContentType.JSON));
         BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
         try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@@ -773,9 +777,9 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
         query.endObject();

         doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
-                XContentFactory.jsonBuilder().startObject()
-                    .rawField(fieldName, new BytesArray(query.string()), query.contentType())
-                    .endObject().bytes(),
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
+                    .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
+                    .endObject()),
                 XContentType.JSON));
         querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
         try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@@ -859,9 +863,9 @@ public void testDuplicatedClauses() throws Exception {
                 .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2")))
                 .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3")));
         ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
-                XContentFactory.jsonBuilder().startObject()
+                BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                     .field(fieldName, qb)
-                    .endObject().bytes(),
+                    .endObject()),
                 XContentType.JSON));

         List<String> values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))
@@ -881,9 +885,9 @@ public void testDuplicatedClauses() throws Exception {
                 .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4")))
                 .must(boolQuery().should(termQuery("field",
"value4")).should(termQuery("field", "value5"))); doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) - .endObject().bytes(), + .endObject()), XContentType.JSON)); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) @@ -906,9 +910,9 @@ public void testDuplicatedClauses() throws Exception { .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) - .endObject().bytes(), + .endObject()), XContentType.JSON)); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index db2d85b9e39e9..3e11f91c4bcea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -85,7 +85,7 @@ public void testPercolatorQuery() throws Exception { ).endObject()).get(); client().admin().indices().prepareRefresh().get(); - BytesReference source = jsonBuilder().startObject().endObject().bytes(); + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject()); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -93,7 +93,7 @@ public void testPercolatorQuery() throws Exception { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); logger.info("percolating doc with 1 field"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -105,7 +105,7 @@ public void testPercolatorQuery() throws Exception { assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); - source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -122,8 +122,8 @@ public void testPercolatorQuery() throws Exception { logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "value").endObject().bytes(), - jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", 
"value").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) ), XContentType.JSON)) .addSort("_uid", SortOrder.ASC) .get(); @@ -189,7 +189,7 @@ public void testPercolatorRangeQueries() throws Exception { client().admin().indices().prepareRefresh().get(); // Test long range: - BytesReference source = jsonBuilder().startObject().field("field1", 12).endObject().bytes(); + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -198,7 +198,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - source = jsonBuilder().startObject().field("field1", 11).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -206,7 +206,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("1")); // Test double range: - source = jsonBuilder().startObject().field("field2", 12).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -214,7 +214,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(1).getId(), equalTo("4")); - source = jsonBuilder().startObject().field("field2", 11).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -222,7 +222,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("4")); // Test IP range: - source = jsonBuilder().startObject().field("field3", "192.168.1.5").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -230,7 +230,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("9")); assertThat(response.getHits().getAt(1).getId(), equalTo("7")); - source = jsonBuilder().startObject().field("field3", "192.168.1.4").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -238,7 +238,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("7")); // Test date range: - source = jsonBuilder().startObject().field("field4", "2016-05-15").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject()); response = client().prepareSearch() .setQuery(new 
PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -267,9 +267,9 @@ public void testPercolatorGeoQueries() throws Exception { .endObject()).get(); refresh(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject() - .endObject().bytes(); + .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("_id", SortOrder.ASC) @@ -402,10 +402,10 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "the quick brown fox jumps over the lazy dog") .field("field2", "the quick brown fox falls down into the well") - .endObject().bytes(); + .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("_uid", SortOrder.ASC) @@ -449,9 +449,9 @@ public void testPercolatorQueryWithHighlighting() throws Exception { .execute().actionGet(); client().admin().indices().prepareRefresh().get(); - BytesReference document = jsonBuilder().startObject() + BytesReference document = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps over the lazy dog") - .endObject().bytes(); + .endObject()); SearchResponse searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) @@ -470,12 +470,12 @@ public void testPercolatorQueryWithHighlighting() throws Exception { assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); - BytesReference document1 = jsonBuilder().startObject() + BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps") - .endObject().bytes(); - BytesReference document2 = jsonBuilder().startObject() + .endObject()); + BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "over the lazy dog") - .endObject().bytes(); + .endObject()); searchResponse = client().prepareSearch() .setQuery(boolQuery() .should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) @@ -500,10 +500,10 @@ public void testPercolatorQueryWithHighlighting() throws Exception { searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "dog").endObject().bytes(), - jsonBuilder().startObject().field("field1", "fox").endObject().bytes(), - jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(), - jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) .addSort("_uid", 
SortOrder.ASC)
@@ -537,12 +537,12 @@ public void testPercolatorQueryWithHighlighting() throws Exception {
         searchResponse = client().prepareSearch()
                 .setQuery(boolQuery()
                         .should(new PercolateQueryBuilder("query", Arrays.asList(
-                                jsonBuilder().startObject().field("field1", "dog").endObject().bytes(),
-                                jsonBuilder().startObject().field("field1", "fox").endObject().bytes()
+                                BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()),
+                                BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject())
                         ), XContentType.JSON).setName("query1"))
                         .should(new PercolateQueryBuilder("query", Arrays.asList(
-                                jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(),
-                                jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes()
+                                BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()),
+                                BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
                         ), XContentType.JSON).setName("query2"))
                 )
                 .highlighter(new HighlightBuilder().field("field1"))
@@ -664,7 +664,7 @@ public void testWithMultiplePercolatorFields() throws Exception {
                 .get();
         client().admin().indices().prepareRefresh().get();

-        BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes();
+        BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject());
         SearchResponse response = client().prepareSearch()
                 .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON))
                 .setIndices("test1")
@@ -718,13 +718,13 @@ public void testPercolateQueryWithNestedDocuments() throws Exception {
         SearchResponse response = client().prepareSearch()
                 .setQuery(new PercolateQueryBuilder("query",
-                        XContentFactory.jsonBuilder()
+                        BytesReference.bytes(XContentFactory.jsonBuilder()
                             .startObject().field("companyname", "stark")
                                 .startArray("employee")
                                     .startObject().field("name", "virginia potts").endObject()
                                     .startObject().field("name", "tony stark").endObject()
                                 .endArray()
-                            .endObject().bytes(), XContentType.JSON))
+                            .endObject()), XContentType.JSON))
                 .addSort("_doc", SortOrder.ASC)
                 .get();
         assertHitCount(response, 1);
@@ -732,20 +732,20 @@ public void testPercolateQueryWithNestedDocuments() throws Exception {

         response = client().prepareSearch()
                 .setQuery(new PercolateQueryBuilder("query",
-                        XContentFactory.jsonBuilder()
+                        BytesReference.bytes(XContentFactory.jsonBuilder()
                             .startObject().field("companyname", "notstark")
                                 .startArray("employee")
                                     .startObject().field("name", "virginia stark").endObject()
                                     .startObject().field("name", "tony stark").endObject()
                                 .endArray()
-                            .endObject().bytes(), XContentType.JSON))
+                            .endObject()), XContentType.JSON))
                 .addSort("_doc", SortOrder.ASC)
                 .get();
         assertHitCount(response, 0);

         response = client().prepareSearch()
                 .setQuery(new PercolateQueryBuilder("query",
-                        XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject().bytes(),
+                        BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()),
                         XContentType.JSON))
                 .addSort("_doc", SortOrder.ASC)
                 .get();
@@ -753,20 +753,20 @@ public void testPercolateQueryWithNestedDocuments() throws Exception {

         response = client().prepareSearch()
                 .setQuery(new PercolateQueryBuilder("query", Arrays.asList(
-                        XContentFactory.jsonBuilder()
+                        BytesReference.bytes(XContentFactory.jsonBuilder()
                             .startObject().field("companyname", "stark")
                                 .startArray("employee")
                                     .startObject().field("name", "virginia
potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), - XContentFactory.jsonBuilder() + .endObject()), + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "peter parker").endObject() .startObject().field("name", "virginia potts").endObject() .endArray() - .endObject().bytes() + .endObject()) ), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); @@ -803,16 +803,16 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { MultiSearchResponse response = client().prepareMultiSearch() .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - jsonBuilder().startObject().field("field1", "b").endObject().bytes(), XContentType.JSON))) + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.YAML))) + BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.SMILE))) + BytesReference.bytes(smileBuilder().startObject().field("field1", "b c").endObject()), XContentType.SMILE))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - jsonBuilder().startObject().field("field1", "d").endObject().bytes(), XContentType.JSON))) + BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) .add(client().prepareSearch("test") // non existing doc, so error element diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index 3d8f5b1deb532..0650461e1a9d9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -79,7 +80,7 @@ public void testPercolateScriptQuery() throws IOException { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute().actionGet(); SearchResponse response = client().prepareSearch("index") - .setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "b").endObject().bytes(), + .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON)) .get(); assertHitCount(response, 1); @@ -108,13 +109,13 @@ public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() for (int i = 0; i < 32; i++) { SearchResponse response = client().prepareSearch() .setQuery(new 
PercolateQueryBuilder("query", - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), XContentType.JSON)) + .endObject()), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) // size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...) .setSize(0) @@ -192,7 +193,7 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() doc.endObject(); for (int i = 0; i < 32; i++) { SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", doc.bytes(), XContentType.JSON)) + .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(doc), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 1); @@ -212,8 +213,9 @@ public void testMapUnmappedFieldAsText() throws IOException { client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "value").endObject().bytes(), - XContentType.JSON)) + .setQuery(new PercolateQueryBuilder("query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + XContentType.JSON)) .get(); assertHitCount(response, 1); assertSearchHits(response, "1"); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 26492d3566fc4..e4fe48482377c 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -152,7 +152,7 @@ public void testToXContent() throws IOException { RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality), Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - String xContent = response.toXContent(builder, ToXContent.EMPTY_PARAMS).bytes().utf8ToString(); + String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString(); assertEquals(("{" + " \"quality_level\": 0.123," + " \"details\": {" + diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index 2fed2c8311beb..0b0b30c36e90f 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.rankeval; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -85,7 +86,7 @@ private static RankEvalSpec createTestItem() throws IOException { builder.startObject(); builder.field("field", randomAlphaOfLengthBetween(1, 5)); builder.endObject(); - script = builder.string(); 
+            script = Strings.toString(builder);
         }

         templates = new HashSet<>();
@@ -115,7 +116,7 @@
     public void testXContentRoundtrip() throws IOException {
         RankEvalSpec testItem = createTestItem();
         XContentBuilder shuffled = shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS));
-        try (XContentParser parser = createParser(JsonXContent.jsonXContent, shuffled.bytes())) {
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(shuffled))) {
             RankEvalSpec parsedItem = RankEvalSpec.parse(parser);
             // indices, come from URL parameters, so they don't survive xContent roundtrip
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
index ad1385541a6b3..230828ed3fca9 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java
@@ -21,8 +21,8 @@
 import org.elasticsearch.action.GenericAction;
 import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -91,7 +91,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest,
             }
         }
         return parser.contentType().xContent().createParser(parser.getXContentRegistry(),
-                parser.getDeprecationHandler(), builder.map(body).bytes().streamInput());
+                parser.getDeprecationHandler(), BytesReference.bytes(builder.map(body)).streamInput());
         }
     }
 }
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
index 7400889439202..2e85d567743ee 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
@@ -27,7 +27,6 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -75,7 +74,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler
 source) throws
             XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
             Object query = source.remove("query");
             if (query == null) {
-                return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes();
+                return BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS));
             }
             if (!(query instanceof Map)) {
                 throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]");
             }
             @SuppressWarnings("unchecked")
             Map<String, Object> map = (Map<String, Object>) query;
-            return builder.map(map).bytes();
+            return BytesReference.bytes(builder.map(map));
         }
     }
diff --git
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
index 2ffa07cb8be56..650cf5000a745 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
@@ -37,8 +37,8 @@
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.bulk.BackoffPolicy;
 import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
@@ -345,7 +345,7 @@ protected RequestWrapper<IndexRequest> buildRequest(ScrollableHitSource.Hit doc)
                     XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) {
                     parser.nextToken();
                     builder.copyCurrentStructure(parser);
-                    index.source(builder.bytes(), builder.contentType());
+                    index.source(BytesReference.bytes(builder), builder.contentType());
                 } catch (IOException e) {
                     throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to "
                         + mainRequestXContentType, e);
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java
index ccb19fd62c814..3f6f4bcbc4fe3 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java
@@ -155,7 +155,7 @@ static HttpEntity initialSearchEntity(SearchRequest searchRequest, BytesReferenc
             }
             entity.endObject();
-            BytesRef bytes = entity.bytes().toBytesRef();
+            BytesRef bytes = BytesReference.bytes(entity).toBytesRef();
             return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON);
         } catch (IOException e) {
             throw new ElasticsearchException("unexpected error building entity", e);
@@ -209,9 +209,9 @@ static HttpEntity scrollEntity(String scroll, Version remoteVersion) {
             return new StringEntity(scroll, ContentType.TEXT_PLAIN);
         }
         try (XContentBuilder entity = JsonXContent.contentBuilder()) {
-            return new StringEntity(entity.startObject()
+            return new StringEntity(Strings.toString(entity.startObject()
                 .field("scroll_id", scroll)
-                .endObject().string(), ContentType.APPLICATION_JSON);
+                .endObject()), ContentType.APPLICATION_JSON);
         } catch (IOException e) {
             throw new ElasticsearchException("failed to build scroll entity", e);
         }
@@ -223,9 +223,9 @@ static HttpEntity clearScrollEntity(String scroll, Version remoteVersion) {
             return new StringEntity(scroll, ContentType.TEXT_PLAIN);
         }
         try (XContentBuilder entity = JsonXContent.contentBuilder()) {
-            return new StringEntity(entity.startObject()
+            return new StringEntity(Strings.toString(entity.startObject()
                 .array("scroll_id", scroll)
-                .endObject().string(), ContentType.APPLICATION_JSON);
+                .endObject()), ContentType.APPLICATION_JSON);
         } catch (IOException e) {
             throw new ElasticsearchException("failed to build clear scroll entity", e);
         }
diff --git
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java
index d9a897026d293..d18e9c85bcdab 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.reindex.remote;

 import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit;
 import org.elasticsearch.index.reindex.ScrollableHitSource.Hit;
 import org.elasticsearch.index.reindex.ScrollableHitSource.Response;
@@ -78,7 +79,7 @@ private RemoteResponseParsers() {}
                 try (XContentBuilder b = XContentBuilder.builder(s.xContent())) {
                     b.copyCurrentStructure(p);
                     // a hack but this lets us get the right xcontent type to go with the source
-                    return new Tuple<>(b.bytes(), s);
+                    return new Tuple<>(BytesReference.bytes(b), s);
                 }
             } catch (IOException e) {
                 throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e);
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
index 9215459c1ce93..1c33ccdaaa289 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
@@ -118,7 +118,7 @@ public void testReindexFromRemoteRequestParsing() throws IOException {
                 b.endObject();
             }
             b.endObject();
-            request = b.bytes();
+            request = BytesReference.bytes(b);
         }
         try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
             ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
@@ -144,7 +144,7 @@ public void testPipelineQueryParameterIsError() throws IOException {
                 body.endObject();
             }
             body.endObject();
-            request.withContent(body.bytes(), body.contentType());
+            request.withContent(BytesReference.bytes(body), body.contentType());
         }
         request.withParams(singletonMap("pipeline", "doesn't matter"));
         Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build()));
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java
index d1a5c15a29c22..20b18ebdaddda 100644
--- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java
@@ -22,6 +22,7 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.elasticsearch.client.Response;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.hamcrest.Matcher;
@@ -57,7 +58,7 @@ private void createTestDoc(final String indexName, final String typeName) throws
             }
             builder.endObject();
             client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(),
-                    new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
+                    new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
         }
     }
@@ -108,7
+109,8 @@ public void testAliasExists() throws IOException {
             }
             builder.endObject();

-            client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
+            client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder),
+                    ContentType.APPLICATION_JSON));
             headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0));
             headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0));
         }
@@ -134,7 +136,7 @@ public void testTemplateExists() throws IOException {
             builder.endObject();

             client().performRequest("PUT", "/_template/template", emptyMap(),
-                    new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
+                    new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
             headTestCase("/_template/template", emptyMap(), greaterThan(0));
         }
     }
@@ -162,7 +164,8 @@ public void testGetSourceAction() throws IOException {
                 builder.endObject();
             }
             builder.endObject();

-            client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
+            client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder),
+                    ContentType.APPLICATION_JSON));
             createTestDoc("test-no-source", "test-no-source");
             headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0));
         }
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
index 88f92d0aad8ba..fff255970113d 100644
--- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
+++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
@@ -29,6 +29,8 @@
 import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -64,19 +66,19 @@ public void setup() {
     }

     public void testDefaults() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -108,19 +110,19 @@ public void testBackCompat() throws Exception {
         indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build());
         parser =
indexService.mapperService().documentMapperParser();

-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -149,44 +151,44 @@ public void testBackCompat() throws Exception {
     }

     public void testNullValue() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .nullField("field")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .nullField("field")
+                        .endObject()),
                 XContentType.JSON));
         assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));

-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("null_value", "1234").endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());

-        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .endObject()
-                .bytes(),
+        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(0, fields.length);

-        doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .nullField("field")
-                .endObject()
-                .bytes(),
+        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .nullField("field")
+                        .endObject()),
                 XContentType.JSON));

         Collator collator = Collator.getInstance(ULocale.ROOT);
@@ -199,20 +201,20 @@ public void testNullValue() throws IOException {
     }

     public void testEnableStore() throws IOException {
-        String mapping =
XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("store", true).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -221,20 +223,20 @@ public void testEnableStore() throws IOException {
     }

     public void testDisableIndex() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("index", false).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -244,20 +246,20 @@ public void testDisableIndex() throws IOException {
     }

     public void testDisableDocValues() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("doc_values", false).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -266,19 +268,19 @@ public void testDisableDocValues() throws IOException {
     }

     public void testMultipleValues() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
-
.endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", Arrays.asList("1234", "5678"))
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", Arrays.asList("1234", "5678"))
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -328,20 +330,20 @@ public void testMultipleValues() throws IOException {
     }

     public void testIndexOptions() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("index_options", "freqs").endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -349,10 +351,10 @@ public void testIndexOptions() throws IOException {
         assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());

         for (String indexOptions : Arrays.asList("positions", "offsets")) {
-            final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
+            final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                     .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                     .field("index_options", indexOptions).endObject().endObject()
-                    .endObject().endObject().string();
+                    .endObject().endObject());
             IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                     () -> parser.parse("type", new CompressedXContent(mapping2)));
             assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions,
@@ -361,20 +363,20 @@ public void testIndexOptions() throws IOException {
     }

     public void testEnableNorms() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", FIELD_TYPE)
                 .field("norms", true).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1",
BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
                 XContentType.JSON));

         IndexableField[] fields = doc.rootDoc().getFields("field");
@@ -383,22 +385,22 @@ public void testEnableNorms() throws IOException {
     }

     public void testCollator() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field")
                 .field("type", FIELD_TYPE)
                 .field("language", "tr")
                 .field("strength", "primary")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         assertEquals(mapping, mapper.mappingSource().toString());

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "I WİLL USE TURKİSH CASING")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "I WİLL USE TURKİSH CASING")
+                        .endObject()),
                 XContentType.JSON));

         Collator collator = Collator.getInstance(new ULocale("tr"));
@@ -428,19 +430,19 @@ public void testCollator() throws IOException {
     }

     public void testUpdateCollator() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field")
                 .field("type", FIELD_TYPE)
                 .field("language", "tr")
                 .field("strength", "primary")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

-        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field")
                 .field("type", FIELD_TYPE)
                 .field("language", "en")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> indexService.mapperService().merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));
diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
index ad5f06b6ee3f1..1df613ab3f9b3 100644
--- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
+++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java
@@ -22,6 +22,8 @@
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -71,15
+73,15 @@ protected Collection<Class<? extends Plugin>> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); assertNotNull(fields); @@ -90,11 +92,11 @@ public void testDefaults() throws Exception { } public void testDocValuesSettingNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("doc_values", false) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -103,11 +105,11 @@ public void testDocValuesSettingNotAllowed() throws Exception { } // even setting to the default is not allowed, the setting is invalid - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("doc_values", true) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -117,11 +119,11 @@ } public void testIndexSettingNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("index", "not_analyzed") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -130,11 +132,11 @@ public void testIndexSettingNotAllowed() throws Exception { } // even setting to the default is not allowed, the setting is invalid - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("index", "no") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing
exception"); @@ -144,10 +146,10 @@ public void testIndexSettingNotAllowed() throws Exception { } public void testEmptyName() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("") .field("type", "murmur3") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index c433f0d256a97..6566063d220d3 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -21,8 +21,7 @@ import java.util.Collection; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -30,11 +29,9 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -42,7 +39,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.instanceOf; import org.apache.lucene.index.IndexableField; @@ -56,11 +52,11 @@ public void testSizeEnabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); boolean stored = false; @@ -77,11 +73,11 @@ public void testSizeDisabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); @@ -91,11 +87,11 
@@ public void testSizeNotSet() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); @@ -106,9 +102,9 @@ public void testThatDisablingWorksWhenMerging() throws Exception { DocumentMapper docMapper = service.mapperService().documentMapper("type"); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 6610895e1f497..2330e230f4505 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -19,6 +19,7 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -522,7 +523,7 @@ static class Response { */ private static Response newResponse(final RestStatus status, final Map headers, final XContentBuilder xContentBuilder) { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - xContentBuilder.bytes().writeTo(out); + BytesReference.bytes(xContentBuilder).writeTo(out); return new Response(status, headers, XContentType.JSON.mediaType(), out.toByteArray()); } catch (IOException e) { return newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); @@ -548,7 +549,7 @@ private static Response newError(final RestStatus status, final String message) .endArray() .endObject() .endObject(); - builder.bytes().writeTo(out); + BytesReference.bytes(builder).writeTo(out); } return new Response(status, emptyMap(), XContentType.JSON.mediaType(), out.toByteArray()); } catch (IOException e) { diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 4835c881e034c..c373adb5d743d 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -43,6 +43,7 @@ import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -306,7 +307,7 @@ private static HttpEntity buildUpdateSettingsRequestBody(Map set builder.endObject(); } builder.endObject(); - requestBody = builder.string(); + requestBody = Strings.toString(builder); } return new NStringEntity(requestBody, ContentType.APPLICATION_JSON); } diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 06b8406b078dd..2589f3a51f714 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -140,7 +141,7 @@ public void testSearch() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); count = randomIntBetween(2000, 3000); byte[] randomByteArray = new byte[16]; @@ -203,7 +204,7 @@ public void testNewReplicasWork() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); int numDocs = randomIntBetween(2000, 3000); indexRandomDocuments(numDocs, true, false, i -> { @@ -280,7 +281,7 @@ public void testAliasWithBadName() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); String aliasName = "%23" + index; // %23 == # client().performRequest("PUT", "/" + index + "/_alias/" + aliasName); @@ -328,7 +329,7 @@ public void testClusterState() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); client().performRequest("PUT", "/" + index); } @@ -379,7 +380,7 @@ public void testShrink() throws IOException { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); numDocs = randomIntBetween(512, 1024); 
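// Note on the recurring hunk shape in this file: every request body that used to end a
// builder chain with the removed XContentBuilder#string() is now passed through the static
// Strings.toString(XContentBuilder) helper, which (per the common/Strings.java hunk later in
// this patch) is defined as BytesReference.bytes(builder).utf8ToString(). A minimal sketch of
// the new pattern (assumed imports: JsonXContent, StringEntity, ContentType, Strings); the
// index settings shown are illustrative only, not part of this change:
XContentBuilder body = JsonXContent.contentBuilder()
    .startObject()
        .startObject("settings")
            .field("index.number_of_shards", 5)
        .endObject()
    .endObject();
StringEntity entity = new StringEntity(Strings.toString(body), ContentType.APPLICATION_JSON);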
indexRandomDocuments(numDocs, true, true, i -> { @@ -446,7 +447,7 @@ public void testShrinkAfterUpgrade() throws IOException { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -836,7 +837,7 @@ public void testSnapshotRestore() throws IOException { } templateBuilder.endObject().endObject(); client().performRequest("PUT", "/_template/test_template", emptyMap(), - new StringEntity(templateBuilder.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON)); if (runningAgainstOldCluster) { // Create the repo @@ -850,7 +851,7 @@ public void testSnapshotRestore() throws IOException { } repoConfig.endObject(); client().performRequest("PUT", "/_snapshot/repo", emptyMap(), - new StringEntity(repoConfig.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON)); } client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"), @@ -875,7 +876,7 @@ public void testHistoryUUIDIsAdded() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); } else { Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards")); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." 
+ index + ".shards.0"); @@ -919,7 +920,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion restoreCommand.field("rename_replacement", "restored_" + index); restoreCommand.endObject(); client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"), - new StringEntity(restoreCommand.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON)); // Make sure search finds all documents String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); @@ -997,7 +998,7 @@ private void indexRandomDocuments(int count, boolean flushAllowed, boolean saveI for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(), - new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON)); if (rarely()) { refresh(); } @@ -1022,7 +1023,7 @@ private void saveInfoDocument(String type, String value) throws IOException { // Only create the first version so we know how many documents are created when the index is first created Map params = singletonMap("op_type", "create"); client().performRequest("PUT", "/info/doc/" + index + "_" + type, params, - new StringEntity(infoDoc.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON)); } private String loadInfoDocument(String type) throws IOException { diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java index a46056e98b434..a38ff284a1a05 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.seqno.SeqNoStats; @@ -42,7 +43,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -237,15 +237,15 @@ public void testUpdateSnapshotStatus() throws Exception { logger.info("cluster discovered: {}", nodes.toString()); // Create the repository before taking the snapshot. 
- String repoConfig = JsonXContent.contentBuilder() - .startObject() - .field("type", "fs") - .startObject("settings") - .field("compress", randomBoolean()) - .field("location", System.getProperty("tests.path.repo")) - .endObject() - .endObject() - .string(); + String repoConfig = Strings + .toString(JsonXContent.contentBuilder() + .startObject() + .field("type", "fs") + .startObject("settings") + .field("compress", randomBoolean()) + .field("location", System.getProperty("tests.path.repo")) + .endObject() + .endObject()); assertOK( client().performRequest("PUT", "/_snapshot/repo", emptyMap(), diff --git a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java index bff28d6f375bf..f3e03f006c5aa 100644 --- a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java +++ b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Response; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -189,7 +190,7 @@ public void testQueryBuilderBWC() throws Exception { } mappingsAndSettings.endObject(); Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); assertEquals(200, rsp.getStatusLine().getStatusCode()); for (int i = 0; i < CANDIDATES.size(); i++) { diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 749c03598a378..9d05ef3f05db2 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; @@ -103,12 +104,12 @@ protected Collection> nodePlugins() { @Before public void createIndices() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("location").field("type", "geo_shape").endObject() .startObject("name").field("type", "text").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder() .put(indexSettings()) diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java index 948f573a05c8a..a795c295d2b1c 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java +++ 
b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -23,6 +23,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.Setting; @@ -210,7 +211,7 @@ private HttpEntity buildSettingsRequest(List> settings, boolean builder.endArray().endObject(); - return new StringEntity(builder.string(), ContentType.APPLICATION_JSON); + return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); } } diff --git a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java index 72d9d1b74b49d..46fafebeb4e2d 100644 --- a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -76,7 +77,7 @@ public void testTransportClient() throws URISyntaxException, IOException { builder.endArray(); } builder.endObject(); - body = builder.string(); + body = Strings.toString(builder); } put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON)); try (CloseableHttpResponse response = client.execute(put)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index a7f4ea25fdbee..952589766773f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -23,10 +23,10 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -130,7 +130,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index a9ff7a4c67b9c..78b33021a4b5c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -22,6 +22,7 @@ import 
org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -72,7 +73,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java index 24fa2817b1e3b..f84ccb738df03 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -77,10 +77,10 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java index 9db3fac299edf..ad81302918eb3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -161,7 +162,7 @@ public PutRepositoryRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index f282cedff3608..38d3a9d5caf54 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -76,7 +77,7 @@ public ActionRequestValidationException validate() { /** * Sets the value of "flat_settings". * Used only by the high-level REST client. - * + * * @param flatSettings * value of "flat_settings" flag to be set * @return this request @@ -89,7 +90,7 @@ public ClusterUpdateSettingsRequest flatSettings(boolean flatSettings) { /** * Return settings in flat format. * Used only by the high-level REST client. - * + * * @return true if settings need to be returned in flat format; false otherwise. */ public boolean flatSettings() { @@ -136,7 +137,7 @@ public ClusterUpdateSettingsRequest transientSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - transientSettings(builder.string(), builder.contentType()); + transientSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -175,7 +176,7 @@ public ClusterUpdateSettingsRequest persistentSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - persistentSettings(builder.string(), builder.contentType()); + persistentSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 90ba0ba187cd4..5d5f4685f03d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -338,7 +338,7 @@ public CreateSnapshotRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 0feb04e2823c8..c1b8c73c9ef0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -373,7 +373,7 @@ public RestoreSnapshotRequest settings(Map source) { try { XContentBuilder builder = 
XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -485,7 +485,7 @@ public RestoreSnapshotRequest indexSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - indexSettings(builder.string(), builder.contentType()); + indexSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index b8806daaec6ca..469106c9a6102 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -130,7 +131,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java index 10f544ce3abd0..bd3c77cdb264a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,8 +36,8 @@ import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; +import java.io.InputStream; import java.util.Map; -import java.util.Objects; /** * Represents an alias, to be associated with an index @@ -100,7 +101,7 @@ public Alias filter(Map filter) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(filter); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); @@ -119,7 +120,7 @@ public Alias filter(QueryBuilder filterBuilder) { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json 
for alias request", e); @@ -227,7 +228,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(name); if (filter != null) { - builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); + try (InputStream stream = new BytesArray(filter).streamInput()) { + builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON); + } } if (indexRouting != null && indexRouting.equals(searchRouting)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 61be2778845ac..6332f50c1452e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -377,7 +378,7 @@ public AliasActions filter(Map filter) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(filter); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); @@ -393,7 +394,7 @@ public AliasActions filter(QueryBuilder filter) { XContentBuilder builder = XContentFactory.jsonBuilder(); filter.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -432,7 +433,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.array(ALIASES.getPreferredName(), aliases); } if (false == Strings.isEmpty(filter)) { - builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); + try (InputStream stream = new BytesArray(filter).streamInput()) { + builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON); + } } if (false == Strings.isEmpty(routing)) { builder.field(ROUTING.getPreferredName(), routing); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index 9afdf57ebafde..d9c018848d7e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -78,7 +78,7 @@ public static class NameOrDefinition implements Writeable { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(definition); - this.definition = Settings.builder().loadFromSource(builder.string(), builder.contentType()).build(); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); } catch (IOException e) { throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 
e22a8be968ed1..5f5ba0e24baef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -48,6 +49,7 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.io.InputStream; import java.io.UncheckedIOException; import java.util.HashMap; import java.util.HashSet; @@ -180,11 +182,7 @@ public CreateIndexRequest settings(String source, XContentType xContentType) { * Allows to set the settings using a json builder. */ public CreateIndexRequest settings(XContentBuilder builder) { - try { - settings(builder.string(), builder.contentType()); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to generate json settings from builder", e); - } + settings(Strings.toString(builder), builder.contentType()); return this; } @@ -196,7 +194,7 @@ public CreateIndexRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), XContentType.JSON); + settings(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -249,7 +247,7 @@ public CreateIndexRequest cause(String cause) { * @param source The mapping source */ public CreateIndexRequest mapping(String type, XContentBuilder source) { - return mapping(type, source.bytes(), source.contentType()); + return mapping(type, BytesReference.bytes(source), source.contentType()); } /** @@ -293,7 +291,7 @@ public CreateIndexRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return aliases(builder.bytes()); + return aliases(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -303,7 +301,7 @@ public CreateIndexRequest aliases(Map source) { * Sets the aliases that will be associated with the index when it gets created */ public CreateIndexRequest aliases(XContentBuilder source) { - return aliases(source.bytes()); + return aliases(BytesReference.bytes(source)); } /** @@ -350,7 +348,7 @@ public CreateIndexRequest source(String source, XContentType xContentType) { * Sets the settings and mappings as a single source. 
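* <p>
* A minimal usage sketch, for illustration only (the index name and settings shown are
* hypothetical, not prescribed by this API):
* <pre>{@code
* XContentBuilder source = XContentFactory.jsonBuilder()
*     .startObject()
*         .startObject("settings")
*             .field("number_of_shards", 1)
*         .endObject()
*     .endObject();
* CreateIndexRequest request = new CreateIndexRequest("my-index").source(source);
* }</pre>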
*/ public CreateIndexRequest source(XContentBuilder source) { - return source(source.bytes(), source.contentType()); + return source(BytesReference.bytes(source), source.contentType()); } /** @@ -536,7 +534,9 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.startObject(MAPPINGS.getPreferredName()); for (Map.Entry entry : mappings.entrySet()) { - builder.rawField(entry.getKey(), new BytesArray(entry.getValue()), XContentType.JSON); + try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) { + builder.rawField(entry.getKey(), stream, XContentType.JSON); + } } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 2dc27317c78e7..d837c1cbd199b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.Mapper; import java.io.IOException; +import java.io.InputStream; import java.util.HashMap; import java.util.Map; @@ -127,7 +128,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (params.paramAsBoolean("pretty", false)) { builder.field("mapping", sourceAsMap()); } else { - builder.rawField("mapping", source, XContentType.JSON); + try (InputStream stream = source.streamInput()) { + builder.rawField("mapping", stream, XContentType.JSON); + } } return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 0b9fabba365a0..ec825a2a5ed96 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -250,11 +250,7 @@ public static XContentBuilder buildFromSimplifiedDef(String type, Object... sour * The mapping source definition. 
*/ public PutMappingRequest source(XContentBuilder mappingBuilder) { - try { - return source(mappingBuilder.string(), mappingBuilder.contentType()); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to build json for mapping request", e); - } + return source(Strings.toString(mappingBuilder), mappingBuilder.contentType()); } /** @@ -265,7 +261,7 @@ public PutMappingRequest source(Map mappingSource) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(mappingSource); - return source(builder.string(), XContentType.JSON); + return source(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index dcea5673cb51d..686bf8a74b85d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -153,7 +154,7 @@ public UpdateSettingsRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 24a0e10e86695..46aef007e6bab 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -213,7 +214,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 766c3323c9409..83c3f474e6616 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java 
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -196,7 +197,7 @@ public PutIndexTemplateRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), XContentType.JSON); + settings(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -237,7 +238,7 @@ public String cause() { * @param source The mapping source */ public PutIndexTemplateRequest mapping(String type, XContentBuilder source) { - return mapping(type, source.bytes(), source.contentType()); + return mapping(type, BytesReference.bytes(source), source.contentType()); } /** @@ -295,7 +296,7 @@ public Map mappings() { */ public PutIndexTemplateRequest source(XContentBuilder templateBuilder) { try { - return source(templateBuilder.bytes(), templateBuilder.contentType()); + return source(BytesReference.bytes(templateBuilder), templateBuilder.contentType()); } catch (Exception e) { throw new IllegalArgumentException("Failed to build json for template request", e); } @@ -412,7 +413,7 @@ public PutIndexTemplateRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return aliases(builder.bytes()); + return aliases(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -422,7 +423,7 @@ public PutIndexTemplateRequest aliases(Map source) { * Sets the aliases that will be associated with the index when it gets created */ public PutIndexTemplateRequest aliases(XContentBuilder source) { - return aliases(source.bytes()); + return aliases(BytesReference.bytes(source)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index f2ddca1955878..024000ade806d 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; @@ -75,7 +74,7 @@ public class IndexRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { /** - * Max length of the source document to include into toString() + * Max length of the source document to include into string() * * @see ReplicationRequest#createTask */ @@ -332,7 +331,7 @@ public IndexRequest source(String source, XContentType xContentType) { * Sets the content source to index. 
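* <p>
* A minimal sketch; the index, type, id and document field are hypothetical:
* <pre>{@code
* XContentBuilder doc = XContentFactory.jsonBuilder()
*     .startObject()
*         .field("user", "kimchy")
*     .endObject();
* IndexRequest request = new IndexRequest("my-index", "_doc", "1").source(doc);
* }</pre>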
*/ public IndexRequest source(XContentBuilder sourceBuilder) { - return source(sourceBuilder.bytes(), sourceBuilder.contentType()); + return source(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 26e47f8232e41..c7c711253baeb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -306,7 +306,7 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults()); } xContentBuilder.endObject(); - xContentBuilder.bytes().writeTo(output); + BytesReference.bytes(xContentBuilder).writeTo(output); } output.write(xContent.streamSeparator()); try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { @@ -316,7 +316,7 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, xContentBuilder.startObject(); xContentBuilder.endObject(); } - xContentBuilder.bytes().writeTo(output); + BytesReference.bytes(xContentBuilder).writeTo(output); } output.write(xContent.streamSeparator()); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 3aa7b832f1f4e..68841fe71e5b4 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -256,7 +256,7 @@ public XContentType xContentType() { * Sets an artificial document from which term vectors are requested for. 
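* <p>
* Illustrative sketch of supplying an artificial document; the index, type, id and field
* name are hypothetical:
* <pre>{@code
* XContentBuilder artificialDoc = XContentFactory.jsonBuilder()
*     .startObject()
*         .field("text", "the quick brown fox")
*     .endObject();
* TermVectorsRequest request = new TermVectorsRequest("my-index", "_doc", "1").doc(artificialDoc);
* }</pre>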
*/ public TermVectorsRequest doc(XContentBuilder documentBuilder) { - return this.doc(documentBuilder.bytes(), true, documentBuilder.contentType()); + return this.doc(BytesReference.bytes(documentBuilder), true, documentBuilder.contentType()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java index 21a77c2e0f2b3..01a9812516bf7 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java @@ -259,7 +259,8 @@ private void buildValues(XContentBuilder builder, Terms curTerms, int termFreq) builder.field(FieldStrings.END_OFFSET, currentEndOffset[i]); } if (curTerms.hasPayloads() && (currentPayloads[i].length() > 0)) { - builder.field(FieldStrings.PAYLOAD, currentPayloads[i]); + BytesRef bytesRef = currentPayloads[i].toBytesRef(); + builder.field(FieldStrings.PAYLOAD, bytesRef.bytes, bytesRef.offset, bytesRef.length); } builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index fbf005415d96d..4ee49f2407b5d 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -356,7 +356,7 @@ public static GetResult extractGetResult(final UpdateRequest request, String con BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) { builder.value(value); - sourceFilteredAsBytes = builder.bytes(); + sourceFilteredAsBytes = BytesReference.bytes(builder); } } catch (IOException e) { throw new ElasticsearchException("Error filtering source", e); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index c0262a6d01d0b..3293be21859bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -244,7 +245,7 @@ public Builder filter(Map filter) { } try { XContentBuilder builder = XContentFactory.jsonBuilder().map(filter); - this.filter = new CompressedXContent(builder.bytes()); + this.filter = new CompressedXContent(BytesReference.bytes(builder)); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -252,11 +253,7 @@ public Builder filter(Map filter) { } public Builder filter(XContentBuilder filterBuilder) { - try { - return filter(filterBuilder.string()); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to build json for alias request", e); - } + return filter(Strings.toString(filterBuilder)); } public Builder routing(String routing) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index 74233b5cec7d4..c0d254509c118 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; @@ -459,7 +460,7 @@ public static IndexTemplateMetaData fromXContent(XContentParser parser, String t String mappingType = currentFieldName; Map mappingSource = MapBuilder.newMapBuilder().put(mappingType, parser.mapOrdered()).map(); - builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string()); + builder.putMapping(mappingType, Strings.toString(XContentFactory.jsonBuilder().map(mappingSource))); } } } else if ("aliases".equals(currentFieldName)) { @@ -483,7 +484,7 @@ public static IndexTemplateMetaData fromXContent(XContentParser parser, String t Map mapping = parser.mapOrdered(); if (mapping.size() == 1) { String mappingType = mapping.keySet().iterator().next(); - String mappingSource = XContentFactory.jsonBuilder().map(mapping).string(); + String mappingSource = Strings.toString(XContentFactory.jsonBuilder().map(mapping)); if (mappingSource == null) { // crap, no mapping source, warn? diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 9cbfb2ec71f16..c5e8ed729674d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -99,7 +100,7 @@ public MappingMetaData(CompressedXContent mapping) throws IOException { public MappingMetaData(String type, Map mapping) throws IOException { this.type = type; XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); - this.source = new CompressedXContent(mappingBuilder.bytes()); + this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder)); Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 06aa51f612bcc..9fff294daea19 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -1081,7 +1081,7 @@ public static String toXContent(MetaData metaData) throws IOException { builder.startObject(); toXContent(metaData, builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params 
params) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/common/Strings.java b/server/src/main/java/org/elasticsearch/common/Strings.java
index 8c823f401a0f8..6c2068197866e 100644
--- a/server/src/main/java/org/elasticsearch/common/Strings.java
+++ b/server/src/main/java/org/elasticsearch/common/Strings.java
@@ -755,6 +755,14 @@ public static String toString(ToXContent toXContent) {
         return toString(toXContent, false, false);
     }
 
+    /**
+     * Returns a string representation of the builder (only applicable for text-based xcontent).
+     * @param xContentBuilder the builder to render as a string
+     */
+    public static String toString(XContentBuilder xContentBuilder) {
+        return BytesReference.bytes(xContentBuilder).utf8ToString();
+    }
+
     /**
      * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
      * Wraps the output into an anonymous object if needed. Allows to control whether the outputted
@@ -771,7 +779,7 @@ public static String toString(ToXContent toXContent, boolean pretty, boolean hum
             if (toXContent.isFragment()) {
                 builder.endObject();
             }
-            return builder.string();
+            return toString(builder);
         } catch (IOException e) {
             try {
                 XContentBuilder builder = createBuilder(pretty, human);
@@ -779,7 +787,7 @@ public static String toString(ToXContent toXContent, boolean pretty, boolean hum
                 builder.field("error", "error building toString out of XContent: " + e.getMessage());
                 builder.field("stack_trace", ExceptionsHelper.stackTrace(e));
                 builder.endObject();
-                return builder.string();
+                return toString(builder);
             } catch (IOException e2) {
                 throw new ElasticsearchException("cannot generate error message for deserialization", e);
             }
@@ -845,5 +853,4 @@ public static String padStart(String s, int minimumLength, char c) {
             return sb.toString();
         }
     }
-
 }
diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java
index d7f9de345a438..b9b6bce7969c4 100644
--- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java
+++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java
@@ -21,8 +21,11 @@
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefIterator;
+import org.elasticsearch.common.io.stream.BytesStream;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 
+import java.io.ByteArrayOutputStream;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
@@ -38,6 +41,20 @@ public abstract class BytesReference implements Accountable, Comparable> additionalSettings, Li
 xContentBuilder.startObject();
 indexSettings.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
 xContentBuilder.endObject();
-builder.append(xContentBuilder.string());
+builder.append(Strings.toString(xContentBuilder));
 }
 builder.append("'");
 builder.append(System.lineSeparator());
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java b/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java
index 0f25231634d07..aeb4e53690a69 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java
@@ -219,7 +219,7 @@ public void declareRawObject(BiConsumer consumer, ParseFi
         try (XContentBuilder builder = JsonXContent.contentBuilder()) {
             builder.prettyPrint();
             builder.copyCurrentStructure(p);
-            return builder.bytes();
+            return BytesReference.bytes(builder);
         }
     };
     declareField(consumer, bytesParser, field, ValueType.OBJECT);
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
index 16f0ac83a849f..9e1bb362d4879 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
@@ -20,10 +20,7 @@
 package org.elasticsearch.common.xcontent;
 
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.io.stream.BytesStream;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -173,6 +170,13 @@ public XContentType contentType() {
         return generator.contentType();
     }
 
+    /**
+     * @return the output stream to which the built object is being written. Note that it is dangerous to modify the stream.
+     */
+    public OutputStream getOutputStream() {
+        return bos;
+    }
+
     public XContentBuilder prettyPrint() {
         generator.usePrettyPrint();
         return this;
     }
@@ -626,24 +630,6 @@ public XContentBuilder utf8Value(BytesRef value) throws IOException {
         return this;
     }
 
-    /**
-     * Writes the binary content of the given {@link BytesReference}.
-     *
-     * Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
-     */
-    public XContentBuilder field(String name, BytesReference value) throws IOException {
-        return field(name).value(value);
-    }
-
-    /**
-     * Writes the binary content of the given {@link BytesReference}.
-     *
-     * Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
-     */
-    public XContentBuilder value(BytesReference value) throws IOException {
-        return (value == null) ?
nullValue() : binaryValue(value.toBytesRef()); - } - //////////////////////////////////////////////////////////////////////////// // Text ////////////////////////////////// @@ -810,8 +796,6 @@ private void unknownValue(Object value, boolean ensureNoSelfReferences) throws I value((Calendar) value); } else if (value instanceof ReadableInstant) { value((ReadableInstant) value); - } else if (value instanceof BytesReference) { - value((BytesReference) value); } else if (value instanceof ToXContent) { value((ToXContent) value); } else { @@ -982,28 +966,6 @@ public XContentBuilder rawField(String name, InputStream value, XContentType con return this; } - /** - * Writes a raw field with the given bytes as the value - * @deprecated use {@link #rawField(String name, BytesReference, XContentType)} to avoid content type auto-detection - */ - @Deprecated - public XContentBuilder rawField(String name, BytesReference value) throws IOException { - try (InputStream stream = value.streamInput()) { - generator.writeRawField(name, stream); - } - return this; - } - - /** - * Writes a raw field with the given bytes as the value - */ - public XContentBuilder rawField(String name, BytesReference value, XContentType contentType) throws IOException { - try (InputStream stream = value.streamInput()) { - generator.writeRawField(name, stream, contentType); - } - return this; - } - /** * Writes a value with the source coming directly from the bytes in the stream */ @@ -1035,22 +997,6 @@ public XContentGenerator generator() { return this.generator; } - public BytesReference bytes() { - close(); - if (bos instanceof ByteArrayOutputStream) { - return new BytesArray(((ByteArrayOutputStream) bos).toByteArray()); - } else { - return ((BytesStream) bos).bytes(); - } - } - - /** - * Returns a string representation of the builder (only applicable for text based xcontent). - */ - public String string() throws IOException { - return bytes().utf8ToString(); - } - static void ensureNameNotNull(String name) { ensureNotNull(name, "Field name cannot be null"); } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 48f3685720f9a..e392295722959 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.Compressor; @@ -174,7 +175,7 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson, b builder.prettyPrint(); } builder.copyCurrentStructure(parser); - return builder.string(); + return Strings.toString(builder); } } @@ -371,7 +372,7 @@ public static void copyCurrentEvent(XContentGenerator generator, XContentParser /** * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using - * {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference)}. + * {@link XContentBuilder#rawField(String, InputStream)}. 
* @deprecated use {@link #writeRawField(String, BytesReference, XContentType, XContentBuilder, Params)} to avoid content type * auto-detection */ @@ -383,13 +384,15 @@ public static void writeRawField(String field, BytesReference source, XContentBu builder.rawField(field, compressedStreamInput); } } else { - builder.rawField(field, source); + try (InputStream stream = source.streamInput()) { + builder.rawField(field, stream); + } } } /** * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using - * {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference, XContentType)}. + * {@link XContentBuilder#rawField(String, InputStream, XContentType)}. */ public static void writeRawField(String field, BytesReference source, XContentType xContentType, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -400,7 +403,9 @@ public static void writeRawField(String field, BytesReference source, XContentTy builder.rawField(field, compressedStreamInput, xContentType); } } else { - builder.rawField(field, source, xContentType); + try (InputStream stream = source.streamInput()) { + builder.rawField(field, stream, xContentType); + } } } @@ -428,7 +433,7 @@ public static BytesReference toXContent(ToXContent toXContent, XContentType xCon if (toXContent.isFragment()) { builder.endObject(); } - return builder.bytes(); + return BytesReference.bytes(builder); } } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java index a9037b74ce9ed..a645bf81da343 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java @@ -229,7 +229,6 @@ enum NumberType { * *
     * <ul>
     *     <li>{@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}</li>
-    *     <li>{@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}</li>
     *     <li>{@link XContentBuilder#field(String, byte[], int, int)}}</li>
     *     <li>{@link XContentBuilder#field(String, byte[])}}</li>
     * </ul>
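// ---------------------------------------------------------------------------------
// Editor's note -- illustration only, not part of the patch. A minimal sketch,
// assuming the 6.x-era xcontent APIs used throughout this patch (imports elided),
// of the round trip the javadoc list above describes once field(String, BytesReference)
// is removed: unwrap the BytesReference into a BytesRef (as the TermVectorsResponse
// payload hunk earlier in this patch does) and read the bytes back with
// XContentParser#binaryValue(). The method name and "payload" parameter are hypothetical.
// ---------------------------------------------------------------------------------
static byte[] binaryRoundTrip(BytesReference payload) throws IOException {
    BytesRef ref = payload.toBytesRef();
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    builder.field("blob", ref.bytes, ref.offset, ref.length); // replaces field("blob", payload)
    builder.endObject();
    try (InputStream stream = BytesReference.bytes(builder).streamInput();
         XContentParser parser = JsonXContent.jsonXContent.createParser(
                 NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
        parser.nextToken(); // Token.START_OBJECT
        parser.nextToken(); // Token.FIELD_NAME ("blob")
        parser.nextToken(); // Token.VALUE_STRING (JSON carries binary as base64)
        return parser.binaryValue();
    }
}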
diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 4cdf2a4892690..ae59c6f507749 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -304,7 +304,7 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index //the original document gets slightly modified: whitespaces or pretty printing are not preserved, //it all depends on the current builder settings builder.copyCurrentStructure(parser); - source = builder.bytes(); + source = BytesReference.bytes(builder); } } else if (FIELDS.equals(currentFieldName)) { while(parser.nextToken() != XContentParser.Token.END_OBJECT) { diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 0aeb4f3f19d58..dcd18c8f313f9 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -227,7 +227,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[] sourceAsMap = typeMapTuple.v2(); sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes()); try { - source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes(); + source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap)); } catch (IOException e) { throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index dd737af1ddf32..1dc714a899845 100755 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; @@ -256,7 +257,7 @@ public void merge(Map> mappings, MergeReason reason) Map mappingSourcesCompressed = new LinkedHashMap<>(mappings.size()); for (Map.Entry> entry : mappings.entrySet()) { try { - mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string())); + mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(entry.getValue())))); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index bd92cf6d00970..662f33572d927 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -146,7 +147,7 @@ public String toString() { try { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); toXContent(builder, new ToXContent.MapParams(emptyMap())); - return builder.endObject().string(); + return Strings.toString(builder.endObject()); } catch (IOException bogus) { throw new UncheckedIOException(bogus); } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index d0aa588aeb997..d362a4c534aaf 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.UidFieldMapper; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -208,7 +209,7 @@ public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) } this.index = index; this.type = type; - this.doc = doc.bytes(); + this.doc = BytesReference.bytes(doc); this.xContentType = doc.contentType(); } @@ -373,7 +374,7 @@ public static Item parse(XContentParser parser, Item item) throws IOException { } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { item.id = parser.text(); } else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) { - item.doc = jsonBuilder().copyCurrentStructure(parser).bytes(); + item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser)); item.xContentType = XContentType.JSON; } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.START_ARRAY) { @@ -424,7 +425,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(ID.getPreferredName(), this.id); } if (this.doc != null) { - builder.rawField(DOC.getPreferredName(), this.doc, xContentType); + try (InputStream stream = this.doc.streamInput()) { + builder.rawField(DOC.getPreferredName(), stream, xContentType); + } } if (this.fields != null) { builder.array(FIELDS.getPreferredName(), this.fields); @@ -450,7 +453,7 @@ public String toString() { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.prettyPrint(); toXContent(builder, EMPTY_PARAMS); - return builder.string(); + return Strings.toString(builder); } catch (Exception e) { return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index fd55cff0ccefb..aa39d5f7417fa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -104,7 +104,7 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob } builder.field(DECAY, decay); builder.endObject(); - this.functionBytes = builder.bytes(); + this.functionBytes = BytesReference.bytes(builder); } catch (IOException e) { throw new IllegalArgumentException("unable to build inner function object",e); } @@ -149,7 +149,9 @@ public BytesReference 
getFunctionBytes() { @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.rawField(fieldName, functionBytes); + try (InputStream stream = functionBytes.streamInput()) { + builder.rawField(fieldName, stream); + } builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 989c52d8fd46e..2a05cc62f0091 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -109,7 +109,7 @@ public DFB fromXContent(XContentParser parser) throws IOException, ParsingExcept fieldName = currentFieldName; XContentBuilder builder = XContentFactory.jsonBuilder(); builder.copyCurrentStructure(parser); - functionBytes = builder.bytes(); + functionBytes = BytesReference.bytes(builder); } else if (MULTI_VALUE_MODE.match(currentFieldName, parser.getDeprecationHandler())) { multiValueMode = MultiValueMode.fromString(parser.text()); } else { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java index 3faf5e3ec8a73..a89fdcacb2bc3 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -936,7 +937,7 @@ public synchronized String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 3d96493746805..4dea9eb6b5f68 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -50,7 +49,7 @@ public final class PipelineConfiguration extends AbstractDiffable { XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser); - builder.setConfig(contentBuilder.bytes(), contentBuilder.contentType()); + builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); }, new 
ParseField("config"), ObjectParser.ValueType.OBJECT); } diff --git a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 11daaddd14720..f8575b4a0127e 100644 --- a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -54,7 +54,7 @@ public class BytesRestResponse extends RestResponse { * Creates a new response based on {@link XContentBuilder}. */ public BytesRestResponse(RestStatus status, XContentBuilder builder) { - this(status, builder.contentType().mediaType(), builder.bytes()); + this(status, builder.contentType().mediaType(), BytesReference.bytes(builder)); } /** @@ -94,7 +94,7 @@ public BytesRestResponse(RestChannel channel, Exception e) throws IOException { public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { this.status = status; try (XContentBuilder builder = build(channel, status, e)) { - this.content = builder.bytes(); + this.content = BytesReference.bytes(builder); this.contentType = builder.contentType().mediaType(); } if (e instanceof ElasticsearchException) { diff --git a/server/src/main/java/org/elasticsearch/script/Script.java b/server/src/main/java/org/elasticsearch/script/Script.java index 7361bd2fc2a71..a64a3ecd37640 100644 --- a/server/src/main/java/org/elasticsearch/script/Script.java +++ b/server/src/main/java/org/elasticsearch/script/Script.java @@ -21,7 +21,9 @@ import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -159,7 +161,7 @@ private void setInline(XContentParser parser) { if (parser.currentToken() == Token.START_OBJECT) { //this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); - idOrCode = builder.copyCurrentStructure(parser).string(); + idOrCode = Strings.toString(builder.copyCurrentStructure(parser)); options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { idOrCode = parser.text(); @@ -283,7 +285,7 @@ public static Script parse(Settings settings) { builder.startObject(); settings.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - try (InputStream stream = builder.bytes().streamInput(); + try (InputStream stream = BytesReference.bytes(builder).streamInput(); XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return parse(parser); @@ -639,7 +641,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) if (type == ScriptType.INLINE) { if (contentType != null && builder.contentType().mediaType().equals(contentType)) { - builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), new BytesArray(idOrCode)); + try (InputStream stream = new BytesArray(idOrCode).streamInput()) { + builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream); + } } else { builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptException.java b/server/src/main/java/org/elasticsearch/script/ScriptException.java index 
91e6ad401fc88..726f218610833 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptException.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptException.java @@ -26,6 +26,7 @@ import java.util.Objects; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -49,11 +50,11 @@ public class ScriptException extends ElasticsearchException { private final List scriptStack; private final String script; private final String lang; - + /** * Create a new ScriptException. - * @param message A short and simple summary of what happened, such as "compile error". - * Must not be {@code null}. + * @param message A short and simple summary of what happened, such as "compile error". + * Must not be {@code null}. * @param cause The underlying cause of the exception. Must not be {@code null}. * @param scriptStack An implementation-specific "stacktrace" for the error in the script. * Must not be {@code null}, but can be empty (though this should be avoided if possible). @@ -85,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(script); out.writeString(lang); } - + @Override protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException { builder.field("script_stack", scriptStack); @@ -100,7 +101,7 @@ protected void metadataToXContent(XContentBuilder builder, Params params) throws public List getScriptStack() { return scriptStack; } - + /** * Returns the identifier for which script. * @return script's name or source text that identifies the script. @@ -108,7 +109,7 @@ public List getScriptStack() { public String getScript() { return script; } - + /** * Returns the language of the script. * @return the {@code lang} parameter of the scripting engine. @@ -117,7 +118,7 @@ public String getLang() { return lang; } - /** + /** * Returns a JSON version of this exception for debugging. 
*/ public String toJsonString() { @@ -126,7 +127,7 @@ public String toJsonString() { json.startObject(); toXContent(json, ToXContent.EMPTY_PARAMS); json.endObject(); - return json.string(); + return Strings.toString(json); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index f85ac50689d47..9c52ff943d2a1 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -110,7 +111,7 @@ private void setSource(XContentParser parser) { if (parser.currentToken() == Token.START_OBJECT) { //this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); - source = builder.copyCurrentStructure(parser).string(); + source = Strings.toString(builder.copyCurrentStructure(parser)); options.put(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { source = parser.text(); @@ -292,7 +293,7 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon builder.copyCurrentStructure(parser); } - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, builder.string(), Collections.emptyMap()); + return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, Strings.toString(builder), Collections.emptyMap()); } } } catch (IOException ioe) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 04af2406bb8f4..96a5ebc25e2da 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -585,7 +585,7 @@ private static BytesReference parseSourceBytes(XContentParser parser) throws IOE // pretty printing are not preserved, // it all depends on the current builder settings builder.copyCurrentStructure(parser); - return builder.bytes(); + return BytesReference.bytes(builder); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java index 403bf833878bf..2da74c56f6a33 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHit; @@ -71,7 +72,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { builder.startObject(); builder.endObject(); } - hitContext.hit().sourceRef(builder.bytes()); + hitContext.hit().sourceRef(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchException("Error filtering source", e); } 
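// ---------------------------------------------------------------------------------
// Editor's note -- illustration only, not part of the patch. A minimal sketch of the
// mechanical rewrite the surrounding hunks apply: the removed XContentBuilder#bytes()
// and XContentBuilder#string() instance methods become the static helpers added to
// BytesReference and Strings earlier in this patch, and raw fields take an InputStream
// instead of a BytesReference. Imports elided; the method name is hypothetical.
// ---------------------------------------------------------------------------------
static void migratedUsage() throws IOException {
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject().field("field", "value").endObject();

    BytesReference bytes = BytesReference.bytes(builder); // before: builder.bytes()
    String json = bytes.utf8ToString();                   // before: builder.string();
                                                          // same as Strings.toString(builder)

    // Raw fields likewise stop taking a BytesReference; callers open a stream instead:
    XContentBuilder wrapper = XContentFactory.jsonBuilder();
    wrapper.startObject();
    try (InputStream stream = bytes.streamInput()) {
        wrapper.rawField("inner", stream, XContentType.JSON); // before: rawField("inner", bytes)
    }
    wrapper.endObject();
}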
diff --git a/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index 389b81ffcbad4..7b2cedea64abc 100644 --- a/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -26,6 +26,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -284,7 +285,7 @@ public String toString() { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.prettyPrint(); toXContent(builder, EMPTY_PARAMS); - return builder.string(); + return Strings.toString(builder); } catch (Exception e) { throw new ElasticsearchException("Failed to build xcontent.", e); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index c743eb259e96f..a54f1193df008 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -767,7 +768,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 97dcd27b5d99f..4f5c3b789f892 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; +import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -94,7 +95,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder explain json output: \n{}", explanation.toXContent(builder, ToXContent.EMPTY_PARAMS).string()); + logger.debug("--> explain json output: \n{}", Strings.toString(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS))); } return explanation; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java index bce9afd1c1f57..7e61be59d9518 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.MoveDecision; import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -90,7 +91,7 @@ public void testExplanationToXContent() throws Exception { "{\"id\":\"node-0\",\"name\":\"\",\"transport_address\":\"" + cae.getCurrentNode().getAddress() + "\",\"weight_ranking\":3},\"can_remain_on_current_node\":\"yes\",\"can_rebalance_cluster\":\"yes\"," + "\"can_rebalance_to_other_node\":\"no\",\"rebalance_explanation\":\"cannot rebalance as no target node exists " + - "that can both allocate this shard and improve the cluster balance\"}", builder.string()); + "that can both allocate this shard and improve the cluster balance\"}", Strings.toString(builder)); } private static ClusterAllocationExplanation randomClusterAllocationExplanation(boolean assignedShard) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 2fb23b26709bd..cb6f2b57b2bd0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -38,6 +38,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -796,7 +798,7 @@ private Map serialize(ListTasksResponse response, boolean byPare } builder.endObject(); builder.flush(); - logger.info(builder.string()); - return XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + logger.info(Strings.toString(builder)); + return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 77b5ccc09abad..8d1a306f26094 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -209,7 +210,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(params); if (hasBody) { - requestBuilder.withContent(builder.bytes(), builder.contentType()); + requestBuilder.withContent(BytesReference.bytes(builder), builder.contentType()); } return requestBuilder.build(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index 79b4fa6dedc1e..4ced505717a2e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -123,7 +124,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } { @@ -156,7 +157,7 @@ public void testToXContent() throws IOException { " ]\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } { XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); @@ -196,7 +197,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index fdca03ebcda4e..6c9277a61bdee 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -49,8 +49,8 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws if (addRandomFields) { String unsupportedField = "unsupported_field"; - BytesReference mutated = XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes, - Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10)).bytes(); + BytesReference mutated = BytesReference.bytes(XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes, + Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10))); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> ClusterUpdateSettingsRequest.fromXContent(createParser(xContentType.xContent(), mutated))); assertThat(iae.getMessage(), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java index 01c2457f96744..1811bfb89a62d 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -153,7 +154,7 @@ public void testParseAdd() throws IOException { if (filter == null || filter.isEmpty()) { assertNull(action.filter()); } else { - assertEquals(XContentFactory.contentBuilder(XContentType.JSON).map(filter).string(), action.filter()); + assertEquals(Strings.toString(XContentFactory.contentBuilder(XContentType.JSON).map(filter)), action.filter()); } assertEquals(Objects.toString(searchRouting, null), action.searchRouting()); assertEquals(Objects.toString(indexRouting, null), action.indexRouting()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index 034570bbc5c11..dbca9f7a98f13 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -46,7 +46,7 @@ public class CreateIndexRequestTests extends ESTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); request.mapping("my_type", mapping, XContentType.JSON); try (BytesStreamOutput output = new BytesStreamOutput()) { @@ -87,7 +87,7 @@ public void testTopLevelKeys() { public void testToXContent() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); request.mapping("my_type", mapping, XContentType.JSON); Alias alias = new Alias("test_alias"); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 0030dc3c7aee5..e816b08187f1b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestTests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -86,7 +85,7 @@ public void testBuildFromSimplifiedDef() { public void testPutMappingRequestSerialization() throws IOException { PutMappingRequest request = new 
PutMappingRequest("foo"); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); request.source(mapping, XContentType.YAML); assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.source()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index a2ef02af486a7..16afa92fb0377 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -171,7 +171,7 @@ public void testUnknownFields() throws IOException { builder.endObject(); } builder.endObject(); - BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, builder.bytes(), null, random()); + BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); expectThrows(ParsingException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index fbd8f8764cf73..d40199d1d103e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -30,9 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.shard.ShardStateMetaData; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.NodeDisconnectedException; @@ -72,7 +70,7 @@ public void testBasicSerialization() throws Exception { contentBuilder.startObject(); storesResponse.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - BytesReference bytes = contentBuilder.bytes(); + BytesReference bytes = BytesReference.bytes(contentBuilder); try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { Map map = parser.map(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index d3c133915e7b8..f0e9a57f7f3e6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutRequest; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -110,9 +111,9 @@ public void testIndexTemplateWithValidateEmptyMapping() throws Exception { public void testIndexTemplateWithValidateMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); request.patterns(Collections.singletonList("te*")); - request.putMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() - .endObject().endObject().endObject().string()); + request.putMapping("type1", Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() + .endObject().endObject().endObject())); List errors = putTemplateDetail(request); assertThat(errors.size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index fca6ca4fd84d9..72cbe2bd9ecab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -77,7 +78,7 @@ public void testPutIndexTemplateRequest510() throws IOException { public void testPutIndexTemplateRequestSerializationXContent() throws IOException { PutIndexTemplateRequest request = new PutIndexTemplateRequest("foo"); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); request.patterns(Collections.singletonList("foo")); request.mapping("bar", mapping, XContentType.YAML); assertNotEquals(mapping, request.mappings().get("bar")); @@ -106,7 +107,7 @@ public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOExcep in.setVersion(version); PutIndexTemplateRequest request = new PutIndexTemplateRequest(); request.readFrom(in); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); assertNotEquals(mapping, request.mappings().get("bar")); assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar")); assertEquals("foo", request.name()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 4a55f0c8b95bf..20a42407720ff 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -104,7 +104,7 @@ public void testFailureToAndFromXContent() throws IOException 
{ // Shuffle the XContent fields if (randomBoolean()) { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - originalBytes = shuffleXContent(parser, randomBoolean()).bytes(); + originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 44e0bbf823063..76a99994e04ee 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -307,8 +308,8 @@ private static MultiGetRequestBuilder indexDocs(Client client, BulkProcessor pro .source(Requests.INDEX_CONTENT_TYPE, "field", randomRealisticUnicodeOfLengthBetween(1, 30))); } else { final String source = "{ \"index\":{\"_index\":\"test\",\"_type\":\"test\",\"_id\":\"" + Integer.toString(i) + "\"} }\n" - + JsonXContent.contentBuilder() - .startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject().string() + "\n"; + + Strings.toString(JsonXContent.contentBuilder() + .startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject()) + "\n"; processor.add(new BytesArray(source), null, null, XContentType.JSON); } multiGetRequestBuilder.add("test", "test", Integer.toString(i)); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index b04c7dfcd84f8..4d8e0f544c458 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.ingest; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -127,7 +128,7 @@ public void testToXContent() throws IOException { builder.startObject(); writeableIngestDocument.toXContent(builder, EMPTY_PARAMS); builder.endObject(); - Map toXContentMap = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map toXContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); Map toXContentDoc = (Map) toXContentMap.get("doc"); Map toXContentSource = (Map) toXContentDoc.get("_source"); diff --git a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 552e3801954aa..bf04fe590805d 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; 
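// Editor's note -- illustration only, not part of the patch; kept entirely in comments
// because it lands inside an import hunk. The test-side shape of this migration, which
// the ClearScrollRequestTests hunk below shows verbatim: serialize with toXContent,
// then assert on Strings.toString(builder) where builder.string() was used before:
//
//     try (XContentBuilder builder = JsonXContent.contentBuilder()) {
//         clearScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
//         assertEquals("{\"scroll_id\":[\"SCROLL_ID\"]}", Strings.toString(builder));
//     }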
diff --git a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java
index 552e3801954aa..bf04fe590805d 100644
--- a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -74,7 +75,7 @@ public void testToXContent() throws IOException {
            + "\"minimum_wire_compatibility_version\":\"" + version.minimumCompatibilityVersion().toString() + "\","
            + "\"minimum_index_compatibility_version\":\"" + version.minimumIndexCompatibilityVersion().toString() + "\"},"
            + "\"tagline\":\"You Know, for Search\""
-           + "}", builder.string());
+           + "}", Strings.toString(builder));
    }

    @Override
diff --git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java
index 6414e510069a0..9aaf10930b091 100644
--- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.action.search;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -84,7 +85,7 @@ public void testToXContent() throws IOException {
        clearScrollRequest.addScrollId("SCROLL_ID");
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            clearScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
-           assertEquals("{\"scroll_id\":[\"SCROLL_ID\"]}", builder.string());
+           assertEquals("{\"scroll_id\":[\"SCROLL_ID\"]}", Strings.toString(builder));
        }
    }
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java
index c542f3b72f9f2..e96a0975fd46c 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java
@@ -135,7 +135,7 @@ public void testToXContent() throws IOException {
                    "\"col\":7" +
                "}" +
            "}" +
-       "]}", builder.string());
+       "]}", Strings.toString(builder));
    }
 }
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
index f40819ec08958..466f56a792491 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.action.search;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -104,7 +105,7 @@ public void testToXContent() throws IOException {
        searchScrollRequest.scroll("1m");
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            searchScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
-           assertEquals("{\"scroll_id\":\"SCROLL_ID\",\"scroll\":\"1m\"}", builder.string());
+           assertEquals("{\"scroll_id\":\"SCROLL_ID\",\"scroll\":\"1m\"}", Strings.toString(builder));
        }
    }
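All four request tests above share the same golden-string shape, sketched here with `ClearScrollRequest`, whose expected JSON appears verbatim in the hunk:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ClearScrollToXContentSketch {
    public static void main(String[] args) throws Exception {
        ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
        clearScrollRequest.addScrollId("SCROLL_ID");
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            clearScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
            // Strings.toString closes the builder and returns what was written
            System.out.println(Strings.toString(builder)); // {"scroll_id":["SCROLL_ID"]}
        }
    }
}
--------------------------------------------------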
diff --git a/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java b/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java
index 28099506e08e6..8241628b55021 100644
--- a/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -96,7 +97,7 @@ public void testFromXContent() throws IOException {
            .endObject();
        builder = shuffleXContent(builder);
        DefaultShardOperationFailedException parsed;
-       try(XContentParser parser = createParser(xContent, builder.bytes())) {
+       try(XContentParser parser = createParser(xContent, BytesReference.bytes(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            parsed = DefaultShardOperationFailedException.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
index fd61bbc6d71e9..9ec91f4e45091 100644
--- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java
@@ -82,7 +82,7 @@ public void testShardInfoToAndFromXContent() throws IOException {
        // Shuffle the XContent fields
        if (randomBoolean()) {
            try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
-               originalBytes = shuffleXContent(parser, randomBoolean()).bytes();
+               originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean()));
            }
        }
diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
index 36266026504a9..ddf4f32c2c2b4 100644
--- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
@@ -51,7 +51,6 @@
 import org.junit.Before;

 import java.io.IOException;
-import java.nio.file.Path;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -476,7 +475,7 @@ public void testToAndFromXContent() throws IOException {

        if (randomBoolean()) {
            try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
-               originalBytes = shuffleXContent(parser, randomBoolean()).bytes();
+               originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean()));
            }
        }
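These hunks all touch the same shuffled round-trip idiom. A condensed sketch, assuming it lives in an `ESTestCase` subclass (which supplies `createParser`, `shuffleXContent`, `randomBoolean` and `randomFrom`); the `"foo"` field is illustrative:

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

public class ShuffledRoundTripTests extends ESTestCase {
    public void testShuffledRoundTrip() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        BytesReference originalBytes = BytesReference.bytes(
                XContentFactory.contentBuilder(xContentType).startObject().field("foo", "bar").endObject());
        // Shuffle the XContent fields
        if (randomBoolean()) {
            try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
                // shuffleXContent returns a fresh XContentBuilder, so it is
                // serialized with the same static helper as any other builder
                originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean()));
            }
        }
        // ...parse originalBytes back and compare against the original object
    }
}
--------------------------------------------------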
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
index a315cdc820678..307b9716fa3f9 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
@@ -36,6 +36,7 @@
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
@@ -354,7 +355,7 @@ private CompressedXContent createMapping() throws IOException {
    }

    private CompressedXContent createMapping(String fieldType) throws IOException {
-       final String mapping = XContentFactory.jsonBuilder()
+       final String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startObject("properties")
@@ -363,7 +364,7 @@ private CompressedXContent createMapping(String fieldType) throws IOException {
                    .endObject()
                .endObject()
            .endObject()
-           .endObject().string();
+           .endObject());
        return new CompressedXContent(mapping);
    }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
index ef801dad28eb4..344b6dc42caed 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -73,7 +74,7 @@ public void testXContent() throws IOException {
            assertThat(Strings.toString(graveyard, false, true), containsString(XContentBuilder.DEFAULT_DATE_PRINTER.print(graveyard.getTombstones().get(0).getDeleteDateInMillis())));
        }
-       XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
        parser.nextToken(); // the beginning of the parser
        assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard));
    }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
index 3f21bd29ff3b8..5a206407648b6 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.cluster.metadata;

+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.set.Sets;
@@ -56,7 +57,7 @@ public void testIndexMetaDataSerialization() throws IOException {
        builder.startObject();
        metaData.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
-       XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
        final IndexMetaData fromXContentMeta = IndexMetaData.fromXContent(parser);
        assertEquals(metaData, fromXContentMeta);
        assertEquals(metaData.hashCode(), fromXContentMeta.hashCode());
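The `createMapping` change is representative of mapping fixtures: the whole fluent chain is wrapped instead of ending in `.string()`. The middle of the mapping is elided in the hunk; the `"field"` property below is only a guess at its shape:

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;

public class MappingSketch {
    static CompressedXContent createMapping(String fieldType) throws IOException {
        final String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startObject("properties")
                        .startObject("field")          // hypothetical: elided in the hunk
                            .field("type", fieldType)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject());
        return new CompressedXContent(mapping);
    }
}
--------------------------------------------------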
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java
index 0d2443c42021e..d6eb00c499e9c 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java
@@ -113,7 +113,7 @@ public void testIndexTemplateMetaDataXContentRoundTrip() throws Exception {
            builder.startObject();
            IndexTemplateMetaData.Builder.toXContent(indexTemplateMetaData, builder, params);
            builder.endObject();
-           templateBytesRoundTrip = builder.bytes();
+           templateBytesRoundTrip = BytesReference.bytes(builder);
        }

        final IndexTemplateMetaData indexTemplateMetaDataRoundTrip;
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
index 2e670666c61a2..74d13a2aab046 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
@@ -169,12 +169,12 @@ public void testResolveIndexRouting() {
    }

    public void testUnknownFieldClusterMetaData() throws IOException {
-       BytesReference metadata = JsonXContent.contentBuilder()
+       BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder()
            .startObject()
                .startObject("meta-data")
                    .field("random", "value")
                .endObject()
-           .endObject().bytes();
+           .endObject());
        XContentParser parser = createParser(JsonXContent.jsonXContent, metadata);
        try {
            MetaData.Builder.fromXContent(parser);
@@ -185,12 +185,12 @@
    }

    public void testUnknownFieldIndexMetaData() throws IOException {
-       BytesReference metadata = JsonXContent.contentBuilder()
+       BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder()
            .startObject()
                .startObject("index_name")
                    .field("random", "value")
                .endObject()
-           .endObject().bytes();
+           .endObject());
        XContentParser parser = createParser(JsonXContent.jsonXContent, metadata);
        try {
            IndexMetaData.Builder.fromXContent(parser);
@@ -219,7 +219,7 @@ public void testXContentWithIndexGraveyard() throws IOException {
        builder.startObject();
        originalMeta.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
-       XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
        final MetaData fromXContentMeta = MetaData.fromXContent(parser);
        assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard()));
    }
@@ -321,7 +321,7 @@ public void testFindMappingsWithFilters() throws IOException {
                    Map doc = (Map)stringObjectMap.get("_doc");
                    try (XContentBuilder builder = JsonXContent.contentBuilder()) {
                        builder.map(doc);
-                       mapping = builder.string();
+                       mapping = Strings.toString(builder);
                    }
                }
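Note the shape of the `MetaDataTests` rewrite: because `bytes()` no longer terminates the chain, the entire builder expression becomes the argument of `BytesReference.bytes(...)`, closing parenthesis included. A minimal sketch:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class WrappedChainSketch {
    public static void main(String[] args) throws Exception {
        // Was: JsonXContent.contentBuilder()...endObject().bytes();
        BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder()
            .startObject()
                .startObject("meta-data")
                    .field("random", "value")
                .endObject()
            .endObject());
        System.out.println(metadata.utf8ToString());
    }
}
--------------------------------------------------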
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
index 949d4f350080c..8a62e14ba579a 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
@@ -114,7 +114,7 @@ public void testSerialization() throws IOException {
        if (randomBoolean()) {
            allocationId = AllocationId.newRelocation(allocationId);
        }
-       BytesReference bytes = allocationId.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).bytes();
+       BytesReference bytes = BytesReference.bytes(allocationId.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS));
        AllocationId parsedAllocationId = AllocationId.fromXContent(createParser(JsonXContent.jsonXContent, bytes));
        assertEquals(allocationId, parsedAllocationId);
    }
diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
index fc987c7e3caf3..98a7fe514543f 100644
--- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
@@ -27,6 +27,7 @@
 import com.vividsolutions.jts.geom.Polygon;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -292,7 +293,7 @@ public void testParseInvalidMultipoint() throws IOException {

    public void testParseInvalidMultiPolygon() throws IOException {
        // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring)
-       String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
+       String multiPolygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
                .startArray("coordinates")
                .startArray()//one poly (with two holes)
                .startArray()
@@ -318,7 +319,7 @@ public void testParseInvalidMultiPolygon() throws IOException {
                .endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson);
        parser.nextToken();
@@ -327,7 +328,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
-       String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -338,7 +339,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -347,7 +348,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 2: ccw poly crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
@@ -358,7 +359,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
                .startArray().value(-177.0).value(10.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -367,7 +368,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);

        // test 3: cw poly not crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -378,7 +379,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -387,7 +388,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 4: cw poly crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -398,7 +399,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException {
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -409,7 +410,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
-       String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -426,7 +427,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
                .startArray().value(-172.0).value(8.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -435,7 +436,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 2: ccw poly crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
@@ -452,7 +453,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -461,7 +462,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
        ElasticsearchGeoAssertions.assertMultiPolygon(shape);

        // test 3: cw poly not crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -478,7 +479,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
                .startArray().value(177.0).value(8.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -487,7 +488,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 4: cw poly crossing dateline
-       polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(183.0).value(10.0).endArray()
@@ -504,7 +505,7 @@ public void testParseOGCPolygonWithHoles() throws IOException {
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
@@ -519,83 +520,83 @@ public void testParseInvalidPolygon() throws IOException {
     * per the GeoJSON specification
     */
        // test case 1: create an invalid polygon with only 2 points
-       String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       String invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-74.011).value(40.753).endArray()
                .startArray().value(-75.022).value(41.783).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());
        XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 2: create an invalid polygon with only 1 point
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-74.011).value(40.753).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 3: create an invalid polygon with 0 points
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 4: create an invalid polygon with null value points
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().nullValue().nullValue().endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class);

        // test case 5: create an invalid polygon with 1 invalid LinearRing
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .nullValue().nullValue()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class);

        // test case 6: create an invalid polygon with 0 LinearRings
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates").endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 7: create an invalid polygon with 0 LinearRings
-       invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
+       invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon")
                .startArray("coordinates")
                .startArray().value(-74.011).value(40.753).endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        parser = createParser(JsonXContent.jsonXContent, invalidPoly);
        parser.nextToken();
@@ -650,7 +651,7 @@ public void testParsePolygonWithHole() throws IOException {

    public void testParseSelfCrossingPolygon() throws IOException {
        // test self crossing ccw poly not crossing dateline
-       String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+       String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
@@ -662,7 +663,7 @@ public void testParseSelfCrossingPolygon() throws IOException {
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
-               .endObject().string();
+               .endObject());

        XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson);
        parser.nextToken();
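Every GeoJSON fixture in this file follows the same recipe; reduced here to the smallest closed ring (a triangle, with illustrative coordinates) to show where the `Strings.toString(...)` wrapper opens and closes:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentFactory;

public class GeoJsonFixtureSketch {
    public static void main(String[] args) throws Exception {
        String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder()
                .startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray() // ring closes on its first point
                .endArray()
                .endArray()
                .endObject());
        System.out.println(polygonGeoJson);
    }
}
--------------------------------------------------

The tests then feed this string to `createParser(JsonXContent.jsonXContent, polygonGeoJson)` and hand the parser to the shape-parsing assertions, as in the hunks above.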
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java
index 69b69a2fcf61d..e6b31d95c85d1 100644
--- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java
+++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java
@@ -20,6 +20,7 @@
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -158,7 +159,7 @@ private void testFiltering(Settings source, Settings filtered, String... pattern
        xContentBuilder.startObject();
        source.toXContent(xContentBuilder, request);
        xContentBuilder.endObject();
-       String filteredSettingsString = xContentBuilder.string();
+       String filteredSettingsString = Strings.toString(xContentBuilder);
        filteredSettings = Settings.builder().loadFromSource(filteredSettingsString, xContentBuilder.contentType()).build();
        assertThat(filteredSettings, equalTo(filtered));
    }
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
index 52502acb61fe4..d9cecdd604c88 100644
--- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
+++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -588,20 +589,20 @@ public void testToXContent() throws IOException {
        builder.startObject();
        test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
        builder.endObject();
-       assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());
+       assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", Strings.toString(builder));

        test = Settings.builder().putList("foo.bar", "1", "2", "3").build();
        builder = XContentBuilder.builder(XContentType.JSON.xContent());
        builder.startObject();
        test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
        builder.endObject();
-       assertEquals("{\"foo\":{\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());
+       assertEquals("{\"foo\":{\"bar\":[\"1\",\"2\",\"3\"]}}", Strings.toString(builder));

        builder = XContentBuilder.builder(XContentType.JSON.xContent());
        builder.startObject();
        test.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
        builder.endObject();
-       assertEquals("{\"foo.bar\":[\"1\",\"2\",\"3\"]}", builder.string());
+       assertEquals("{\"foo.bar\":[\"1\",\"2\",\"3\"]}", Strings.toString(builder));
    }

    public void testLoadEmptyStream() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
index dbb47764158c9..e74d3b7acea97 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
@@ -85,13 +85,13 @@ public void testContentType() throws IOException {
    }

    public void testStartEndObject() throws IOException {
-       expectUnclosedException(() -> builder().startObject().bytes());
+       expectUnclosedException(() -> BytesReference.bytes(builder().startObject()));
        expectUnclosedException(() -> builder().startObject().close());
-       expectUnclosedException(() -> builder().startObject().string());
+       expectUnclosedException(() -> Strings.toString(builder().startObject()));

-       expectObjectException(() -> builder().endObject().bytes());
+       expectObjectException(() -> BytesReference.bytes(builder().endObject()));
        expectObjectException(() -> builder().endObject().close());
-       expectObjectException(() -> builder().endObject().string());
+       expectObjectException(() -> Strings.toString(builder().endObject()));

        expectValueException(() -> builder().startObject("foo").endObject());
        expectNonNullFieldException(() -> builder().startObject().startObject(null));
@@ -109,13 +109,13 @@
    }

    public void testStartEndArray() throws IOException {
-       expectUnclosedException(() -> builder().startArray().bytes());
+       expectUnclosedException(() -> BytesReference.bytes(builder().startArray()));
        expectUnclosedException(() -> builder().startArray().close());
-       expectUnclosedException(() -> builder().startArray().string());
+       expectUnclosedException(() -> Strings.toString(builder().startArray()));

-       expectArrayException(() -> builder().endArray().bytes());
+       expectArrayException(() -> BytesReference.bytes(builder().endArray()));
        expectArrayException(() -> builder().endArray().close());
-       expectArrayException(() -> builder().endArray().string());
+       expectArrayException(() -> Strings.toString(builder().endArray()));

        expectValueException(() -> builder().startArray("foo").endObject());
        expectFieldException(() -> builder().startObject().startArray().endArray().endObject());
@@ -133,17 +133,17 @@
    }

    public void testField() throws IOException {
-       expectValueException(() -> builder().field("foo").bytes());
-       expectNonNullFieldException(() -> builder().field(null).bytes());
-       expectUnclosedException(() -> builder().startObject().field("foo").bytes());
+       expectValueException(() -> BytesReference.bytes(builder().field("foo")));
+       expectNonNullFieldException(() -> BytesReference.bytes(builder().field(null)));
+       expectUnclosedException(() -> BytesReference.bytes(builder().startObject().field("foo")));

        assertResult("{'foo':'bar'}", () -> builder().startObject().field("foo").value("bar").endObject());
    }

    public void testNullField() throws IOException {
-       expectValueException(() -> builder().nullField("foo").bytes());
-       expectNonNullFieldException(() -> builder().nullField(null).bytes());
-       expectUnclosedException(() -> builder().startObject().nullField("foo").bytes());
+       expectValueException(() -> BytesReference.bytes(builder().nullField("foo")));
+       expectNonNullFieldException(() -> BytesReference.bytes(builder().nullField(null)));
+       expectUnclosedException(() -> BytesReference.bytes(builder().startObject().nullField("foo")));

        assertResult("{'foo':null}", () -> builder().startObject().nullField("foo").endObject());
    }
@@ -272,7 +272,7 @@ public void testBinaryField() throws Exception {
        assertResult("{'binary':null}", () -> builder().startObject().field("binary", (byte[]) null).endObject());

        final byte[] randomBytes = randomBytes();
-       BytesReference bytes = builder().startObject().field("binary", randomBytes).endObject().bytes();
+       BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject());

        XContentParser parser = createParser(xcontentType().xContent(), bytes);
        assertSame(parser.nextToken(), Token.START_OBJECT);
@@ -288,7 +288,7 @@ public void testBinaryValue() throws Exception {
        assertResult("{'binary':null}", () -> builder().startObject().field("binary").value((byte[]) null).endObject());

        final byte[] randomBytes = randomBytes();
-       BytesReference bytes = builder().startObject().field("binary").value(randomBytes).endObject().bytes();
+       BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject());

        XContentParser parser = createParser(xcontentType().xContent(), bytes);
        assertSame(parser.nextToken(), Token.START_OBJECT);
@@ -315,7 +315,7 @@ public void testBinaryValueWithOffsetLength() throws Exception {
        }
        builder.endObject();

-       XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
+       XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "bin");
@@ -337,7 +337,7 @@ public void testBinaryUTF8() throws Exception {
        }
        builder.endObject();

-       XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
+       XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "utf8");
@@ -355,7 +355,7 @@ public void testText() throws Exception {
        final BytesReference random = new BytesArray(randomBytes());
        XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject();

-       XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
+       XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "text");
@@ -1015,7 +1015,8 @@ public void testNamedObject() throws IOException {
                new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text())));
        XContentBuilder b = XContentBuilder.builder(xcontentType().xContent());
        b.value("test");
-       XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, b.bytes().streamInput());
+       XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE,
+           BytesReference.bytes(b).streamInput());
        assertEquals(test1, p.namedObject(Object.class, "test1", null));
        assertEquals(test2, p.namedObject(Object.class, "test2", null));
        assertEquals(test2, p.namedObject(Object.class, "deprecated", null));
@@ -1085,7 +1086,7 @@ public static Matcher equalToJson(String json) {

    private static void assertResult(String expected, Builder builder) throws IOException {
        // Build the XContentBuilder, convert its bytes to JSON and check it matches
-       assertThat(XContentHelper.convertToJson(builder.build().bytes(), randomBoolean()), equalToJson(expected));
+       assertThat(XContentHelper.convertToJson(BytesReference.bytes(builder.build()), randomBoolean()), equalToJson(expected));
    }

    private static byte[] randomBytes() throws Exception {
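The `testNamedObject` hunk shows the one place where a builder's bytes are consumed as a stream. Outside the test framework the same parser would be obtained roughly like this (registry left empty for the sketch):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ParserFromBuilderSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder b = JsonXContent.contentBuilder();
        b.value("test");
        // Was: b.bytes().streamInput(); the builder no longer hands out its bytes directly
        try (XContentParser p = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE,
                BytesReference.bytes(b).streamInput())) {
            System.out.println(p.nextToken()); // VALUE_STRING
        }
    }
}
--------------------------------------------------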
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java
index 7e5bdbd017449..9f24861fdaa0e 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParserTests.NamedObject;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -78,7 +79,7 @@ public void testRandomOrder() throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
        expected.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder = shuffleXContent(builder);
-       BytesReference bytes = builder.bytes();
+       BytesReference bytes = BytesReference.bytes(builder);
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) {
            HasCtorArguments parsed = randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null);
            assertEquals(expected.animal, parsed.animal);
@@ -91,7 +92,7 @@
            assertEquals(expected.d, parsed.d);
        } catch (Exception e) {
            // It is convenient to decorate the error message with the json
-           throw new Exception("Error parsing: [" + builder.string() + "]", e);
+           throw new Exception("Error parsing: [" + Strings.toString(builder) + "]", e);
        }
    }
@@ -428,7 +429,7 @@ private static void declareSetters(ConstructingObjectParser
    }

    public void testParseNamedObject() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": {\n"
                + "  \"a\": {}"
                + "},\"named_in_constructor\": {\n"
@@ -443,7 +444,7 @@ public void testParseNamedObject() throws IOException {
    }

    public void testParseNamedObjectInOrder() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {\"a\": {}}"
                + "],\"named_in_constructor\": [\n"
@@ -458,7 +459,7 @@ public void testParseNamedObjectInOrder() throws IOException {
    }

    public void testParseNamedObjectTwoFieldsInArray() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {\"a\": {}, \"b\": {}}"
                + "],\"named_in_constructor\": [\n"
@@ -472,7 +473,7 @@ public void testParseNamedObjectTwoFieldsInArray() throws IOException {
    }

    public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {\"a\": {}}"
                + "],\"named_in_constructor\": [\n"
@@ -486,7 +487,7 @@ public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOExcept
    }

    public void testParseNamedObjectNoFieldsInArray() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {}"
                + "],\"named_in_constructor\": [\n"
@@ -500,7 +501,7 @@ public void testParseNamedObjectNoFieldsInArray() throws IOException {
    }

    public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {\"a\": {}}"
                + "],\"named_in_constructor\": [\n"
@@ -514,7 +515,7 @@ public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOExcepti
    }

    public void testParseNamedObjectJunkInArray() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  \"junk\""
                + "],\"named_in_constructor\": [\n"
@@ -528,7 +529,7 @@ public void testParseNamedObjectJunkInArray() throws IOException {
    }

    public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException {
-       XContentParser parser = createParser(JsonXContent.jsonXContent, 
+       XContentParser parser = createParser(JsonXContent.jsonXContent,
                "{\"named\": [\n"
                + "  {\"a\": {}}"
                + "],\"named_in_constructor\": [\n"
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
index 7b6f14518fecc..6f0c0208b9c75 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
@@ -21,6 +21,8 @@
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -358,7 +360,7 @@ public void testAllVariants() throws IOException {
        }
        builder.field("string_or_null", nullValue ? null : "5");
        builder.endObject();
-       XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder));
        class TestStruct {
            int int_field;
            long long_field;
@@ -533,7 +535,7 @@ public void testIgnoreUnknownFields() throws IOException {
        }
        b.endObject();
        b = shuffleXContent(b);
-       XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));

        class TestStruct {
            public String test;
@@ -557,7 +559,7 @@ public void testIgnoreUnknownObjects() throws IOException {
        }
        b.endObject();
        b = shuffleXContent(b);
-       XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));

        class TestStruct {
            public String test;
@@ -585,7 +587,7 @@ public void testIgnoreUnknownArrays() throws IOException {
        }
        b.endObject();
        b = shuffleXContent(b);
-       XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b));
        class TestStruct {
            public String test;
        }
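The `testGuessType` hunk that begins here mixes both helpers: content-type auto-detection runs on the serialized bytes for every format, but on the string form only for text formats. A sketch with JSON (any text `XContentType` behaves the same):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

public class GuessTypeSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("field1", "value1").endObject();
        BytesReference bytes = BytesReference.bytes(builder);
        System.out.println(XContentFactory.xContentType(bytes));                // JSON
        System.out.println(XContentFactory.xContentType(bytes.streamInput()));  // JSON
        // CBOR and SMILE are binary, so only the bytes overloads apply to them
        System.out.println(XContentFactory.xContentType(bytes.utf8ToString())); // JSON
    }
}
--------------------------------------------------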
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
index 8a3d0ef9ccf01..65489a997ac7c 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java
@@ -21,8 +21,9 @@
 import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
 import com.fasterxml.jackson.dataformat.smile.SmileConstants;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.test.ESTestCase;

 import java.io.ByteArrayInputStream;
@@ -53,12 +54,12 @@ private void testGuessType(XContentType type) throws IOException {
        builder.field("field1", "value1");
        builder.endObject();

-       assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type));
-       assertThat(XContentFactory.xContentType(builder.bytes().streamInput()), equalTo(type));
+       assertThat(XContentFactory.xContentType(BytesReference.bytes(builder)), equalTo(type));
+       assertThat(XContentFactory.xContentType(BytesReference.bytes(builder).streamInput()), equalTo(type));

        // CBOR is binary, cannot use String
        if (type != XContentType.CBOR && type != XContentType.SMILE) {
-           assertThat(XContentFactory.xContentType(builder.string()), equalTo(type));
+           assertThat(XContentFactory.xContentType(Strings.toString(builder)), equalTo(type));
        }
    }
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java
index 8e3246d8b8a59..1f38116f2f7c7 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java
@@ -21,6 +21,8 @@
 import com.fasterxml.jackson.core.JsonParseException;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.ESTestCase;

@@ -58,7 +60,7 @@ public void testFloat() throws IOException {
        builder.endObject();

        final Number number;
-       try (XContentParser parser = createParser(xContentType.xContent(), builder.bytes())) {
+       try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals(field, parser.currentName());
@@ -244,7 +246,7 @@ public void testEmptyList() throws IOException {
                .startArray("some_array")
                .endArray().endObject();

-       try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
+       try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
@@ -264,7 +266,7 @@ public void testSimpleList() throws IOException {
                .value(0)
                .endArray().endObject();

-       try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
+       try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
@@ -284,7 +286,7 @@ public void testNestedList() throws IOException {
                .startArray().value(2).endArray()
                .endArray().endObject();

-       try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
+       try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
@@ -305,7 +307,7 @@ public void testNestedMapInList() throws IOException {
                .startObject().endObject()
                .endArray().endObject();

-       try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) {
+       try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals("some_array", parser.currentName());
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
index d3a5e44a89efd..038d8f73c8ab2 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
@@ -20,7 +20,9 @@
 package org.elasticsearch.common.xcontent.builder;

 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -93,61 +95,61 @@ public void testRaw() throws IOException {
        {
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            xContentBuilder.startObject();
-           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
+           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
            xContentBuilder.endObject();
-           assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"}}"));
+           assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"}}"));
        }
        {
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            xContentBuilder.startObject();
-           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
-           xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}"));
+           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
+           xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput());
            xContentBuilder.endObject();
-           assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}"));
+           assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}"));
        }
        {
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            xContentBuilder.startObject();
            xContentBuilder.field("test", "value");
-           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
+           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
            xContentBuilder.endObject();
-           assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}"));
+           assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}"));
        }
        {
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            xContentBuilder.startObject();
            xContentBuilder.field("test", "value");
-           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
+           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
            xContentBuilder.field("test1", "value1");
            xContentBuilder.endObject();
-           assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
+           assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
        }
        {
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            xContentBuilder.startObject();
            xContentBuilder.field("test", "value");
-           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
-           xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}"));
+           xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
+           xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput());
            xContentBuilder.field("test1", "value1");
            xContentBuilder.endObject();
-           assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
+           assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
        }
    }

    public void testSimpleGenerator() throws Exception {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("test", "value").endObject();
-       assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}"));

        builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("test", "value").endObject();
-       assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}"));
    }

    public void testOverloadedList() throws Exception {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("test", Arrays.asList("1", "2")).endObject();
-       assertThat(builder.string(), equalTo("{\"test\":[\"1\",\"2\"]}"));
+       assertThat(Strings.toString(builder), equalTo("{\"test\":[\"1\",\"2\"]}"));
    }

    public void testWritingBinaryToStream() throws Exception {
@@ -169,7 +171,7 @@ public void testWritingBinaryToStream() throws Exception {
    public void testByteConversion() throws Exception {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("test_name", (Byte)(byte)120).endObject();
-       assertThat(builder.bytes().utf8ToString(), equalTo("{\"test_name\":120}"));
+       assertThat(BytesReference.bytes(builder).utf8ToString(), equalTo("{\"test_name\":120}"));
    }

    public void testDateTypesConversion() throws Exception {
@@ -179,23 +181,23 @@ public void testDateTypesConversion() throws Exception {
        String expectedCalendar = XContentBuilder.DEFAULT_DATE_PRINTER.print(calendar.getTimeInMillis());
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("date", date).endObject();
-       assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}"));

        builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject().field("calendar", calendar).endObject();
-       assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));

        builder = XContentFactory.contentBuilder(XContentType.JSON);
        Map map = new HashMap<>();
        map.put("date", date);
        builder.map(map);
-       assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}"));

        builder = XContentFactory.contentBuilder(XContentType.JSON);
        map = new HashMap<>();
        map.put("calendar", calendar);
        builder.map(map);
-       assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));
+       assertThat(Strings.toString(builder), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));
    }

    public void testCopyCurrentStructure() throws Exception {
@@ -214,7 +216,7 @@ public void testCopyCurrentStructure() throws Exception {

        builder.field("fakefield", terms).endObject().endObject().endObject();

-       XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+       XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));

        XContentBuilder filterBuilder = null;
        XContentParser.Token token;
@@ -236,7 +238,7 @@
        }
        assertNotNull(filterBuilder);

-       parser = createParser(JsonXContent.jsonXContent, filterBuilder.bytes());
+       parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder));
        assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
        assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
        assertThat(parser.currentName(), equalTo("terms"));
@@ -274,7 +276,7 @@ private void checkPathSerialization(Path path) throws IOException {
        XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        stringBuilder.startObject().field("file", path.toString()).endObject();

-       assertThat(pathBuilder.string(), equalTo(stringBuilder.string()));
+       assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder)));
    }

    public void testHandlingOfPath_StringName() throws IOException {
@@ -287,7 +289,7 @@ public void testHandlingOfPath_StringName() throws IOException {
        XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        stringBuilder.startObject().field(name, path.toString()).endObject();

-       assertThat(pathBuilder.string(), equalTo(stringBuilder.string()));
+       assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder)));
    }

    public void testHandlingOfCollectionOfPaths() throws IOException {
@@ -299,13 +301,13 @@ public void testHandlingOfCollectionOfPaths() throws IOException {
        XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        stringBuilder.startObject().field("file", Arrays.asList(path.toString())).endObject();

-       assertThat(pathBuilder.string(), equalTo(stringBuilder.string()));
+       assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder)));
    }

    public void testIndentIsPlatformIndependent() throws IOException {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
        builder.startObject().field("test","foo").startObject("foo").field("foobar", "boom").endObject().endObject();
-       String string = builder.string();
+       String string = Strings.toString(builder);
        assertEquals("{\n" +
                "  \"test\" : \"foo\",\n" +
                "  \"foo\" : {\n" +
@@ -315,7 +317,7 @@

        builder = XContentFactory.contentBuilder(XContentType.YAML).prettyPrint();
        builder.startObject().field("test","foo").startObject("foo").field("foobar", "boom").endObject().endObject();
-       string = builder.string();
+       string = Strings.toString(builder);
        assertEquals("---\n" +
                "test: \"foo\"\n" +
                "foo:\n" +
@@ -325,7 +327,7 @@

    public void testRenderGeoPoint() throws IOException {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
builder.startObject().field("foo").value(new GeoPoint(1,2)).endObject(); - String string = builder.string(); + String string = Strings.toString(builder); assertEquals("{\n" + " \"foo\" : {\n" + " \"lat\" : 1.0,\n" + diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java index b95ec03f94c14..146b83c8c17a9 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java @@ -28,7 +28,7 @@ public class CborXContentParserTests extends ESTestCase { public void testEmptyValue() throws IOException { - BytesReference ref = XContentFactory.cborBuilder().startObject().field("field", "").endObject().bytes(); + BytesReference ref = BytesReference.bytes(XContentFactory.cborBuilder().startObject().field("field", "").endObject()); for (int i = 0; i < 2; i++) { // Running this part twice triggers the issue. diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java index 9d95ea6013f3a..c12376bd5516c 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java @@ -1085,12 +1085,12 @@ public void testRawField() throws Exception { .endObject(); Builder sampleWithRaw = builder -> { - BytesReference raw = XContentBuilder.builder(builder.contentType().xContent()) - .startObject() - .field("content", "hello world!") - .endObject() - .bytes(); - return builder.startObject().field("foo", 0).rawField("raw", raw).endObject(); + BytesReference raw = BytesReference + .bytes(XContentBuilder.builder(builder.contentType().xContent()) + .startObject() + .field("content", "hello world!") + .endObject()); + return builder.startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject(); }; // Test method: rawField(String fieldName, BytesReference content) @@ -1101,11 +1101,11 @@ public void testRawField() throws Exception { testFilter(expectedRawFieldNotFiltered, sampleWithRaw, emptySet(), singleton("f*")); sampleWithRaw = builder -> { - BytesReference raw = XContentBuilder.builder(builder.contentType().xContent()) - .startObject() - . field("content", "hello world!") - .endObject() - .bytes(); + BytesReference raw = BytesReference + .bytes(XContentBuilder.builder(builder.contentType().xContent()) + .startObject() + . 
field("content", "hello world!") + .endObject()); return builder.startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject(); }; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java index ce092e6f2123d..4acb497c46bd9 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent.support; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -77,7 +78,7 @@ public void testExtractValue() throws Exception { .endObject(); Map map; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("test", map).toString(), equalTo("value")); @@ -88,7 +89,7 @@ public void testExtractValue() throws Exception { .startObject("path1").startObject("path2").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("path1.path2.test", map).toString(), equalTo("value")); @@ -110,7 +111,7 @@ public void testExtractValue() throws Exception { .startObject("path1").array("test", "value1", "value2").endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } @@ -129,7 +130,7 @@ public void testExtractValue() throws Exception { .endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } @@ -145,7 +146,7 @@ public void testExtractValue() throws Exception { builder = XContentFactory.jsonBuilder().startObject() .field("xxx.yyy", "value") .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("xxx.yyy", map).toString(), equalTo("value")); @@ -154,7 +155,7 @@ public void testExtractValue() throws Exception { .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("path1.xxx.path2.yyy.test", map).toString(), equalTo("value")); @@ -166,7 +167,7 @@ public void testExtractRawValue() throws Exception { .endObject(); Map map; - try (XContentParser 
parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("test", map).get(0).toString(), equalTo("value")); @@ -175,7 +176,7 @@ public void testExtractRawValue() throws Exception { .field("test.me", "value") .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("test.me", map).get(0).toString(), equalTo("value")); @@ -184,7 +185,7 @@ public void testExtractRawValue() throws Exception { .startObject("path1").startObject("path2").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("path1.path2.test", map).get(0).toString(), equalTo("value")); @@ -193,7 +194,7 @@ public void testExtractRawValue() throws Exception { .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("path1.xxx.path2.yyy.test", map).get(0).toString(), equalTo("value")); @@ -352,7 +353,7 @@ public void testThatFilterIncludesEmptyObjectWhenUsingIncludes() throws Exceptio .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj"}, Strings.EMPTY_ARRAY); assertThat(mapTuple.v2(), equalTo(filteredSource)); @@ -364,7 +365,7 @@ public void testThatFilterIncludesEmptyObjectWhenUsingExcludes() throws Exceptio .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"nonExistingField"}); assertThat(mapTuple.v2(), equalTo(filteredSource)); @@ -377,7 +378,7 @@ public void testNotOmittingObjectsWithExcludedProperties() throws Exception { .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"obj.f1"}); assertThat(filteredSource.size(), equalTo(1)); @@ -397,7 +398,7 @@ public void testNotOmittingObjectWithNestedExcludedObject() throws Exception { .endObject(); // implicit include - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"*.obj2"}); 
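// A sketch of the filter pattern these hunks migrate, using hypothetical names
// (sourceBuilder/tuple/filtered) and assuming convertToMap's
// Tuple<XContentType, Map<String, Object>> return type: the builder is now
// wrapped with BytesReference.bytes(...) in place of the removed
// builder.bytes() before conversion; the filtering itself is untouched.
XContentBuilder sourceBuilder = XContentFactory.jsonBuilder().startObject()
    .startObject("obj").field("f1", "v1").field("f2", "v2").endObject()
    .endObject();
Tuple<XContentType, Map<String, Object>> tuple =
    convertToMap(BytesReference.bytes(sourceBuilder), true, sourceBuilder.contentType());
Map<String, Object> filtered =
    XContentMapValues.filter(tuple.v2(), new String[]{"obj.f1"}, Strings.EMPTY_ARRAY); // keeps only obj.f1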
assertThat(filteredSource.size(), equalTo(1)); @@ -427,7 +428,7 @@ public void testIncludingObjectWithNestedIncludedObject() throws Exception { .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, Strings.EMPTY_ARRAY); assertThat(filteredSource.size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 3f6ec53f4f69f..1d12defe6988d 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.xcontent.support.filtering; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -69,7 +71,7 @@ public void testSingleFieldObject() throws IOException { } static void assertXContentBuilderAsString(final XContentBuilder expected, final XContentBuilder actual) { - assertThat(actual.bytes().utf8ToString(), is(expected.bytes().utf8ToString())); + assertThat(Strings.toString(actual), is(Strings.toString(expected))); } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { @@ -77,10 +79,10 @@ static void assertXContentBuilderAsBytes(final XContentBuilder expected, final X XContent xContent = XContentFactory.xContent(actual.contentType()); XContentParser jsonParser = xContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, expected.bytes().streamInput()); + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(expected).streamInput()); XContentParser testParser = xContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, actual.bytes().streamInput()); + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(actual).streamInput()); while (true) { XContentParser.Token token1 = jsonParser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 31cfa30a49eb0..e51177c318ca8 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -286,6 +287,6 @@ public void testDiscoveryStats() throws Exception { stats.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertThat(builder.string(), equalTo(expectedStatsJsonResponse)); + assertThat(Strings.toString(builder), 
equalTo(expectedStatsJsonResponse)); } } diff --git a/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java index 88bab97a1f3ff..cfcb48f4a4899 100644 --- a/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -97,10 +98,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test")); getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -149,10 +150,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); String ste1 = getResult.getSourceAsString(); - String ste2 = source("2", "test2").string(); + String ste2 = Strings.toString(source("2", "test2")); assertThat("cycle #" + i, ste1, equalTo(ste2)); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -236,11 +237,11 @@ public void testBulk() throws Exception { assertThat("cycle #" + i, getResult.isExists(), equalTo(false)); getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("2", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); getResult = client().get(getRequest("test").type("type1").id(generatedId3)).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("3", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } } diff --git a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 
23254e81060a0..154d702e7fb77 100644 --- a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -85,9 +86,9 @@ public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("appAccountIds").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject() @@ -154,9 +155,9 @@ private Map assertAndCapturePrimaryTerms(Map pre public void testSingleNodeNoFlush() throws Exception { internalCluster().startNode(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("field").field("type", "text").endObject().startObject("num").field("type", "integer").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test. 
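// A sketch of the mapping-construction idiom this file now uses, with a
// hypothetical index name "example": the whole builder chain is wrapped in
// Strings.toString(...) instead of being terminated by the removed .string()
// accessor, and the resulting JSON string is passed to addMapping(...)
// exactly as before.
String exampleMapping = Strings.toString(XContentFactory.jsonBuilder()
    .startObject().startObject("type1")
        .startObject("properties")
            .startObject("field").field("type", "text").endObject()
        .endObject()
    .endObject().endObject());
assertAcked(prepareCreate("example").addMapping("type1", exampleMapping, XContentType.JSON));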
int numberOfShards = numberOfShards(); assertAcked(prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards()) diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index 911e26528c9ad..7e3645b33ce58 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -250,11 +250,11 @@ public void testSimpleMultiGet() throws Exception { } public void testGetDocWithMultivaluedFields() throws Exception { - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test") .addMapping("type1", mapping1, XContentType.JSON)); ensureGreen(); @@ -290,16 +290,16 @@ public void testGetDocWithMultivaluedFields() throws Exception { public void testGetDocWithMultivaluedFieldsMultiTypeBWC() throws Exception { assertTrue("remove this multi type test", Version.CURRENT.before(Version.fromString("7.0.0"))); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2") + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test") .addMapping("type1", mapping1, XContentType.JSON) .addMapping("type2", mapping2, XContentType.JSON) @@ -693,7 +693,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject().endObject().endObject() .endObject().endObject().endObject())); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") @@ -714,7 +714,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); logger.info("indexing documents"); diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index bd2170dc1eee8..5000af6688f83 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TopDocs; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -60,7 +61,7 @@ public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOExc 
XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - return new CompressedXContent(builder.string()); + return new CompressedXContent(Strings.toString(builder)); } public void testBaseAsyncTask() throws InterruptedException, IOException { diff --git a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index c39a43e849060..fda181614ffa1 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -55,7 +56,7 @@ public void testXContent() throws IOException { final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); original.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); parser.nextToken(); // the beginning of the parser assertThat(Index.fromXContent(parser), equalTo(original)); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index ff5166e8f1a14..49e6c6597e180 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -45,7 +45,7 @@ public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { - BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); + BytesReference source = BytesReference.bytes(JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject()); ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", "test", null, null, source, XContentType.JSON, null); Index index = new Index("foo", "123"); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index b20972adeda08..d0ffdbe229dd6 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -80,9 +81,9 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 64dcf0a0943b9..3d811832d2951 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -39,14 +41,14 @@ protected boolean hasDocValues() { } public void testDocValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("test") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("test") .startObject("properties") .startObject("field") .field("type", "binary") .field("doc_values", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping)); @@ -62,16 +64,16 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", doc.bytes(), XContentType.JSON)); + ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1).endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "2", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "3", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -87,7 +89,7 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "4", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); IndexFieldData indexFieldData = getForField("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java index b33d98c9b0015..62f3495ee172b 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -32,7 +33,7 @@ public void testUpdateDefaultSearchAnalyzer() throws Exception { IndexService indexService = createIndex("test", Settings.builder() .put("index.analysis.analyzer.default_search.type", "custom") .put("index.analysis.analyzer.default_search.tokenizer", "standard").build()); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, indexService.mapperService().documentMapper("_doc").mapping().toString()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index fac6e4c84b18c..e4cd5731daafa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -20,27 +20,23 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -53,13 +49,13 @@ protected Collection> getPlugins() { } public void testDefaultMapping() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -69,14 +65,14 @@ public void testDefaultMapping() throws Exception { } public void testStoredValue() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .field("store", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -93,8 +89,8 @@ public void testStoredValue() throws IOException { assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", - XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); @@ -106,9 +102,9 @@ public void testStoredValue() throws IOException { public void testEmptyName() throws IOException { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "binary").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index bb839d8e57361..81c8397c036d3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -75,17 +76,17 @@ protected Collection> getPlugins() { } public void testDefaults() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", true) - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", true) + .endObject()), XContentType.JSON)); try (Directory dir = new 
RAMDirectory(); @@ -105,36 +106,36 @@ public void testDefaults() throws IOException { } public void testSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"field\":{\"type\":\"boolean\"}}", builder.string()); + assertEquals("{\"field\":{\"type\":\"boolean\"}}", Strings.toString(builder)); // now change some parameters - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "boolean") .field("doc_values", "false") .field("null_value", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = parser.parse("type", new CompressedXContent(mapping)); mapper = defaultMapper.mappers().getMapper("field"); builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string()); + assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", Strings.toString(builder)); } public void testParsesPreEs6BooleansLenient() throws IOException { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -146,18 +147,18 @@ public void testParsesPreEs6BooleansLenient() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); DocumentMapper defaultMapper = preEs6Parser.parse("type", new CompressedXContent(mapping)); String falsy = randomFrom("false", "off", "no", "0"); String truthy = randomFrom("true", "on", "yes", "1"); - ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("legacy", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", falsy) - .field("field2", truthy) - .endObject() - .bytes(), + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("legacy", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", falsy) + .field("field2", truthy) + .endObject()), XContentType.JSON)); Document doc = parsedDoc.rootDoc(); assertEquals("F", doc.getField("field1").stringValue()); @@ -177,7 +178,7 @@ public void testParsesPreEs6BooleansLenient() throws IOException { } public void testParsesEs6BooleansStrict() throws IOException { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -186,20 +187,20 @@ public void testParsesEs6BooleansStrict() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); DocumentMapper defaultMapper 
= parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() + BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() // omit "false"/"true" here as they should still be parsed correctly .field("field", randomFrom("off", "no", "0", "on", "yes", "1")) - .endObject().bytes(); - MapperParsingException ex = expectThrows(MapperParsingException.class, + .endObject()); + MapperParsingException ex = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON))); assertEquals("failed to parse [field]", ex.getMessage()); } public void testMultiFields() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "boolean") @@ -209,20 +210,20 @@ public void testMultiFields() throws IOException { .endObject() .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); - BytesReference source = XContentFactory.jsonBuilder() + BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", false) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertNotNull(doc.rootDoc().getField("field.as_string")); } public void testDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("bool1") .field("type", "boolean") @@ -236,17 +237,17 @@ public void testDocValues() throws Exception { .field("index", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("bool1", true) - .field("bool2", true) - .field("bool3", true) - .endObject() - .bytes(), + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("bool1", true) + .field("bool2", true) + .field("bool3", true) + .endObject()), XContentType.JSON)); Document doc = parsedDoc.rootDoc(); IndexableField[] fields = doc.getFields("bool1"); @@ -263,9 +264,9 @@ public void testDocValues() throws Exception { public void testEmptyName() throws IOException { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new 
CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java index ac14f2905cf3b..09394b396679f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java @@ -19,26 +19,27 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; public class CamelCaseFieldNameTests extends ESSingleNodeTestCase { public void testCamelCaseFieldNameStaysAsIs() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper documentMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() - .field("thisIsCamelCase", "value1") - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .field("thisIsCamelCase", "value1") + .endObject()), + XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 74183ae864a60..be03a28a0aad2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -29,6 +29,8 @@ import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; @@ -51,11 +53,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { public void testDefaultConfiguration() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -80,7 +82,7 @@ public void 
testDefaultConfiguration() throws IOException { } public void testCompletionAnalyzerSettings() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .field("analyzer", "simple") @@ -88,7 +90,7 @@ public void testCompletionAnalyzerSettings() throws Exception { .field("preserve_separators", false) .field("preserve_position_increments", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -114,7 +116,7 @@ public void testCompletionAnalyzerSettings() throws Exception { } public void testTypeParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .field("analyzer", "simple") @@ -123,7 +125,7 @@ public void testTypeParsing() throws Exception { .field("preserve_position_increments", true) .field("max_input_length", 14) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -134,7 +136,7 @@ public void testTypeParsing() throws Exception { XContentBuilder builder = jsonBuilder().startObject(); completionFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); - Map serializedMap = createParser(JsonXContent.jsonXContent, builder.bytes()).map(); + Map serializedMap = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).map(); Map configMap = (Map) serializedMap.get("completion"); assertThat(configMap.get("analyzer").toString(), is("simple")); assertThat(configMap.get("search_analyzer").toString(), is("standard")); @@ -144,196 +146,196 @@ public void testTypeParsing() throws Exception { } public void testParsingMinimal() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", "suggestion") - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", "suggestion") + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } public void testParsingFailure() throws Exception { - String mapping = 
jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", 1.0) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", 1.0) + .endObject()), XContentType.JSON))); assertEquals("failed to parse [completion]: expected text or object, but got VALUE_NUMBER", e.getCause().getMessage()); } public void testParsingMultiValued() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .array("completion", "suggestion1", "suggestion2") - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("completion", "suggestion1", "suggestion2") + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 2); } public void testParsingWithWeight() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("completion") - .field("input", "suggestion") - .field("weight", 2) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .field("input", "suggestion") + .field("weight", 2) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = 
parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } public void testParsingMultiValueWithWeight() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion1", "suggestion2", "suggestion3") - .field("weight", 2) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion1", "suggestion2", "suggestion3") + .field("weight", 2) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } public void testParsingFull() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .field("input", "suggestion1") - .field("weight", 3) - .endObject() - .startObject() - .field("input", "suggestion2") - .field("weight", 4) - .endObject() - .startObject() - .field("input", "suggestion3") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .field("input", "suggestion1") + .field("weight", 3) + .endObject() + .startObject() + .field("input", "suggestion2") + .field("weight", 4) + .endObject() + .startObject() + .field("input", "suggestion3") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } public void testParsingMixed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", 
"completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion1", "suggestion2") - .field("weight", 3) - .endObject() - .startObject() - .field("input", "suggestion3") - .field("weight", 4) - .endObject() - .startObject() - .array("input", "suggestion4", "suggestion5", "suggestion6") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion1", "suggestion2") + .field("weight", 3) + .endObject() + .startObject() + .field("input", "suggestion3") + .field("weight", 4) + .endObject() + .startObject() + .array("input", "suggestion4", "suggestion5", "suggestion6") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 6); } public void testNonContextEnabledParsingWithContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("field1") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("field1") - .field("input", "suggestion1") - .startObject("contexts") - .field("ctx", "ctx2") - .endObject() - .field("weight", 3) - .endObject() - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("field1") + .field("input", "suggestion1") + .startObject("contexts") + .field("ctx", "ctx2") + .endObject() + .field("weight", 3) + .endObject() + .endObject()), XContentType.JSON)); fail("Supplying contexts to a non context-enabled field should error"); } catch (MapperParsingException e) { @@ -342,22 +344,22 @@ public void testNonContextEnabledParsingWithContexts() throws Exception { } public void testFieldValueValidation() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); CharsRefBuilder charsRefBuilder = new CharsRefBuilder(); 
charsRefBuilder.append("sugg"); charsRefBuilder.setCharAt(2, '\u001F'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x1F]"); } catch (MapperParsingException e) { @@ -368,11 +370,11 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, '\u0000'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x0]"); } catch (MapperParsingException e) { @@ -383,11 +385,11 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, '\u001E'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x1E]"); } catch (MapperParsingException e) { @@ -398,11 +400,11 @@ public void testFieldValueValidation() throws Exception { } public void testPrefixQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -412,11 +414,11 @@ public void testPrefixQueryType() throws Exception { } public void testFuzzyQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -429,11 +431,11 @@ public void testFuzzyQueryType() throws Exception { } public void testRegexQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -456,9 +458,9 @@ private static void assertSuggestFields(IndexableField[] fields, int expected) { public void testEmptyName() throws IOException { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "completion").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 637a25b24d60a..0015919b674ae 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -69,12 +70,12 @@ public void testDynamicTemplateCopyTo() throws Exception { } public void testDynamicObjectCopyTo() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc").startObject("properties") .startObject("foo") .field("type", "text") .field("copy_to", "root.top.child") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); assertAcked( client().admin().indices().prepareCreate("test-idx") .addMapping("_doc", mapping, XContentType.JSON) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index b4c698fa26d0b..eff6222e6c6ff 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; @@ -46,7 +47,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { @SuppressWarnings("unchecked") public void testCopyToFieldsParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = 
Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "another_field", "cyclic_test") @@ -66,7 +67,7 @@ public void testCopyToFieldsParsing() throws Exception { .field("doc_values", false) .array("copy_to", "another_field", "new_field") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get(); @@ -79,7 +80,7 @@ public void testCopyToFieldsParsing() throws Exception { stringFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); Map serializedMap; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { serializedMap = parser.map(); } Map copyTestMap = (Map) serializedMap.get("copy_test"); @@ -90,11 +91,11 @@ public void testCopyToFieldsParsing() throws Exception { assertThat(copyToList.get(1), equalTo("cyclic_test")); // Check data parsing - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("cyclic_test", "bar") .field("int_to_str_test", 42) - .endObject().bytes(); + .endObject()); ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)); ParseContext.Document doc = parsedDoc.rootDoc(); @@ -126,7 +127,7 @@ public void testCopyToFieldsParsing() throws Exception { } public void testCopyToFieldsInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") @@ -142,16 +143,16 @@ public void testCopyToFieldsInnerObjectParsing() throws Exception { .endObject() .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject() - .endObject().bytes(); + .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -162,21 +163,21 @@ public void testCopyToFieldsInnerObjectParsing() throws Exception { } public void testCopyToDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper 
docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -191,7 +192,7 @@ public void testCopyToDynamicInnerObjectParsing() throws Exception { } public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") @@ -206,16 +207,16 @@ public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -228,7 +229,7 @@ public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { } public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .field("dynamic", "strict") .startObject("properties") .startObject("copy_test") @@ -236,13 +237,13 @@ public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -253,7 +254,7 @@ public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { } public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") @@ -270,13 +271,13 @@ public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new 
CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -287,23 +288,23 @@ public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { } public void testCopyToFieldMerge() throws Exception { - String mappingBefore = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mappingBefore = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "foo", "bar") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); - String mappingAfter = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mappingAfter = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "baz", "bar") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE); @@ -357,7 +358,7 @@ public void testCopyToNestedField() throws Exception { .endObject() .endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); XContentBuilder jsonDoc = XContentFactory.jsonBuilder() .startObject() @@ -382,7 +383,7 @@ public void testCopyToNestedField() throws Exception { .endArray() .endObject(); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.bytes(), XContentType.JSON)); + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc), XContentType.JSON)); assertEquals(6, doc.docs().size()); Document nested = doc.docs().get(0); @@ -437,7 +438,7 @@ public void testCopyToChildNested() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings")); @@ -465,7 +466,7 @@ public void testCopyToChildNested() throws Exception { .endObject() .endObject(); e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(nestedToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(nestedToNestedMapping)), MergeReason.MAPPING_UPDATE)); } @@ -495,7 +496,7 @@ public void testCopyToSiblingNested() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", 
new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings")); } @@ -516,13 +517,13 @@ public void testCopyToObject() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), Matchers.startsWith("Cannot copy to field [target] since it is mapped as an object")); } public void testCopyToDynamicNestedObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startArray("dynamic_templates") .startObject() .startObject("objects") @@ -539,14 +540,14 @@ public void testCopyToDynamicNestedObjectParsing() throws Exception { .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -569,7 +570,7 @@ private void assertFieldValue(Document doc, String field, Number... 
expected) { } public void testCopyToMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("my_field") .field("type", "keyword") @@ -581,7 +582,7 @@ public void testCopyToMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -590,7 +591,7 @@ public void testCopyToMultiField() throws Exception { } public void testNestedCopyTo() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("n") .field("type", "nested") @@ -605,14 +606,14 @@ public void testNestedCopyTo() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); // no exception } public void testNestedCopyToMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("n") .field("type", "nested") @@ -629,7 +630,7 @@ public void testNestedCopyToMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -638,7 +639,7 @@ public void testNestedCopyToMultiField() throws Exception { } public void testCopyFromMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("my_field") .field("type", "keyword") @@ -650,7 +651,7 @@ public void testCopyFromMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); MapperParsingException e = expectThrows(MapperParsingException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 23bcba4cda76b..9d334cecb708f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -57,19 +59,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -86,19 +88,19 @@ public void testDefaults() throws Exception { } public void testNotIndexed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -108,19 +110,19 @@ public void testNotIndexed() throws Exception { } public void testNoDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -130,19 +132,19 @@ public void testNoDocValues() throws Exception { } public void testStore() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("store", true).endObject().endObject() - .endObject().endObject().string(); + 
.endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -157,35 +159,35 @@ public void testStore() throws Exception { } public void testIgnoreMalformed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-99") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-99") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\"")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") .field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -193,20 +195,20 @@ public void testIgnoreMalformed() throws Exception { } public void testChangeFormat() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") .field("format", "epoch_second").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 1457654400) - .endObject() - .bytes(), + ParsedDocument 
doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -216,10 +218,10 @@ public void testChangeFormat() throws IOException { } public void testFloatEpochFormat() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") .field("format", "epoch_millis").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -228,11 +230,11 @@ public void testFloatEpochFormat() throws IOException { double epochFloatMillisFromEpoch = (randomDouble() * 2 - 1) * 1000000; String epochFloatValue = String.format(Locale.US, "%f", epochFloatMillisFromEpoch); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", epochFloatValue) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", epochFloatValue) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -242,44 +244,44 @@ public void testFloatEpochFormat() throws IOException { } public void testChangeLocale() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 1457654400) - .endObject() - .bytes(), + mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject()), XContentType.JSON)); } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", "date") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = 
XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -287,16 +289,16 @@ public void testNullValue() throws IOException { .field("null_value", "2016-03-11") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -312,7 +314,7 @@ public void testNullValue() throws IOException { } public void testNullConfigValuesFail() throws MapperParsingException, IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -320,17 +322,17 @@ public void testNullConfigValuesFail() throws MapperParsingException, IOExceptio .field("format", (String) null) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertEquals("[format] must not have a [null] value", e.getMessage()); } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "date") .field("format", "epoch_second").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) @@ -345,7 +347,7 @@ public void testEmptyName() throws IOException { public void testTimeZoneParsing() throws Exception { final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'"); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -353,7 +355,7 @@ public void testTimeZoneParsing() throws Exception { .field("format", timeZonePattern) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); @@ -361,11 +363,11 @@ public void testTimeZoneParsing() throws Exception { final DateTimeZone randomTimeZone = randomBoolean() ? 
DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone(); final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -375,20 +377,20 @@ public void testTimeZoneParsing() throws Exception { } public void testMergeDate() throws IOException { - String initMapping = XContentFactory.jsonBuilder().startObject().startObject("movie") + String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") .startObject("release_date").field("type", "date").field("format", "yyyy/MM/dd").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper initMapper = indexService.mapperService().merge("movie", new CompressedXContent(initMapping), MapperService.MergeReason.MAPPING_UPDATE); assertThat(initMapper.mappers().getMapper("release_date"), notNullValue()); assertFalse(initMapper.mappers().getMapper("release_date").fieldType().stored()); - String updateFormatMapping = XContentFactory.jsonBuilder().startObject().startObject("movie") + String updateFormatMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") .startObject("release_date").field("type", "date").field("format", "epoch_millis").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); Exception e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("movie", new CompressedXContent(updateFormatMapping), @@ -397,14 +399,14 @@ public void testMergeDate() throws IOException { } public void testMergeText() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("date").field("type", "date").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = indexService.mapperService().parse("_doc", new CompressedXContent(mapping), false); - String mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_doc") + String mappingUpdate = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("date").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper update = indexService.mapperService().parse("_doc", new CompressedXContent(mappingUpdate), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index b528c2119cfe1..41d98aa173df7 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -41,16 +42,16 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { public void test1Merge() throws Exception { - String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("name").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping)); - String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("name").field("type", "text").endObject() .startObject("age").field("type", "integer").endObject() .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping)); DocumentMapper merged = stage1.merge(stage2.mapping()); @@ -64,11 +65,11 @@ public void test1Merge() throws Exception { public void testMergeObjectDynamic() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string(); + String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject()); DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping)); assertNull(mapper.root().dynamic()); - String withDynamicMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject().string(); + String withDynamicMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject()); DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping)); assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); @@ -78,13 +79,13 @@ public void testMergeObjectDynamic() throws Exception { public void testMergeObjectAndNested() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "object").endObject() - .endObject().endObject().endObject().string(); + 
.endObject().endObject().endObject()); DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping)); - String nestedMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String nestedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping)); try { @@ -104,12 +105,12 @@ public void testMergeObjectAndNested() throws Exception { public void testMergeSearchAnalyzer() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); @@ -122,12 +123,12 @@ public void testMergeSearchAnalyzer() throws Exception { public void testChangeSearchAnalyzerToDefault() throws Exception { MapperService mapperService = createIndex("test").mapperService(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE); DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); @@ -206,23 +207,23 @@ public void run() { } public void testDoNotRepeatOriginalMapping() throws IOException { - CompressedXContent mapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("_source") .field("enabled", false) .endObject() - .endObject().endObject().bytes()); + 
.endObject().endObject())); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("type", mapping, MapperService.MergeReason.MAPPING_UPDATE); - CompressedXContent update = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("foo") .field("type", "text") .endObject() .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); DocumentMapper mapper = mapperService.merge("type", update, MapperService.MergeReason.MAPPING_UPDATE); assertNotNull(mapper.mappers().getMapper("foo")); @@ -232,28 +233,28 @@ public void testDoNotRepeatOriginalMapping() throws IOException { public void testMergeChildType() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String initMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "parent").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper initMapper = parser.parse("child", new CompressedXContent(initMapping)); assertThat(initMapper.mappers().getMapper("_parent#parent"), notNullValue()); - String updatedMapping1 = XContentFactory.jsonBuilder().startObject().startObject("child") + String updatedMapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("properties") .startObject("name").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper updatedMapper1 = parser.parse("child", new CompressedXContent(updatedMapping1)); DocumentMapper mergedMapper1 = initMapper.merge(updatedMapper1.mapping()); assertThat(mergedMapper1.mappers().getMapper("_parent#parent"), notNullValue()); assertThat(mergedMapper1.mappers().getMapper("name"), notNullValue()); - String updatedMapping2 = XContentFactory.jsonBuilder().startObject().startObject("child") + String updatedMapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "parent").endObject() .startObject("properties") .startObject("age").field("type", "byte").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper updatedMapper2 = parser.parse("child", new CompressedXContent(updatedMapping2)); DocumentMapper mergedMapper2 = mergedMapper1.merge(updatedMapper2.mapping()); @@ -261,9 +262,9 @@ public void testMergeChildType() throws IOException { assertThat(mergedMapper2.mappers().getMapper("name"), notNullValue()); assertThat(mergedMapper2.mappers().getMapper("age"), notNullValue()); - String modParentMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + String modParentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "new_parent").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper modParentMapper = parser.parse("child", new CompressedXContent(modParentMapping)); Exception e = expectThrows(IllegalArgumentException.class, () -> initMapper.merge(modParentMapper.mapping())); assertThat(e.getMessage(), containsString("The 
_parent field's type option can't be changed: [parent]->[new_parent]"));
@@ -272,19 +273,19 @@
     public void testMergeAddingParent() throws IOException {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
 
-        String initMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy")
+        String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("cowboy")
                 .startObject("properties")
                 .startObject("name").field("type", "text").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         DocumentMapper initMapper = parser.parse("cowboy", new CompressedXContent(initMapping));
 
         assertThat(initMapper.mappers().getMapper("name"), notNullValue());
 
-        String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy")
+        String updatedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("cowboy")
                 .startObject("_parent").field("type", "parent").endObject()
                 .startObject("properties")
                 .startObject("age").field("type", "byte").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         DocumentMapper updatedMapper = parser.parse("cowboy", new CompressedXContent(updatedMapping));
         Exception e = expectThrows(IllegalArgumentException.class, () -> initMapper.merge(updatedMapper.mapping()));
         assertThat(e.getMessage(), containsString("The _parent field's type option can't be changed: [null]->[parent]"));
@@ -293,41 +294,41 @@
     public void testMergeMeta() throws IOException {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
 
-        String initMapping = XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("test")
-                .startObject("_meta")
-                .field("foo").value("bar")
+        String initMapping = Strings
+                .toString(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .startObject("test")
+                        .startObject("_meta")
+                        .field("foo").value("bar")
+                        .endObject()
                 .endObject()
-                .endObject()
-                .endObject()
-                .string();
+                .endObject());
         DocumentMapper initMapper = parser.parse("test", new CompressedXContent(initMapping));
 
         assertThat(initMapper.meta().get("foo"), equalTo("bar"));
 
-        String updateMapping = XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("test")
-                .startObject("properties")
-                .startObject("name").field("type", "text").endObject()
+        String updateMapping = Strings
+                .toString(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .startObject("test")
+                        .startObject("properties")
+                        .startObject("name").field("type", "text").endObject()
+                        .endObject()
                 .endObject()
-                .endObject()
-                .endObject()
-                .string();
+                .endObject());
         DocumentMapper updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
 
         assertThat(initMapper.merge(updatedMapper.mapping()).meta().get("foo"), equalTo("bar"));
 
-        updateMapping = XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("test")
-                .startObject("_meta")
-                .field("foo").value("new_bar")
+        updateMapping = Strings
+                .toString(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .startObject("test")
+                        .startObject("_meta")
+                        .field("foo").value("new_bar")
+                        .endObject()
                 .endObject()
-                .endObject()
-                .endObject()
-                .string();
+                .endObject());
         updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
 
         assertThat(initMapper.merge(updatedMapper.mapping()).meta().get("foo"), equalTo("new_bar"));
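Note: every hunk in this series applies the same two-step migration. The deprecated
XContentBuilder instance methods string() and bytes() are replaced by the static
helpers Strings.toString(XContentBuilder) and BytesReference.bytes(XContentBuilder),
both of which the hunks above already import. A minimal, self-contained sketch of
the pattern follows; the class and variable names in it are illustrative only and
do not come from the patch:

    import java.io.IOException;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    class XContentMigrationSketch {
        static void demo() throws IOException {
            // Build some arbitrary JSON content.
            XContentBuilder builder = XContentFactory.jsonBuilder()
                    .startObject()
                        .field("field", "value")
                    .endObject();

            // Before this series (deprecated instance methods):
            //   String json = builder.string();
            //   BytesReference bytes = builder.bytes();

            // After: static helpers that take the builder as an argument.
            String json = Strings.toString(builder);              // replaces builder.string()
            BytesReference bytes = BytesReference.bytes(builder); // replaces builder.bytes()
        }
    }

The same shape applies when a builder is fed back into a parser, as in the
CopyToMapperTests hunk above: createParser(JsonXContent.jsonXContent, builder.bytes())
becomes createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).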
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
index a29566bfa4ee5..268b03d046c1c 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java
@@ -19,11 +19,10 @@
 package org.elasticsearch.index.mapper;
 
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -31,8 +30,8 @@
 // TODO: move this test...it doesn't need to be by itself
 public class DocumentMapperParserTests extends ESSingleNodeTestCase {
     public void testTypeLevel() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .endObject().endObject());
 
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@@ -42,10 +41,10 @@ public void testTypeLevel() throws Exception {
     public void testFieldNameWithDots() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo.bar").field("type", "text").endObject()
                 .startObject("foo.baz").field("type", "keyword").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
         assertNotNull(docMapper.mappers().getMapper("foo.bar"));
         assertNotNull(docMapper.mappers().getMapper("foo.baz"));
@@ -55,11 +54,11 @@ public void testFieldNameWithDots() throws Exception {
     public void testFieldNameWithDeepDots() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo.bar").field("type", "text").endObject()
                 .startObject("foo.baz").startObject("properties")
                 .startObject("deep.field").field("type", "keyword").endObject().endObject()
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
         assertNotNull(docMapper.mappers().getMapper("foo.bar"));
         assertNotNull(docMapper.mappers().getMapper("foo.baz.deep.field"));
@@ -69,10 +68,10 @@ public void testFieldNameWithDeepDots() throws Exception {
     public void testFieldNameWithDotsConflict() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").field("type", "text").endObject()
                 .startObject("foo.baz").field("type", "keyword").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
             mapperParser.parse("type", new CompressedXContent(mapping)));
         assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type"));
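The next file applies the same rewrite where a document body, rather than a mapping, is fed to DocumentMapper.parse: the builder previously supplied its own bytes via doc.bytes(), and now goes through BytesReference.bytes(doc) before reaching SourceToParse.source(...). A short sketch under the same 6.x-era assumptions (the class SourceSketch and its literal field values are illustrative, not from the patch):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    class SourceSketch {
        // Builds the JSON source for a parse call; before this patch the
        // tests ended this expression with doc.bytes() instead.
        static BytesReference buildSource() throws Exception {
            XContentBuilder doc = XContentFactory.jsonBuilder()
                .startObject().field("field", "value").endObject();
            return BytesReference.bytes(doc);
        }
    }

The resulting reference is what SourceToParse.source("test", "type", "1", bytes, XContentType.JSON) consumes throughout the hunks below.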
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
index b3b33cf0dd6ad..dd4717a1a0f6a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -60,17 +61,17 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
 
     public void testFieldDisabled() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").field("enabled", false).endObject()
                 .startObject("bar").field("type", "integer").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo", "1234")
                 .field("bar", 10)
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.rootDoc().getField("foo"));
         assertNotNull(doc.rootDoc().getField("bar"));
@@ -79,14 +80,14 @@ public void testFieldDisabled() throws Exception {
 
     public void testDotsWithExistingMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").startObject("properties")
                 .startObject("bar").startObject("properties")
                 .startObject("baz").field("type", "integer")
-                .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo.bar.baz", 123)
                 .startObject("foo")
@@ -95,7 +96,7 @@ public void testDotsWithExistingMapper() throws Exception {
                 .startObject("foo.bar")
                 .field("baz", 789)
                 .endObject()
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNull(doc.dynamicMappingsUpdate()); // no update!
         String[] values = doc.rootDoc().getValues("foo.bar.baz");
@@ -107,16 +108,16 @@ public void testDotsWithExistingMapper() throws Exception {
 
     public void testDotsWithExistingNestedMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").field("type", "nested").startObject("properties")
                 .startObject("bar").field("type", "integer")
-                .endObject().endObject().endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo.bar", 123)
-                .endObject().bytes();
+                .endObject());
         MapperParsingException e = expectThrows(MapperParsingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals(
@@ -126,7 +127,7 @@ public void testDotsWithExistingNestedMapper() throws Exception {
 
     public void testDotsWithDynamicNestedMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates")
                     .startObject()
                         .startObject("objects_as_nested")
@@ -136,13 +137,13 @@ public void testDotsWithDynamicNestedMapper() throws Exception {
                         .endObject()
                     .endObject()
                 .endObject()
-                .endArray().endObject().endObject().string();
+                .endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo.bar",42)
-                .endObject().bytes();
+                .endObject());
         MapperParsingException e = expectThrows(MapperParsingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals(
@@ -179,8 +180,8 @@ public void testNestedHaveIdAndTypeFields() throws Exception {
             mapping.endObject();
         }
         mapping.endObject().endObject().endObject();
-        DocumentMapper mapper1 = mapperParser1.parse("type", new CompressedXContent(mapping.string()));
-        DocumentMapper mapper2 = mapperParser2.parse("type", new CompressedXContent(mapping.string()));
+        DocumentMapper mapper1 = mapperParser1.parse("type", new CompressedXContent(Strings.toString(mapping)));
+        DocumentMapper mapper2 = mapperParser2.parse("type", new CompressedXContent(Strings.toString(mapping)));
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
         {
@@ -196,7 +197,7 @@ public void testNestedHaveIdAndTypeFields() throws Exception {
         doc.endObject();
 
         // Verify in the case where multiple types are allowed that the _uid field is added to nested documents:
-        ParsedDocument result = mapper1.parse(SourceToParse.source("index1", "type", "1", doc.bytes(), XContentType.JSON));
+        ParsedDocument result = mapper1.parse(SourceToParse.source("index1", "type", "1", BytesReference.bytes(doc), XContentType.JSON));
         assertEquals(2, result.docs().size());
         // Nested document:
         assertNull(result.docs().get(0).getField(IdFieldMapper.NAME));
@@ -216,7 +217,7 @@ public void testNestedHaveIdAndTypeFields() throws Exception {
         assertEquals("value2", result.docs().get(1).getField("baz").binaryValue().utf8ToString());
 
         // Verify in the case where only a single type is allowed that the _id field is added to nested documents:
-        result = mapper2.parse(SourceToParse.source("index2", "type", "1", doc.bytes(), XContentType.JSON));
+        result = mapper2.parse(SourceToParse.source("index2", "type", "1", BytesReference.bytes(doc), XContentType.JSON));
         assertEquals(2, result.docs().size());
         // Nested document:
         assertNull(result.docs().get(0).getField(UidFieldMapper.NAME));
@@ -237,19 +238,19 @@ public void testNestedHaveIdAndTypeFields() throws Exception {
 
     public void testPropagateDynamicWithExistingMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .field("dynamic", false)
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "object")
                 .field("dynamic", true)
                 .startObject("properties")
-                .endObject().endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startObject("foo")
                 .field("bar", "something")
-                .endObject().endObject().bytes();
+                .endObject().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertNotNull(doc.rootDoc().getField("foo.bar"));
@@ -257,19 +258,19 @@ public void testPropagateDynamicWithExistingMapper() throws Exception {
 
     public void testPropagateDynamicWithDynamicMapper() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .field("dynamic", false)
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "object")
                 .field("dynamic", true)
                 .startObject("properties")
-                .endObject().endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startObject("foo").startObject("bar") .field("baz", "something") - .endObject().endObject().endObject().bytes(); + .endObject().endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertNotNull(doc.rootDoc().getField("foo.bar.baz")); @@ -277,30 +278,30 @@ public void testPropagateDynamicWithDynamicMapper() throws Exception { public void testDynamicRootFallback() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .field("dynamic", false) .startObject("properties") .startObject("foo") .field("type", "object") .startObject("properties") - .endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "something") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); assertNull(doc.rootDoc().getField("foo.bar")); } DocumentMapper createDummyMapping(MapperService mapperService) throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("y").field("type", "object").endObject() .startObject("x").startObject("properties") .startObject("subx").field("type", "object").startObject("properties") .startObject("subsubx").field("type", "object") - .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); return defaultMapper; @@ -405,81 +406,81 @@ public void testObjectMappingUpdate() throws Exception { public void testDynamicGeoPointArrayWithTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "geo_point").field("doc_values", false).endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .startArray().value(0).value(0).endArray() .startArray().value(1).value(1).endArray() - 
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicLongArrayWithTemplate() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates").startObject().startObject("georule")
                 .field("match", "foo*")
                 .startObject("mapping").field("type", "long").endObject()
-                .endObject().endObject().endArray().endObject().endObject().string();
+                .endObject().endObject().endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicLongArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicFalseLongArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicStrictLongArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
@@ -487,78 +488,78 @@ public void testDynamicStrictLongArray() throws Exception {
 
     public void testMappedGeoPointArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("foo").field("type", "geo_point").field("doc_values", false)
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .startArray().value(0).value(0).endArray()
                 .startArray().value(1).value(1).endArray()
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo").length);
     }
 
     public void testMappedLongArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("foo").field("type", "long")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicObjectWithTemplate() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates").startObject().startObject("georule")
                 .field("match", "foo*")
                 .startObject("mapping").field("type", "object")
                 .startObject("properties").startObject("bar").field("type", "keyword").endObject().endObject().endObject()
"keyword").endObject().endObject().endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar").length); } public void testDynamicFalseObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar").length); } public void testDynamicStrictObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -566,28 +567,28 @@ public void testDynamicStrictObject() throws Exception { public void testDynamicFalseValue() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("bar", "baz") - .endObject().bytes(); + 
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("bar").length);
     }
 
     public void testDynamicStrictValue() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("bar", "baz")
-                .endObject().bytes();
+                .endObject());
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage());
@@ -595,28 +596,28 @@ public void testDynamicStrictValue() throws Exception {
 
     public void testDynamicFalseNull() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("bar", (String) null)
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("bar").length);
     }
 
     public void testDynamicStrictNull() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("bar", (String) null)
-                .endObject().bytes();
+                .endObject());
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage());
@@ -624,29 +625,29 @@ public void testDynamicStrictNull() throws Exception {
 
     public void testMappedNullValue() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("foo").field("type", "long")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("foo", (Long) null)
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo").length);
     }
 
     public void testDynamicDottedFieldNameLongArray() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo.bar.baz")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -662,18 +663,18 @@ public void testDynamicDottedFieldNameLongArray() throws Exception {
 
     public void testDynamicDottedFieldNameLongArrayWithParentTemplate() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates").startObject().startObject("georule")
                 .field("match", "foo*")
                 .startObject("mapping").field("type", "object").endObject()
-                .endObject().endObject().endArray().endObject().endObject().string();
+                .endObject().endObject().endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo.bar.baz")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -689,17 +690,17 @@ public void testDynamicDottedFieldNameLongArrayWithParentTemplate() throws Excep
 
     public void testDynamicDottedFieldNameLongArrayWithExistingParent() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "object")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo.bar.baz")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -715,17 +716,17 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParent() throws Excep
 
     public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "long")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("foo.bar.baz")
                 .value(0)
                 .value(1)
-                .endArray().endObject().bytes();
+                .endArray().endObject());
         MapperParsingException exception = expectThrows(MapperParsingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. "
" @@ -734,30 +735,30 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() thr public void testDynamicFalseDottedFieldNameLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } public void testDynamicStrictDottedFieldNameLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -765,13 +766,13 @@ public void testDynamicStrictDottedFieldNameLongArray() throws Exception { public void testDynamicDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -787,16 +788,16 @@ public void testDynamicDottedFieldNameLong() throws Exception { public void testDynamicDottedFieldNameLongWithParentTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = 
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates").startObject().startObject("georule")
                 .field("match", "foo*")
                 .startObject("mapping").field("type", "object").endObject()
-                .endObject().endObject().endArray().endObject().endObject().string();
+                .endObject().endObject().endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("foo.bar.baz", 0)
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -812,15 +813,15 @@ public void testDynamicDottedFieldNameLongWithParentTemplate() throws Exception
 
     public void testDynamicDottedFieldNameLongWithExistingParent() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "object")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("foo.bar.baz", 0)
-                .endObject().bytes();
+                .endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -836,15 +837,15 @@ public void testDynamicDottedFieldNameLongWithExistingParent() throws Exception
 
     public void testDynamicDottedFieldNameLongWithExistingParentWrongType() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "long")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("foo.bar.baz", 0)
-                .endObject().bytes();
+                .endObject());
         MapperParsingException exception = expectThrows(MapperParsingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. "
" @@ -853,26 +854,26 @@ public void testDynamicDottedFieldNameLongWithExistingParentWrongType() throws E public void testDynamicFalseDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } public void testDynamicStrictDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -880,14 +881,14 @@ public void testDynamicStrictDottedFieldNameLong() throws Exception { public void testDynamicDottedFieldNameObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo.bar.baz") .field("a", 0) - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -906,17 +907,17 @@ public void testDynamicDottedFieldNameObject() throws Exception { public void testDynamicDottedFieldNameObjectWithParentTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String 
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_templates").startObject().startObject("georule")
                 .field("match", "foo*")
                 .startObject("mapping").field("type", "object").endObject()
-                .endObject().endObject().endArray().endObject().endObject().string();
+                .endObject().endObject().endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startObject("foo.bar.baz")
                 .field("a", 0)
-                .endObject().endObject().bytes();
+                .endObject().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -935,12 +936,12 @@ public void testDynamicDottedFieldNameObjectWithParentTemplate() throws Exceptio
 
     public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("foo")
-                .field("type", "object").endObject().endObject().endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("foo")
+                .field("type", "object").endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()
-                .bytes();
+        BytesReference bytes = BytesReference
+                .bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -959,14 +960,14 @@ public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exceptio
 
     public void testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties")
                 .startObject("foo")
                 .field("type", "long")
-                .endObject().endObject().endObject().endObject().string();
+                .endObject().endObject().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()
-                .bytes();
+        BytesReference bytes = BytesReference
+                .bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject());
         MapperParsingException exception = expectThrows(MapperParsingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
@@ -976,28 +977,28 @@ public void testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws
 
     public void testDynamicFalseDottedFieldNameObject() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startObject("foo.bar.baz")
                 .field("a", 0)
-                .endObject().endObject().bytes();
+                .endObject().endObject());
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
         assertEquals(0, doc.rootDoc().getFields("foo.bar.baz.a").length);
     }
 
     public void testDynamicStrictDottedFieldNameObject() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict")
+                .endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startObject("foo.bar.baz")
                 .field("a", 0)
-                .endObject().endObject().bytes();
+                .endObject().endObject());
         StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
                 () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage());
@@ -1005,15 +1006,15 @@ public void testDynamicStrictDottedFieldNameObject() throws Exception {
 
     public void testDocumentContainsMetadataField() throws Exception {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
 
-        BytesReference bytes = XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject().bytes();
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject());
         MapperParsingException e = expectThrows(MapperParsingException.class, () ->
             mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)));
         assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document"));
 
-        BytesReference bytes2 = XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject().bytes();
+        BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject());
         mapper.parse(SourceToParse.source("test", "type", "1", bytes2, XContentType.JSON)); // parses without error
     }
@@ -1094,17 +1095,17 @@ public void testNoDocumentSent() throws Exception {
     }
 
     public void testNoLevel() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("test1", "value1")
+                        .field("test2", "value2")
+                        .startObject("inner").field("inner_field", "inner_value").endObject()
+                        .endObject()),
                 XContentType.JSON));
 
         assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
@@ -1113,17 +1114,17 @@ public void testNoLevel() throws Exception {
     }
 
     public void testTypeLevel() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject().startObject("type")
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject().endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject().startObject("type")
+                        .field("test1", "value1")
+                        .field("test2", "value2")
+                        .startObject("inner").field("inner_field", "inner_value").endObject()
+                        .endObject().endObject()),
                 XContentType.JSON));
 
         assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
@@ -1132,18 +1133,18 @@ public void testTypeLevel() throws Exception {
     }
 
     public void testNoLevelWithFieldTypeAsValue() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("type", "value_type")
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("type", "value_type")
+                .field("test1", "value1")
+                .field("test2", "value2")
+                .startObject("inner").field("inner_field", "inner_value").endObject()
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("type"), equalTo("value_type"));
@@ -1153,18 +1154,18 @@ public void testNoLevelWithFieldTypeAsValue() throws Exception {
     }

     public void testTypeLevelWithFieldTypeAsValue() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(defaultMapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject().startObject("type")
-                .field("type", "value_type")
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject().endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject().startObject("type")
+                .field("type", "value_type")
+                .field("test1", "value1")
+                .field("test2", "value2")
+                .startObject("inner").field("inner_field", "inner_value").endObject()
+                .endObject().endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
@@ -1174,18 +1175,18 @@ public void testTypeLevelWithFieldTypeAsValue() throws Exception {
     }

     public void testNoLevelWithFieldTypeAsObject() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(defaultMapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("type").field("type_field", "type_value").endObject()
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .startObject("type").field("type_field", "type_value").endObject()
+                .field("test1", "value1")
+                .field("test2", "value2")
+                .startObject("inner").field("inner_field", "inner_value").endObject()
+                .endObject()),
                 XContentType.JSON));

         // in this case, we analyze the type object as the actual document, and ignore the other same level fields
@@ -1195,18 +1196,18 @@ public void testNoLevelWithFieldTypeAsObject() throws Exception {
     }

     public void testTypeLevelWithFieldTypeAsObject() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
            CompressedXContent(defaultMapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject().startObject("type")
-                .startObject("type").field("type_field", "type_value").endObject()
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject().endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject().startObject("type")
+                .startObject("type").field("type_field", "type_value").endObject()
+                .field("test1", "value1")
+                .field("test2", "value2")
+                .startObject("inner").field("inner_field", "inner_value").endObject()
+                .endObject().endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value"));
@@ -1216,18 +1217,18 @@ public void testTypeLevelWithFieldTypeAsObject() throws Exception {
     }

     public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(defaultMapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject().startObject("type")
-                .field("test1", "value1")
-                .field("test2", "value2")
-                .field("type", "value_type")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject().endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject().startObject("type")
+                .field("test1", "value1")
+                .field("test2", "value2")
+                .field("type", "value_type")
+                .startObject("inner").field("inner_field", "inner_value").endObject()
+                .endObject().endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
@@ -1237,18 +1238,18 @@ public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
     }

     public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
-        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(defaultMapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject().startObject("type")
-                .field("test1", "value1")
-                .field("type", "value_type")
-                .field("test2", "value2")
-                .startObject("inner").field("inner_field", "inner_value").endObject()
-                .endObject().endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject().startObject("type")
+                .field("test1", "value1")
+                .field("type", "value_type")
+                .field("test2", "value2")
"inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type"), equalTo("value_type")); @@ -1258,18 +1259,18 @@ public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception { } public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("test1", "value1") - .startObject("type").field("type_field", "type_value").endObject() - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("test1", "value1") + .startObject("type").field("type_field", "type_value").endObject() + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject()), XContentType.JSON)); // when the type is not the first one, we don't confuse it... @@ -1280,18 +1281,18 @@ public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { } public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("test1", "value1") - .startObject("type").field("type_field", "type_value").endObject() - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("test1", "value1") + .startObject("type").field("type_field", "type_value").endObject() + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value")); @@ -1302,16 +1303,16 @@ public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { public void testDynamicDateDetectionDisabledOnNumbers() throws IOException { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_date_formats") .value("yyyy") - .endArray().endObject().endObject().string(); + 
+            .endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));

-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo", "2016")
-                .endObject().bytes();
+                .endObject());

         // Even though we matched the dynamic format, we do not match on numbers,
         // which are too likely to be false positives
@@ -1325,16 +1326,16 @@ public void testDynamicDateDetectionDisabledOnNumbers() throws IOException {

     public void testDynamicDateDetectionEnabledWithNoSpecialCharacters() throws IOException {
         DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
            .startArray("dynamic_date_formats")
                .value("yyyy MM")
-           .endArray().endObject().endObject().string();
+           .endArray().endObject().endObject());
         DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));

-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("foo", "2016 12")
-                .endObject().bytes();
+                .endObject());

         // We should have generated a date field
         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON));
@@ -1346,17 +1347,17 @@ public void testDynamicDateDetectionEnabledWithNoSpecialCharacters() throws IOEx
     }

     public void testDynamicFieldsStartingAndEndingWithDot() throws Exception {
-        BytesReference bytes = XContentFactory.jsonBuilder().startObject().startArray("top.")
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startArray("top.")
                 .startObject().startArray("foo.")
                     .startObject()
                         .field("thing", "bah")
                     .endObject().endArray()
                 .endObject().endArray()
-                .endObject().bytes();
+                .endObject());

         client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get();

-        bytes = XContentFactory.jsonBuilder().startObject().startArray("top.")
+        bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startArray("top.")
                 .startObject().startArray("foo.")
                     .startObject()
                         .startObject("bar.")
                             .startObject("aoeu")
                                 .field("a", 1).field("b", 2)
                             .endObject()
                         .endObject()
                     .endObject()
                 .endArray().endObject().endArray()
-                .endObject().bytes();
+                .endObject());

         try {
             client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get();
@@ -1378,14 +1379,14 @@ public void testDynamicFieldsStartingAndEndingWithDot() throws Exception {
     }

     public void testDynamicFieldsEmptyName() throws Exception {
-        BytesReference bytes = XContentFactory.jsonBuilder()
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("top.")
                     .startObject()
                         .startObject("aoeu")
                             .field("a", 1).field(" ", 2)
                         .endObject()
                     .endObject().endArray()
-                .endObject().bytes();
+                .endObject());

         IllegalArgumentException emptyFieldNameException = expectThrows(IllegalArgumentException.class,
                 () -> client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get());
@@ -1395,21 +1396,21 @@ public void testDynamicFieldsEmptyName() throws Exception {
     }

     public void testBlankFieldNames() throws Exception {
-        final BytesReference bytes = XContentFactory.jsonBuilder()
+        final BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("", "foo")
-                .endObject().bytes();
+                .endObject());

         MapperParsingException err = expectThrows(MapperParsingException.class, () ->
                 client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get());
         assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string"));

-        final BytesReference bytes2 = XContentFactory.jsonBuilder()
+        final BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("foo")
                 .field("", "bar")
                 .endObject()
-                .endObject().bytes();
+                .endObject());

         err = expectThrows(MapperParsingException.class, () ->
                 client().prepareIndex("idx", "type").setSource(bytes2, XContentType.JSON).get());
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
index 804214ad96f51..6c83f31f93fe6 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java
@@ -24,12 +24,12 @@
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -40,23 +40,23 @@ public void testDoubleIndexingSameDoc() throws Exception {
         Directory dir = newDirectory();
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), Lucene.STANDARD_ANALYZER));
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         IndexService index = createIndex("test");
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper mapper = index.mapperService().documentMapper("type");
         QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null);

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field1", "value1")
-                .field("field2", 1)
-                .field("field3", 1.1)
-                .field("field4", "2010-01-01")
-                .startArray("field5").value(1).value(2).value(3).endArray()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field1", "value1")
+                .field("field2", 1)
+                .field("field3", 1.1)
+                .field("field4", "2010-01-01")
+                .startArray("field5").value(1).value(2).value(3).endArray()
+                .endObject()),
                 XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index fd61afc566efc..f26deb5fc4fae 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
@@ -34,7 +35,6 @@
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.BooleanFieldMapper.BooleanFieldType;
 import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
-import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -59,21 +59,21 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
     }

     public void testDynamicTrue() throws IOException {
-        String mapping = jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
                 .field("dynamic", "true")
                 .startObject("properties")
                 .startObject("field1").field("type", "text").endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(mapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
-                .startObject()
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(jsonBuilder()
+                .startObject()
+                .field("field1", "value1")
+                .field("field2", "value2")
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
@@ -81,21 +81,21 @@ public void testDynamicTrue() throws IOException {
     }

     public void testDynamicFalse() throws IOException {
-        String mapping = jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
                 .field("dynamic", "false")
                 .startObject("properties")
                 .startObject("field1").field("type", "text").endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(mapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
-                .startObject()
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(jsonBuilder()
+                .startObject()
+                .field("field1", "value1")
+                .field("field2", "value2")
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
@@ -104,53 +104,53 @@


     public void testDynamicStrict() throws IOException {
-        String mapping = jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
                 .field("dynamic", "strict")
                 .startObject("properties")
                 .startObject("field1").field("type", "text").endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(mapping));

-        StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
-                .startObject()
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject()
-                .bytes(),
+        StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(jsonBuilder()
+                .startObject()
+                .field("field1", "value1")
+                .field("field2", "value2")
+                .endObject()),
                 XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed"));

-        e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field1", "value1")
-                .field("field2", (String) null)
-                .endObject()
-                .bytes(),
+        e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field1", "value1")
+                .field("field2", (String) null)
+                .endObject()),
                 XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed"));
     }

     public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOException {
-        String mapping = jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
                 .field("dynamic", "false")
                 .startObject("properties")
                 .startObject("obj1").startObject("properties")
                 .startObject("field1").field("type", "text").endObject()
                 .endObject().endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(mapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
-                .startObject().startObject("obj1")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(jsonBuilder()
+                .startObject().startObject("obj1")
+                .field("field1", "value1")
+                .field("field2", "value2")
+                .endObject()
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().get("obj1.field1"), equalTo("value1"));
@@ -158,25 +158,25 @@ public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOExcept
     }

     public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOException {
-        String mapping = jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
                 .field("dynamic", "strict")
                 .startObject("properties")
                 .startObject("obj1").startObject("properties")
                 .startObject("field1").field("type", "text").endObject()
                 .endObject().endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
             CompressedXContent(mapping));

         StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () ->
-            defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder()
-                .startObject().startObject("obj1")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject()
-                .endObject()
-                .bytes(),
+            defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(jsonBuilder()
+                    .startObject().startObject("obj1")
+                    .field("field1", "value1")
+                    .field("field2", "value2")
+                    .endObject()
+                    .endObject()),
                 XContentType.JSON)));
         assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [obj1] is not allowed"));
     }
@@ -207,12 +207,12 @@ public void testTypeNotCreatedOnIndexFailure() throws IOException, InterruptedEx
     private String serialize(ToXContent mapper) throws Exception {
         XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
         mapper.toXContent(builder, new ToXContent.MapParams(emptyMap()));
-        return builder.endObject().string();
+        return Strings.toString(builder.endObject());
     }

     private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception {
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
-        SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", builder.bytes(), builder.contentType());
+        SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", BytesReference.bytes(builder), builder.contentType());
         try (XContentParser xContentParser = createParser(JsonXContent.jsonXContent, source.source())) {
             ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, xContentParser);
             assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
@@ -226,9 +226,9 @@ private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XConten
     public void testDynamicMappingsNotNeeded() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

         Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
@@ -239,9 +239,9 @@ public void testDynamicMappingsNotNeeded() throws Exception {
     public void testField() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type").endObject()
-                .endObject().string();
+                .endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -251,7 +251,7 @@ public void testField() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo")
                 .field("type", "text")
                 .startObject("fields")
@@ -261,7 +261,7 @@ public void testField() throws Exception {
                 .endObject()
                 .endObject()
                 .endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testIncremental() throws Exception {
@@ -269,9 +269,9 @@
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
         // Make sure that mapping updates are incremental, this is important for performance otherwise
         // every new field introduction runs in linear time with the total number of fields
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -281,7 +281,7 @@ public void testIncremental() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 // foo is NOT in the update
                 .startObject("bar").field("type", "text")
                 .startObject("fields")
@@ -291,15 +291,15 @@ public void testIncremental() throws Exception {
                 .endObject()
                 .endObject()
                 .endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testIntroduceTwoFields() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type").endObject()
-                .endObject().string();
+                .endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -309,7 +309,7 @@ public void testIntroduceTwoFields() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("bar").field("type", "text")
                 .startObject("fields")
                 .startObject("keyword")
@@ -326,15 +326,15 @@ public void testIntroduceTwoFields() throws Exception {
                 .endObject()
                 .endObject()
                 .endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testObject() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type").endObject()
-                .endObject().string();
+                .endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -344,19 +344,19 @@ public void testObject() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text")
                 .startObject("fields").startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject()
                 .endObject().endObject().endObject().endObject().endObject().endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testArray() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type").endObject()
-                .endObject().string();
+                .endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -366,7 +366,7 @@ public void testArray() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo")
                 .field("type", "text")
                 .startObject("fields")
@@ -376,15 +376,15 @@ public void testArray() throws Exception {
                 .endObject()
                 .endObject()
                 .endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testInnerDynamicMapping() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties")
                 .startObject("foo").field("type", "object").endObject()
-                .endObject().endObject().endObject().string();
+                .endObject().endObject().endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -394,19 +394,19 @@ public void testInnerDynamicMapping() throws Exception {
         // original mapping not modified
         assertEquals(mapping, serialize(mapper));
         // but we have an update
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text").startObject("fields")
                 .startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject()
                 .endObject().endObject().endObject().endObject().endObject().endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testComplexArray() throws Exception {
         IndexService indexService = createIndex("test");
         DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject()
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
                 .startObject("type").endObject()
-                .endObject().string();
+                .endObject());

         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, serialize(mapper));
@@ -416,7 +416,7 @@ public void testComplexArray() throws Exception {
                 .startObject().field("baz", 3).endObject()
                 .endArray().endObject());
         assertEquals(mapping, serialize(mapper));
-        assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+        assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("foo").startObject("properties")
                 .startObject("bar").field("type", "text")
                 .startObject("fields")
                 .startObject("keyword")
                 .field("type", "keyword")
                 .field("ignore_above", 256)
                 .endObject()
@@ -428,7 +428,7 @@ public void testComplexArray() throws Exception {
                 .endObject()
                 .startObject("baz").field("type", "long").endObject()
                 .endObject().endObject()
-                .endObject().endObject().endObject().string(), serialize(update));
+                .endObject().endObject().endObject()), serialize(update));
     }

     public void testReuseExistingMappings() throws IOException, Exception {
@@ -551,13 +551,13 @@ public void testMixTemplateMultiFieldAndMappingReuse() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject().endObject();
-        indexService.mapperService().merge("_doc", new CompressedXContent(mappings1.bytes()),
+        indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(mappings1)),
             MapperService.MergeReason.MAPPING_UPDATE);

         XContentBuilder json = XContentFactory.jsonBuilder().startObject()
                 .field("field", "foo")
                 .endObject();
-        SourceToParse source = SourceToParse.source("test", "_doc", "1", json.bytes(), json.contentType());
+        SourceToParse source = SourceToParse.source("test", "_doc", "1", BytesReference.bytes(json), json.contentType());
         DocumentMapper mapper = indexService.mapperService().documentMapper("_doc");
         assertNull(mapper.mappers().getMapper("field.raw"));
         ParsedDocument parsed = mapper.parse(source);
@@ -591,7 +591,7 @@ public void testMixTemplateMultiFieldMultiTypeAndMappingReuse() throws Exception
                 .endObject()
                 .endArray()
                 .endObject().endObject();
-        indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), MapperService.MergeReason.MAPPING_UPDATE);
+        indexService.mapperService().merge("type1", new CompressedXContent(BytesReference.bytes(mappings1)), MapperService.MergeReason.MAPPING_UPDATE);

         XContentBuilder mappings2 = jsonBuilder().startObject()
                 .startObject("type2")
                     .startObject("properties")
                         .startObject("field")
                             .field("type", "text")
                         .endObject()
                     .endObject()
                 .endObject().endObject();
-        indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), MapperService.MergeReason.MAPPING_UPDATE);
+        indexService.mapperService().merge("type2", new CompressedXContent(BytesReference.bytes(mappings2)), MapperService.MergeReason.MAPPING_UPDATE);

         XContentBuilder json = XContentFactory.jsonBuilder().startObject()
                 .field("field", "foo")
                 .endObject();
-        SourceToParse source = SourceToParse.source("test", "type1", "1", json.bytes(), json.contentType());
+        SourceToParse source = SourceToParse.source("test", "type1", "1", BytesReference.bytes(json), json.contentType());
         DocumentMapper mapper = indexService.mapperService().documentMapper("type1");
         assertNull(mapper.mappers().getMapper("field.raw"));
         ParsedDocument parsed = mapper.parse(source);
@@ -620,10 +620,10 @@ public void testMixTemplateMultiFieldMultiTypeAndMappingReuse() throws Exception

     public void testDefaultFloatingPointMappings() throws IOException {
         MapperService mapperService = createIndex("test").mapperService();
-        String mapping = jsonBuilder().startObject()
+        String mapping = Strings.toString(jsonBuilder().startObject()
                 .startObject("type")
                 .field("numeric_detection", true)
-                .endObject().endObject().string();
+                .endObject().endObject());
         mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
         DocumentMapper mapper = mapperService.documentMapper("type");
         doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder());
@@ -633,12 +633,12 @@ public void testDefaultFloatingPointMappings() throws IOException {

     private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder) throws IOException {
-        BytesReference source = builder.startObject()
+        BytesReference source = BytesReference.bytes(builder.startObject()
                 .field("foo", 3.2f) // float
                 .field("bar", 3.2d) // double
                 .field("baz", (double) 3.2f) // double that can be accurately represented as a float
                 .field("quux", "3.2") // float detected through numeric detection
-                .endObject().bytes();
+                .endObject());
         ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source, builder.contentType()));
         Mapping update = parsedDocument.dynamicMappingsUpdate();
         assertNotNull(update);
@@ -649,20 +649,20 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB
     }

     public void testNumericDetectionEnabled() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .field("numeric_detection", true)
-                .endObject().endObject().string();
+                .endObject().endObject());

         IndexService index = createIndex("test");
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("s_long", "100")
-                .field("s_double", "100.0")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("s_long", "100")
+                .field("s_double", "100.0")
+                .endObject()),
                 XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("type")
@@ -677,19 +677,19 @@ public void testNumericDetectionEnabled() throws Exception {
     }

     public void testNumericDetectionDefault() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .endObject().endObject());

         IndexService index = createIndex("test");
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("s_long", "100")
-                .field("s_double", "100.0")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("s_long", "100")
+                .field("s_double", "100.0")
+                .endObject()),
                 XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertAcked(client().admin().indices().preparePutMapping("test").setType("type")
@@ -704,7 +704,7 @@ public void testNumericDetectionDefault() throws Exception {
     }

     public void testDateDetectionInheritsFormat() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startArray("dynamic_date_formats")
                     .value("yyyy-MM-dd")
                 .endArray()
@@ -727,19 +727,19 @@ public void testDateDetectionInheritsFormat() throws Exception {
                 .endObject()
                 .endObject()
                 .endArray()
-                .endObject().endObject().string();
+                .endObject().endObject());

         IndexService index = createIndex("test");
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
         DocumentMapper defaultMapper = index.mapperService().documentMapper("type");

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("date1", "2016-11-20")
-                .field("date2", "2016-11-20")
-                .field("date3", "2016-11-20")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("date1", "2016-11-20")
+                .field("date2", "2016-11-20")
+                .field("date3", "2016-11-20")
+                .endObject()),
                 XContentType.JSON));
         assertNotNull(doc.dynamicMappingsUpdate());
         assertAcked(client().admin().indices().preparePutMapping("test").setType("type")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java
index 37c887401f24a..fba85b1909cec 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java
@@ -19,6 +19,8 @@
 package org.elasticsearch.index.mapper;

+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -43,17 +45,17 @@ public void testDynamicMappingDefault() throws IOException {
             .documentMapperWithAutoCreate("my-type").getDocumentMapper();
         ParsedDocument parsedDoc = documentMapper.parse(
-            SourceToParse.source("my-index", "my-type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("foo", 3)
-                .endObject()
-                .bytes(), XContentType.JSON));
+            SourceToParse.source("my-index", "my-type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("foo", 3)
+                    .endObject()), XContentType.JSON));

-        String expectedMapping = XContentFactory.jsonBuilder().startObject()
+        String expectedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("my-type")
             .startObject("properties")
             .startObject("foo").field("type", "long")
-            .endObject().endObject().endObject().endObject().string();
+            .endObject().endObject().endObject().endObject());
         assertEquals(expectedMapping, parsedDoc.dynamicMappingsUpdate().toString());
     }

diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
index 7ed6efe516ab0..562d54a92babd 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.mapper;

 import org.elasticsearch.Version;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -86,7 +87,7 @@ public void testSerialization() throws Exception {
         DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
         XContentBuilder builder = JsonXContent.contentBuilder();
         template.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string());
+        assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

         // name-based template
         templateDef = new HashMap<>();
@@ -96,7 +97,7 @@ public void testSerialization() throws Exception {
         template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
         builder = JsonXContent.contentBuilder();
         template.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", builder.string());
+        assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

         // path-based template
         templateDef = new HashMap<>();
@@ -107,7 +108,7 @@ public void testSerialization() throws Exception {
         builder = JsonXContent.contentBuilder();
         template.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}",
-                builder.string());
+                Strings.toString(builder));

         // regex matching
         templateDef = new HashMap<>();
@@ -117,6 +118,6 @@ public void testSerialization() throws Exception {
         template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
         builder = JsonXContent.contentBuilder();
         template.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", builder.string());
+        assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
index 70cc2c08441eb..64927103e6d1d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
@@ -23,15 +23,12 @@
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.DocumentFieldMappers;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.ParseContext.Document;
-import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.hamcrest.Matchers;

@@ -51,7 +48,7 @@ public void testMatchTypeOnly() throws Exception {
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         builder = JsonXContent.contentBuilder();
         builder.startObject().field("s", "hello").field("l", 1).endObject();
-        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", builder.bytes(),
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", BytesReference.bytes(builder),
                 XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
@@ -74,7 +71,7 @@ public void testSimple() throws Exception {
         client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get();
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
-        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
                 XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
@@ -133,7 +130,7 @@ public void testSimpleWithXContentTraverse() throws Exception {
         client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get();
         DocumentMapper docMapper = index.mapperService().documentMapper("person");
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
-        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
+        ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json),
                 XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
index 72d6e8c4c2cc7..8f2a51bbfc2bd 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
@@ -23,6 +23,8 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.settings.Settings;
@@ -71,20 +73,20 @@ public void testExternalValues() throws Exception {
             indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(),
             mapperRegistry, queryShardContext);
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
-                XContentFactory.jsonBuilder().startObject().startObject("type")
+                Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject(ExternalMetadataMapper.CONTENT_TYPE)
                 .endObject()
                 .startObject("properties")
                     .startObject("field").field("type", "external").endObject()
                 .endObject()
-            .endObject().endObject().string()
+            .endObject().endObject())
         ));

-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field", "1234")
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().getField("field.bool"), notNullValue());
@@ -123,7 +125,8 @@ public void testExternalValuesWithMultifield() throws Exception {
             queryShardContext);

         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
-                XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+                Strings
+                    .toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("field")
                     .field("type", ExternalMapperPlugin.EXTERNAL)
                     .startObject("fields")
                         .startObject("field")
                             .field("type", "text")
                             .field("store", true)
                             .startObject("fields")
                                 .startObject("raw")
                                     .field("type", "keyword")
                                     .field("store", true)
                                 .endObject()
                             .endObject()
                         .endObject()
                     .endObject()
                 .endObject()
-                .endObject().endObject().endObject()
-                .string()));
+                .endObject().endObject().endObject())));

-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field", "1234")
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().getField("field.bool"), notNullValue());
@@ -189,7 +191,8 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
             queryShardContext);

         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
-                XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+                Strings
+                    .toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("field")
                     .field("type", ExternalMapperPlugin.EXTERNAL)
                     .startObject("fields")
                         .startObject("field")
                             .field("type", "text")
                             .startObject("fields")
                                 .startObject("generated")
                                     .field("type", "text")
                                 .endObject()
                             .endObject()
                         .endObject()
                         .startObject("raw")
                             .field("type", "text")
                         .endObject()
                     .endObject()
                 .endObject()
-                .endObject().endObject().endObject()
-                .string()));
+                .endObject().endObject().endObject())));

-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field", "1234")
+                .endObject()),
                 XContentType.JSON));

         assertThat(doc.rootDoc().getField("field.bool"), notNullValue());
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
index f075353736672..8138d46e95689 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.index.mapper;

 import org.apache.lucene.index.IndexOptions;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -61,9 +63,9 @@ public void testExtractFieldNames() {
     }

     public void testFieldType() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_field_names").endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
@@ -75,66 +77,66 @@ public void testFieldType() throws Exception {
     }

     public void testInjectIntoDocDuringParsing() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("a", "100")
-                .startObject("b")
-                .field("c", 42)
-                .endObject()
-                .endObject()
-                .bytes(),
+        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+            .bytes(XContentFactory.jsonBuilder()
+                .startObject()
+                .field("a", "100")
+                .startObject("b")
+                .field("c", 42)
+                .endObject()
+                .endObject()),
                 XContentType.JSON));

         assertFieldNames(Collections.emptySet(), doc);
     }

     public void testExplicitEnabled() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_field_names").field("enabled", true).endObject()
                 .startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertTrue(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON)); assertFieldNames(set("field"), doc); } public void testDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON)); assertNull(doc.rootDoc().get("_field_names")); } public void testMergingMappings() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String enabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", true).endObject() - .endObject().endObject().string(); - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); + String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index f8775073e2169..40fc0e81a920c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,14 +54,14 @@ protected Collection> getPlugins() { public void testGeoHashValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", stringEncode(1.3, 1.2)) + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -68,14 +70,14 @@ public void testGeoHashValue() throws Exception { public void testLatLonValuesStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -84,17 +86,17 @@ public void testLatLonValuesStored() throws Exception { public void testArrayLatLonValues() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .startObject().field("lat", 1.2).field("lon", 1.3).endObject() - .startObject().field("lat", 1.4).field("lon", 1.5).endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .startObject().field("lat", 1.2).field("lon", 1.3).endObject() + .startObject().field("lat", 1.4).field("lon", 1.5).endObject() + .endArray() + .endObject()), XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points @@ -105,15 +107,15 @@ public void testArrayLatLonValues() throws Exception { public void testLatLonInOneValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", "1.2,1.3") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -122,15 +124,15 @@ public void testLatLonInOneValue() throws Exception { public void testLatLonInOneValueStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", "1.2,1.3") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -138,18 +140,18 @@ public void testLatLonInOneValueStored() throws Exception { public void testLatLonInOneValueArray() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .value("1.2,1.3") - .value("1.4,1.5") - .endArray() 
- .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .value("1.2,1.3") + .value("1.4,1.5") + .endArray() + .endObject()), XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points @@ -160,14 +162,14 @@ public void testLatLonInOneValueArray() throws Exception { public void testLonLatArray() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -177,14 +179,14 @@ public void testLonLatArrayDynamic() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*") .startObject("mapping").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -193,14 +195,14 @@ public void testLonLatArrayDynamic() throws Exception { public void testLonLatArrayStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", 
XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -210,18 +212,18 @@ public void testLonLatArrayStored() throws Exception { public void testLonLatArrayArrayStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .startArray().value(1.3).value(1.2).endArray() - .startArray().value(1.5).value(1.4).endArray() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .startArray().value(1.3).value(1.2).endArray() + .startArray().value(1.5).value(1.4).endArray() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields("point"), notNullValue()); @@ -230,13 +232,13 @@ public void testLonLatArrayArrayStored() throws Exception { public void testMultiField() throws Exception { int numDocs = randomIntBetween(10, 100); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") .field("type", "geo_point") .startObject("fields") .startObject("geohash").field("type", "keyword").endObject() // test geohash as keyword .startObject("latlon").field("type", "keyword").endObject() // test geohash as string .endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test") .addMapping("pin", mapping, XContentType.JSON); mappingRequest.execute().actionGet(); @@ -262,9 +264,9 @@ public void testMultiField() throws Exception { public void testEmptyName() throws Exception { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "geo_point").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index a9a830a4141e9..fb143cc3898e4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -46,11 +47,11 @@ protected Collection> getPlugins() { } public void testDefaultConfiguration() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -69,12 +70,12 @@ public void testDefaultConfiguration() throws IOException { * Test that orientation parameter correctly parses */ public void testOrientationParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "left") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -86,12 +87,12 @@ public void testOrientationParsing() throws IOException { assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); // explicit right orientation test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "right") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -107,12 +108,12 @@ public void testOrientationParsing() throws IOException { * Test that coerce parameter correctly parses */ public void testCoerceParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("coerce", "true") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); 
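
Each geo_shape case in this file repeats one loop: serialize a mapping with the new helper, parse it, and assert on the resulting field mapper. A condensed sketch of that loop, with names taken from the hunks (the coerce parameter stands in for whichever option a given test exercises, and the assertion details are elided):

    String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject().startObject("type1")
                .startObject("properties").startObject("location")
                    .field("type", "geo_shape")
                    .field("coerce", "true")   // option under test
                .endObject().endObject()
            .endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService()
            .documentMapperParser().parse("type1", new CompressedXContent(mapping));
    FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
    // instanceOf and option-value assertions follow in each test

Only the Strings.toString(...) wrapper is new; the parse-and-assert structure is untouched by this patch.
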
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -122,12 +123,12 @@ public void testCoerceParsing() throws IOException { assertThat(coerce, equalTo(true)); // explicit false coerce test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("coerce", "false") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -141,12 +142,12 @@ public void testCoerceParsing() throws IOException { * Test that ignore_malformed parameter correctly parses */ public void testIgnoreMalformedParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("ignore_malformed", "true") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -156,12 +157,12 @@ public void testIgnoreMalformedParsing() throws IOException { assertThat(ignoreMalformed.value(), equalTo(true)); // explicit false ignore_malformed test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("ignore_malformed", "false") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -173,14 +174,14 @@ public void testIgnoreMalformedParsing() throws IOException { } public void testGeohashConfiguration() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("tree_levels", "4") .field("distance_error_pct", "0.1") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -195,7 +196,7 @@ public void testGeohashConfiguration() throws IOException { } public void testQuadtreeConfiguration() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -203,7 +204,7 @@ public void testQuadtreeConfiguration() throws IOException { .field("distance_error_pct", "0.5") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -222,7 +223,7 @@ public void testLevelPrecisionConfiguration() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -230,7 +231,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); @@ -247,14 +248,14 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .field("tree_levels", "26") .field("precision", "70m") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); @@ -273,7 +274,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") @@ -281,7 +282,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -297,7 +298,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") @@ -305,7 +306,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper 
fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -320,7 +321,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -328,7 +329,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -344,13 +345,13 @@ public void testLevelPrecisionConfiguration() throws IOException { } public void testPointsOnlyOption() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -366,13 +367,13 @@ public void testPointsOnlyOption() throws IOException { public void testLevelDefaults() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); @@ -389,13 +390,13 @@ public void testLevelDefaults() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -412,16 +413,16 @@ public void testLevelDefaults() throws IOException { } public void testGeoShapeMapperMerge() throws Exception { - String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive") .field("precision", "1m").field("tree_levels", 
8).field("distance_error_pct", 0.01).field("orientation", "ccw") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE); - String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26) - .field("orientation", "cw").endObject().endObject().endObject().endObject().string(); + .field("orientation", "cw").endObject().endObject().endObject().endObject()); try { mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); fail(); @@ -446,9 +447,9 @@ public void testGeoShapeMapperMerge() throws Exception { assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); // correct mapping - stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") - .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); + .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject()); docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); fieldMapper = docMapper.mappers().getMapper("shape"); @@ -466,11 +467,11 @@ public void testGeoShapeMapperMerge() throws Exception { public void testEmptyName() throws Exception { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java index 111389336f291..764cabf65363b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java @@ -22,7 +22,9 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -45,12 +47,12 @@ protected Collection> 
getPlugins() { } public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_id", "1").endObject().bytes(), XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_id", "1").endObject()), XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index 910fa0f74faba..5e60e248927d7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -19,16 +19,11 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -47,15 +42,15 @@ protected Collection> getPlugins() { } public void testDefaultDisabledIndexMapper() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("_index"), nullValue()); @@ -63,9 +58,9 @@ public void testDefaultDisabledIndexMapper() throws Exception { } public void testIndexNotConfigurable() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("_index").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 8632a936de0ef..28a3a2f16f28b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -23,6 +23,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ToXContent; @@ -58,19 +60,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -87,19 +89,19 @@ public void testDefaults() throws Exception { } public void testNotIndexed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -109,19 +111,19 @@ public void testNotIndexed() throws Exception { } public void testNoDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field").field("type", "ip").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -132,19 +134,19 @@ public void testNoDocValues() throws Exception { } public void testStore() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -160,34 +162,34 @@ public void testStore() throws Exception { } public void testIgnoreMalformed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", 
XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -195,27 +197,27 @@ public void testIgnoreMalformed() throws Exception { } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", "ip") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -223,16 +225,16 @@ public void testNullValue() throws IOException { .field("null_value", "::1") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -248,15 +250,15 @@ public void testNullValue() throws IOException { } public void testSerializeDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); IpFieldMapper mapper = (IpFieldMapper)docMapper.root().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS); - String got = builder.endObject().string(); + String got = Strings.toString(builder.endObject()); // it would be nice to check the entire serialized default mapper, but there are // a whole lot of bogus settings right now it picks up from calling super.doXContentBody... 
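
The serialization direction uses the same helper, as in the testSerializeDefaults hunk just above: the mapper writes itself into a builder, and Strings.toString(...) replaces the old builder.endObject().string() call. A minimal sketch reusing the names from that hunk:

    XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
    mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS);
    String got = Strings.toString(builder.endObject());
    // 'got' now holds the serialized field mapping for comparison

So the builder chains themselves never change in this patch; only the terminal conversion moves from an instance method on XContentBuilder to a static utility.
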
@@ -265,9 +267,9 @@ public void testSerializeDefaults() throws Exception { } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java index 829c05701fffe..e16b04748a18b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java @@ -20,6 +20,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,18 +52,18 @@ public void testStoreCidr() throws Exception { .startObject("properties").startObject("field").field("type", "ip_range") .field("store", true); mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); final Map cases = new HashMap<>(); cases.put("192.168.0.0/15", "192.169.255.255"); cases.put("192.168.0.0/16", "192.168.255.255"); cases.put("192.168.0.0/17", "192.168.127.255"); for (final Map.Entry entry : cases.entrySet()) { ParsedDocument doc = - mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", entry.getKey()) - .endObject().bytes(), + .endObject()), XContentType.JSON )); IndexableField[] fields = doc.rootDoc().getFields("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index 49034a0b28785..32084c50310fc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -25,8 +25,6 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -45,7 +43,7 @@ public void testMergeMultiField() throws Exception { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); 
assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); - BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); + BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); @@ -101,7 +99,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); - BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); + BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index aa663ed5699a0..bffb1737eeb93 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -25,6 +25,8 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -77,19 +79,19 @@ public void setup() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -114,29 +116,29 @@ public void testDefaults() throws Exception { } public void testIgnoreAbove() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("ignore_above", 5).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "elk") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "elk") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "elasticsearch") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "elasticsearch") + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -144,43 +146,43 @@ public void testIgnoreAbove() throws IOException { } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -189,19 +191,19 @@ public void testNullValue() throws IOException { } public void testEnableStore() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -210,19 +212,19 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -232,19 +234,19 @@ public void testDisableIndex() throws IOException { } public void testDisableDocValues() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -253,20 +255,20 @@ public void testDisableDocValues() throws IOException { } public void testIndexOptions() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword") .field("index_options", 
"freqs").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -274,10 +276,10 @@ public void testIndexOptions() throws IOException { assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); for (String indexOptions : Arrays.asList("positions", "offsets")) { - final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword") .field("index_options", indexOptions).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping2))); assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); @@ -285,9 +287,9 @@ public void testIndexOptions() throws IOException { } public void testBoost() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -295,19 +297,19 @@ public void testBoost() throws IOException { } public void testEnableNorms() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -316,20 +318,20 @@ public void testEnableNorms() throws IOException { } public void testNormalizer() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", "my_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "AbC") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "AbC") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -354,16 +356,16 @@ public void testNormalizer() throws IOException { } public void testUpdateNormalizer() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", "my_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", "my_other_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); @@ -373,14 +375,14 @@ public void testUpdateNormalizer() throws IOException { } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("") .field("type", "keyword") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 6d7665d889563..1531c2251a146 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -141,9 +143,9 @@ public void testIndexIntoDefaultMapping() throws Throwable { public void testTotalFieldsExceedsLimit() throws Throwable { Function mapping = type -> { try { - return 
XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("field1").field("type", "keyword") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -158,22 +160,22 @@ public void testTotalFieldsExceedsLimit() throws Throwable { } public void testMappingDepthExceedsLimit() throws Throwable { - CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent simpleMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); IndexService indexService1 = createIndex("test1", Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build()); // no exception indexService1.mapperService().merge("type", simpleMapping, MergeReason.MAPPING_UPDATE); - CompressedXContent objectMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent objectMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("object1") .field("type", "object") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); IndexService indexService2 = createIndex("test2"); // no exception @@ -231,12 +233,12 @@ public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IO IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); // multiple types - CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent simpleMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); indexService.mapperService().merge("type1", simpleMapping, MergeReason.MAPPING_UPDATE); DocumentMapper documentMapper = indexService.mapperService().documentMapper("type1"); @@ -244,13 +246,13 @@ public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IO indexService.mapperService().merge("type2", simpleMapping, MergeReason.MAPPING_UPDATE); assertSame(indexService.mapperService().documentMapper("type1"), documentMapper); - CompressedXContent normsDisabledMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent normsDisabledMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .field("norms", false) .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); indexService.mapperService().merge("type3", normsDisabledMapping, MergeReason.MAPPING_UPDATE); assertNotSame(indexService.mapperService().documentMapper("type1"), documentMapper); @@ -299,12 +301,12 @@ public void testIndexSortWithNestedFields() throws IOException { assertThat(invalidNestedException.getMessage(), containsString("cannot have nested fields when index sort is activated")); IndexService 
indexService = createIndex("test", settings, "t", "foo", "type=keyword"); - CompressedXContent nestedFieldMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent nestedFieldMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("nested_field") .field("type", "nested") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); invalidNestedException = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("t", nestedFieldMapping, MergeReason.MAPPING_UPDATE)); @@ -313,18 +315,18 @@ public void testIndexSortWithNestedFields() throws IOException { } public void testForbidMultipleTypes() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject().string(); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), Matchers.startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); } public void testDefaultMappingIsDeprecated() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("_default_", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertWarnings("[_default_] mapping is deprecated since it is not useful anymore now that indexes " + diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 4d9323bddb1ad..9e31bd76c3016 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +40,7 @@ public void testExceptionForCopyToInMultiFields() throws IOException { // first check that for newer versions we throw exception if copy_to is found withing multi field MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test"); try { - mapperService.parse("type", new CompressedXContent(mapping.string()), true); + mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true); fail("Parsing should throw an exception because the mapping 
contains a copy_to in a multi field"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field.")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index adc84277a6ed6..0d5b4ca154a5a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -168,7 +169,7 @@ public void testMultiFieldsInConsistentOrder() throws Exception { builder = builder.startObject(multiFieldName).field("type", "text").endObject(); } builder = builder.endObject().endObject().endObject().endObject().endObject(); - String mapping = builder.string(); + String mapping = Strings.toString(builder); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); Arrays.sort(multiFieldNames); @@ -186,9 +187,9 @@ public void testMultiFieldsInConsistentOrder() throws Exception { } public void testObjectFieldNotAllowed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") .field("type", "text").startObject("fields").startObject("multi").field("type", "object").endObject().endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); try { parser.parse("type", new CompressedXContent(mapping)); @@ -199,9 +200,9 @@ public void testObjectFieldNotAllowed() throws Exception { } public void testNestedFieldNotAllowed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") .field("type", "text").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); try { parser.parse("type", new CompressedXContent(mapping)); @@ -231,7 +232,7 @@ public void testMultiFieldWithDot() throws IOException { MapperService mapperService = createIndex("test").mapperService(); try { - mapperService.documentMapperParser().parse("my_type", new CompressedXContent(mapping.string())); + mapperService.documentMapperParser().parse("my_type", new CompressedXContent(Strings.toString(mapping))); fail("this should throw an exception because one field contains a dot"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot 
contain '.'")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index bbcad5b7203a2..8bb9d84f576d9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -22,6 +22,8 @@ import java.util.HashSet; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,37 +54,37 @@ protected Collection> getPlugins() { } public void testEmptyNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .nullField("nested1") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .nullField("nested1") + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); - doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested").endArray() - .endObject() - .bytes(), + doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested").endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); } public void testSingleNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -90,12 +92,12 @@ public void testSingleNested() throws Exception { ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); assertThat(nested1Mapper.nested().isNested(), equalTo(true)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startObject("nested1").field("field1", "1").field("field2", "2").endObject() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startObject("nested1").field("field1", "1").field("field2", "2").endObject() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(2)); @@ -106,15 +108,15 @@ public void testSingleNested() throws Exception { assertThat(doc.docs().get(1).get("field"), equalTo("value")); - doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").field("field2", "2").endObject() - .startObject().field("field1", "3").field("field2", "4").endObject() - .endArray() - .endObject() - .bytes(), + doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").field("field2", "2").endObject() + .startObject().field("field1", "3").field("field2", "4").endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(3)); @@ -129,11 +131,11 @@ public void testSingleNested() throws Exception { } public void testMultiNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -147,15 +149,15 @@ public void testMultiNested() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -181,11 +183,11 @@ public void testMultiNested() throws Exception { } public void testMultiObjectAndNested1() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -199,15 +201,15 @@ public void testMultiObjectAndNested1() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -233,11 +235,11 @@ public void testMultiObjectAndNested1() throws Exception { } public void testMultiObjectAndNested2() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("include_in_parent", true).startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -251,15 +253,15 @@ public void testMultiObjectAndNested2() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", 
"4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -285,11 +287,11 @@ public void testMultiObjectAndNested2() throws Exception { } public void testMultiRootAndNested1() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_root", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -303,15 +305,15 @@ public void testMultiRootAndNested1() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(true)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -342,21 +344,21 @@ public void testMultiRootAndNested1() throws Exception { * lead to duplicate fields on the root document. 
*/ public void testMultipleLevelsIncludeRoot1() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("include_in_root", true).field("include_in_parent", true).startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_root", true).field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startArray("nested1") - .startObject().startArray("nested2").startObject().field("foo", "bar") - .endObject().endArray().endObject().endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startArray("nested1") + .startObject().startArray("nested2").startObject().field("foo", "bar") + .endObject().endArray().endObject().endArray() + .endObject()), XContentType.JSON)); final Collection fields = doc.rootDoc().getFields(); @@ -371,7 +373,7 @@ public void testMultipleLevelsIncludeRoot1() throws Exception { * {@code false} and {@code include_in_root} set to {@code true}. */ public void testMultipleLevelsIncludeRoot2() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested") .field("include_in_root", true).field("include_in_parent", true).startObject("properties") @@ -380,17 +382,17 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { .startObject("nested3").field("type", "nested") .field("include_in_root", true).field("include_in_parent", true) .endObject().endObject().endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startArray("nested1") - .startObject().startArray("nested2") - .startObject().startArray("nested3").startObject().field("foo", "bar") - .endObject().endArray().endObject().endArray().endObject().endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startArray("nested1") + .startObject().startArray("nested2") + .startObject().startArray("nested3").startObject().field("foo", "bar") + .endObject().endArray().endObject().endArray().endObject().endArray() + .endObject()), XContentType.JSON)); final Collection fields = doc.rootDoc().getFields(); @@ -398,11 +400,11 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { } public void testNestedArrayStrict() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("dynamic", "strict").startObject("properties") .startObject("field1").field("type", "text") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -411,15 +413,15 @@ public void testNestedArrayStrict() throws Exception { assertThat(nested1Mapper.nested().isNested(), equalTo(true)); assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").endObject() - .startObject().field("field1", "4").endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").endObject() + .startObject().field("field1", "4").endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(3)); @@ -433,11 +435,11 @@ public void testNestedArrayStrict() throws Exception { public void testLimitOfNestedFieldsPerIndex() throws Exception { Function mapping = type -> { try { - return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -466,11 +468,11 @@ public void testLimitOfNestedFieldsPerIndex() throws Exception { public void testLimitOfNestedFieldsWithMultiTypePerIndex() throws Exception { Function mapping = type -> { try { - return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -483,8 +485,8 @@ public void testLimitOfNestedFieldsWithMultiTypePerIndex() throws Exception { // merging same fields, but different type is ok mapperService.merge("type2", new CompressedXContent(mapping.apply("type2")), MergeReason.MAPPING_UPDATE); // adding new fields from different type is not ok - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") - .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject().string(); + String mapping2 = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") + .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded")); @@ -527,9 +529,9 @@ public void testParentObjectMapperAreNested() throws Exception { public void testLimitNestedDocsDefaultSettings() throws Exception{ Settings settings = Settings.builder().build(); MapperService mapperService = createIndex("test1", settings).mapperService(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); long defaultMaxNoNestedDocs = MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.get(settings); @@ -546,7 +548,7 @@ public void testLimitNestedDocsDefaultSettings() throws Exception{ docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = SourceToParse.source("test1", "type", "1", docBuilder.bytes(), XContentType.JSON); + SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" + defaultMaxNoNestedDocs @@ -561,9 +563,9 @@ public void testLimitNestedDocs() throws Exception{ long maxNoNestedDocs = 2L; MapperService mapperService = createIndex("test1", Settings.builder() .put(MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey(), maxNoNestedDocs).build()).mapperService(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); // parsing a doc with 2 nested objects succeeds @@ -578,7 +580,7 @@ public void testLimitNestedDocs() throws Exception{ docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = SourceToParse.source("test1", "type", "1", docBuilder.bytes(), XContentType.JSON); + SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -595,7 +597,7 @@ public void testLimitNestedDocs() throws Exception{ docBuilder2.endArray(); } docBuilder2.endObject(); - SourceToParse source2 = SourceToParse.source("test1", "type", "2", docBuilder2.bytes(), XContentType.JSON); + SourceToParse source2 = SourceToParse.source("test1", "type", "2", 
BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs @@ -610,10 +612,10 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception{ long maxNoNestedDocs = 2L; MapperService mapperService = createIndex("test1", Settings.builder() .put(MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey(), maxNoNestedDocs).build()).mapperService(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() .startObject("nested2").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); // parsing a doc with 2 nested objects succeeds @@ -632,7 +634,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception{ docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = SourceToParse.source("test1", "type", "1", docBuilder.bytes(), XContentType.JSON); + SourceToParse source1 = SourceToParse.source("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -654,7 +656,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception{ } docBuilder2.endObject(); - SourceToParse source2 = SourceToParse.source("test1", "type", "2", docBuilder2.bytes(), XContentType.JSON); + SourceToParse source2 = SourceToParse.source("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java index 8a46f24998db9..815388eeffc55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -32,38 +32,38 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase { public void testNullValueObject() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("obj1").field("type", "object").endObject().endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("obj1").endObject() - .field("value1", "test1") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("obj1").endObject() + .field("value1", "test1") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("obj1") - .field("value1", "test1") - .endObject() - .bytes(), + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("obj1") + .field("value1", "test1") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("obj1").field("field", "value").endObject() - .field("value1", "test1") - .endObject() - .bytes(), + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("obj1").field("field", "value").endObject() + .field("value1", "test1") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("obj1.field"), equalTo("value")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java index bc054564a6863..d9502d8e8800c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java @@ -1,5 +1,6 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; /* @@ -25,7 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; @@ -36,7 +36,7 @@ public void testNullNullValue() throws Exception { String[] typesToTest = {"integer", "long", "double", "float", "short", "date", "ip", "keyword", "boolean", "byte"}; for (String type : typesToTest) { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -46,7 +46,7 @@ public void testNullNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); try { indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index afbf63a23bd32..66b90cdca3a3a 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; @@ -47,19 +48,19 @@ protected void setTypeList() { @Override public void doTestDefaults(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -75,19 +76,19 @@ public void doTestDefaults(String type) throws Exception { @Override public void doTestNotIndexed(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -98,19 +99,19 @@ public void doTestNotIndexed(String type) throws Exception { @Override public void doTestNoDocValues(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -122,19 +123,19 @@ public void doTestNoDocValues(String type) throws Exception { @Override public void doTestStore(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -151,19 +152,19 @@ public void doTestStore(String type) throws Exception { @Override public void doTestCoerce(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -174,19 +175,19 @@ public void doTestCoerce(String type) throws IOException { IndexableField dvField = fields[1]; assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); 
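The mechanical change repeated across all of these test hunks follows one pattern: the removed `XContentBuilder` terminal methods `string()` and `bytes()` are replaced by the static helpers `Strings.toString(builder)` and `BytesReference.bytes(builder)`, with the finished builder passed as an argument instead of chained. A minimal sketch of that pattern (the class name here is hypothetical; the helper calls are the ones used in the hunks above):

    import java.io.IOException;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class XContentMigrationSketch {
        public static void main(String[] args) throws IOException {
            // was: String mapping = XContentFactory.jsonBuilder()...endObject().string();
            XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
                    .startObject().startObject("type")
                        .startObject("properties").startObject("field")
                            .field("type", "keyword")
                        .endObject().endObject()
                    .endObject().endObject();
            String mapping = Strings.toString(mappingBuilder);

            // was: BytesReference json = XContentFactory.jsonBuilder()...endObject().bytes();
            XContentBuilder docBuilder = XContentFactory.jsonBuilder()
                    .startObject()
                        .field("field", "1234")
                    .endObject();
            BytesReference json = BytesReference.bytes(docBuilder);

            System.out.println(mapping);             // {"type":{"properties":{"field":{"type":"keyword"}}}}
            System.out.println(json.utf8ToString()); // {"field":"1234"}
        }
    }

Everything downstream of the conversion is untouched: the resulting `String` still goes into `new CompressedXContent(mapping)` and the resulting `BytesReference` into `SourceToParse.source(...)`, exactly as before.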
assertThat(e.getCause().getMessage(), containsString("passed as String")); @@ -194,19 +195,19 @@ public void doTestCoerce(String type) throws IOException { @Override protected void doTestDecimalCoerce(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "7.89") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "7.89") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -221,35 +222,35 @@ public void testIgnoreMalformed() throws Exception { } private void doTestIgnoreMalformed(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "a") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "a") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -260,13 +261,13 @@ public void testRejectNorms() throws IOException { // not supported as of 5.0 for (String type : TYPES) { DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", type) .field("norms", random().nextBoolean()) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); @@ -279,13 +280,13 @@ public void testRejectNorms() throws IOException { public void testRejectIndexOptions() throws IOException { for (String type : TYPES) { DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", type) .field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" })) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [" + type +"]")); @@ -294,23 +295,23 @@ public void testRejectIndexOptions() throws IOException { @Override protected void doTestNullValue(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", type) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); @@ -320,7 +321,7 @@ protected void doTestNullValue(String type) throws IOException { } else { missing = 123L; } - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -328,16 +329,16 @@ protected void doTestNullValue(String type) throws IOException { .field("null_value", missing) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -354,9 +355,9 @@ protected void doTestNullValue(String type) throws IOException { public void testEmptyName() throws IOException { // after version 5 for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) @@ -424,29 +425,29 @@ private void parseRequest(NumberType type, BytesReference content) throws IOExce } private DocumentMapper createDocumentMapper(NumberType type) throws IOException { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", type.typeName()) + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", type.typeName()) + .endObject() .endObject() .endObject() - .endObject() - .endObject() - .string(); + .endObject()); return parser.parse("type", new CompressedXContent(mapping)); } private BytesReference createIndexRequest(Object value) throws IOException { if (value instanceof BigInteger) { - return XContentFactory.jsonBuilder() + return BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .rawField("field", new ByteArrayInputStream(value.toString().getBytes("UTF-8")), XContentType.JSON) - .endObject().bytes(); + .endObject()); } else { - return XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes(); + return BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index ea8f63345a183..94e63cdb85936 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -19,33 +19,26 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ObjectMapper.Dynamic; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import 
java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class ObjectMapperTests extends ESSingleNodeTestCase { public void testDifferentInnerObjectTokenFailure() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { @@ -68,45 +61,45 @@ public void testDifferentInnerObjectTokenFailure() throws Exception { } public void testEmptyArrayProperties() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("properties").endArray() - .endObject().endObject().string(); + .endObject().endObject()); createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); } public void testEmptyFieldsArrayMultiFields() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startArray("fields") - .endArray() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startArray("fields") + .endArray() + .endObject() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startArray("fields") - .startObject().field("test", "string").endObject() - .startObject().field("test2", "string").endObject() - .endArray() + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startArray("fields") + .startObject().field("test", "string").endObject() + .startObject().field("test2", "string").endObject() + .endArray() + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); try { createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); @@ -117,32 +110,32 @@ public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception { } public void testEmptyFieldsArray() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startArray("fields") - .endArray() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + 
.startArray("fields") + .endArray() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsWithFilledArrayShouldThrowException() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startArray("fields") - .startObject().field("test", "string").endObject() - .startObject().field("test2", "string").endObject() - .endArray() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startArray("fields") + .startObject().field("test", "string").endObject() + .startObject().field("test2", "string").endObject() + .endArray() + .endObject() + .endObject() + .endObject()); try { createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); @@ -152,54 +145,54 @@ public void testFieldsWithFilledArrayShouldThrowException() throws Exception { } public void testFieldPropertiesArray() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startObject("fields") - .startObject("raw") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startObject("fields") + .startObject("raw") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testMerge() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("foo") .field("type", "keyword") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertNull(mapper.root().dynamic()); - String update = XContentFactory.jsonBuilder().startObject() + String update = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .field("dynamic", "strict") - .endObject().endObject().string(); + .endObject().endObject()); mapper = mapperService.merge("type", new CompressedXContent(update), MergeReason.MAPPING_UPDATE); assertEquals(Dynamic.STRICT, mapper.root().dynamic()); } public void testEmptyName() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("") .startObject("properties") .startObject("name") .field("type", "text") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () 
-> { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java index dee554449bcc4..f399902920e33 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java @@ -21,7 +21,9 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -58,13 +60,13 @@ protected Collection<Class<? extends Plugin>> getPlugins() { } public void testParentSetInDocNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_parent", "1122").endObject().bytes(), XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_parent", "1122").endObject()), XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_parent] is a metadata field and cannot be added inside a document")); @@ -72,11 +74,11 @@ public void testParentSetInDocNotAllowed() throws Exception { } public void testJoinFieldSet() throws Exception { - String parentMapping = XContentFactory.jsonBuilder().startObject().startObject("parent_type") - .endObject().endObject().string(); - String childMapping = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String parentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("parent_type") + .endObject().endObject()); + String childMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); indexService.mapperService().merge("parent_type", new CompressedXContent(parentMapping), MergeReason.MAPPING_UPDATE); indexService.mapperService().merge("child_type", new CompressedXContent(childMapping), MergeReason.MAPPING_UPDATE); @@ -97,14 +99,14 @@ public void testJoinFieldSet() throws Exception { } public void testJoinFieldNotSet() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new
CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("x_field", "x_value") - .endObject() - .bytes(), XContentType.JSON)); + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("x_field", "x_value") + .endObject()), XContentType.JSON)); assertEquals(0, getNumberOfFieldWithParentPrefix(doc.rootDoc())); } @@ -121,7 +123,7 @@ public void testNoParentNullFieldCreatedIfNoParentSpecified() throws Exception { .startObject("properties") .endObject() .endObject().endObject(); - mapperService.merge("some_type", new CompressedXContent(mappingSource.string()), MergeReason.MAPPING_UPDATE); + mapperService.merge("some_type", new CompressedXContent(Strings.toString(mappingSource)), MergeReason.MAPPING_UPDATE); Set<String> allFields = new HashSet<>(mapperService.simpleMatchToIndexNames("*")); assertTrue(allFields.contains("_parent")); assertFalse(allFields.contains("_parent#null")); @@ -140,20 +142,20 @@ private static int getNumberOfFieldWithParentPrefix(ParseContext.Document doc) { } public void testUpdateEagerGlobalOrds() throws IOException { - String parentMapping = XContentFactory.jsonBuilder().startObject().startObject("parent_type") - .endObject().endObject().string(); - String childMapping = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String parentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("parent_type") + .endObject().endObject()); + String childMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); indexService.mapperService().merge("parent_type", new CompressedXContent(parentMapping), MergeReason.MAPPING_UPDATE); indexService.mapperService().merge("child_type", new CompressedXContent(childMapping), MergeReason.MAPPING_UPDATE); assertTrue(indexService.mapperService().documentMapper("child_type").parentFieldMapper().fieldType().eagerGlobalOrdinals()); - String childMappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String childMappingUpdate = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").field("eager_global_ordinals", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); indexService.mapperService().merge("child_type", new CompressedXContent(childMappingUpdate), MergeReason.MAPPING_UPDATE); assertFalse(indexService.mapperService().documentMapper("child_type").parentFieldMapper().fieldType().eagerGlobalOrdinals()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index 4b705f97a31ba..54418850e5d4f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -22,6 +22,8 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import
org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ToXContent; @@ -116,16 +118,16 @@ public void doTestDefaults(String type) throws Exception { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -147,16 +149,16 @@ protected void doTestNotIndexed(String type) throws Exception { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -171,16 +173,16 @@ protected void doTestNoDocValues(String type) throws Exception { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -197,16 +199,16 @@ protected void doTestStore(String type) throws Exception { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), 
mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -235,17 +237,17 @@ public void doTestCoerce(String type) throws IOException { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -260,14 +262,14 @@ public void doTestCoerce(String type) throws IOException { mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field") .field("type", type).field("coerce", false).endObject().endObject().endObject().endObject(); - DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper2.mappingSource().toString()); + assertEquals(Strings.toString(mapping), mapper2.mappingSource().toString()); ThrowingRunnable runnable = () -> mapper2 .parse(SourceToParse.source( - "test", "type", "1", XContentFactory.jsonBuilder().startObject().startObject("field") - .field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject().bytes(), + "test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("field") + .field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"), @@ -281,26 +283,26 @@ protected void doTestDecimalCoerce(String type) throws IOException { .startObject("properties").startObject("field").field("type", type); mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + assertEquals(Strings.toString(mapping), 
mapper.mappingSource().toString()); - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(GT_FIELD.getPreferredName(), "2.34") .field(LT_FIELD.getPreferredName(), "5.67") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); - ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(GT_FIELD.getPreferredName(), "2") .field(LT_FIELD.getPreferredName(), "5") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields1 = doc1.rootDoc().getFields("field"); @@ -318,17 +320,17 @@ protected void doTestNullValue(String type) throws IOException { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); // test null value for min and max - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .nullField(getFromField()) .nullField(getToField()) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertEquals(3, doc.rootDoc().getFields("field").length); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -337,13 +339,13 @@ protected void doTestNullValue(String type) throws IOException { assertThat(storedField.stringValue(), containsString(expected)); // test null max value - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .nullField(getToField()) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -365,11 +367,11 @@ protected void doTestNullValue(String type) throws IOException { assertThat(storedField.stringValue(), containsString(strVal)); // test null range - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertNull(doc.rootDoc().get("field")); } @@ -388,15 +390,15 @@ public void doTestNoBounds(String type) throws IOException { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new 
CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); // test no bounds specified - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -417,22 +419,22 @@ public void testIllegalArguments() throws Exception { .startObject("properties").startObject("field").field("type", RangeFieldMapper.RangeType.INTEGER.name) .field("format", DATE_FORMAT).endObject().endObject().endObject().endObject(); - ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(mapping.string())); + ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(Strings.toString(mapping))); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable); assertThat(e.getMessage(), containsString("should not define a dateTimeFormatter")); } public void testSerializeDefaults() throws Exception { for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); RangeFieldMapper mapper = (RangeFieldMapper) docMapper.root().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS); - String got = builder.endObject().string(); + String got = Strings.toString(builder.endObject()); // if type is date_range we check that the mapper contains the default format and locale // otherwise it should not contain a locale or format diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index c93d968b448fe..0aa8565ea572c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.network.InetAddresses; @@ -54,14 +55,14 @@ public class RangeFieldQueryStringQueryBuilderTests extends AbstractQueryTestCas @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge("_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("_doc", + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, "type=long_range", FLOAT_RANGE_FIELD_NAME, 
"type=float_range", DOUBLE_RANGE_FIELD_NAME, "type=double_range", DATE_RANGE_FIELD_NAME, "type=date_range", IP_RANGE_FIELD_NAME, "type=ip_range" - ).string()), MapperService.MergeReason.MAPPING_UPDATE); + ))), MapperService.MergeReason.MAPPING_UPDATE); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index d55b90573e9cd..ec21a1f7286a4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -29,99 +30,99 @@ public class RootObjectMapperTests extends ESSingleNodeTestCase { public void testNumericDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", false) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", true) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping2, mapper.mappingSource().toString()); } public void testDateDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", true) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", false) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() 
.startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping2, mapper.mappingSource().toString()); } public void testDateFormatters() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList("YYYY-MM-dd")) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); // no update if formatters are not set explicitly - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList()) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } public void testDynamicTemplates() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startArray("dynamic_templates") @@ -135,26 +136,26 @@ public void testDynamicTemplates() throws Exception { .endObject() .endArray() .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); // no update if templates are not set explicitly - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_templates", Arrays.asList()) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java index fb98f42f105eb..1b83b1bcb5b67 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java @@ -19,13 +19,11 @@ package org.elasticsearch.index.mapper; +import 
org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; @@ -33,15 +31,15 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase { public void testRoutingMapper() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON).routing("routing_value")); assertThat(doc.rootDoc().get("_routing"), equalTo("routing_value")); @@ -49,12 +47,12 @@ public void testRoutingMapper() throws Exception { } public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_routing", "foo").endObject().bytes(),XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_routing", "foo").endObject()),XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_routing] is a metadata field and cannot be added inside a document")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index f40229e9ebe78..e605a672c2982 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -22,7 +22,9 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentFactory; @@ -48,39 +50,39 @@ protected Collection<Class<? extends Plugin>> getPlugins() { } public void testNoFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON)); documentMapper = parser.parse("type", new CompressedXContent(mapping)); - doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.smileBuilder().startObject() + doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.smileBuilder().startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.SMILE)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE)); } public void testIncludes() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", new String[]{"path1*"}).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() .startObject("path2").field("field2", "value2").endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); @@ -93,16 +95,16 @@ public void testIncludes() throws Exception { } public void testExcludes() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", new String[]{"path1*"}).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject()
.startObject("path2").field("field2", "value2").endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); @@ -115,9 +117,9 @@ public void testExcludes() throws Exception { } public void testDefaultMappingAndNoMapping() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper mapper = parser.parse("my_type", null, defaultMapping); @@ -143,13 +145,13 @@ public void testDefaultMappingAndNoMapping() throws Exception { } public void testDefaultMappingAndWithMappingOverride() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser() .parse("my_type", new CompressedXContent(mapping), defaultMapping); @@ -158,9 +160,9 @@ public void testDefaultMappingAndWithMappingOverride() throws Exception { } public void testDefaultMappingAndNoMappingWithMapperService() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build(); MapperService mapperService = createIndex("test", settings).mapperService(); @@ -172,17 +174,17 @@ public void testDefaultMappingAndNoMappingWithMapperService() throws Exception { } public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build(); MapperService mapperService = createIndex("test", settings).mapperService(); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") + String 
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("my_type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); DocumentMapper mapper = mapperService.documentMapper("my_type"); @@ -210,31 +212,31 @@ void assertConflicts(String mapping1, String mapping2, DocumentMapperParser pars public void testEnabledNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); // using default of true - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping2, parser, "Cannot update enabled setting for [_source]"); // not changing is ok - String mapping3 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping3, parser); } public void testIncludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(defaultMapping, mapping1, parser, "Cannot update includes setting for [_source]"); assertConflicts(mapping1, defaultMapping, parser, "Cannot update includes setting for [_source]"); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*", "bar.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping2, parser, "Cannot update includes setting for [_source]"); // not changing is ok @@ -243,16 +245,16 @@ public void testIncludesNotUpdateable() throws Exception { public void testExcludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String defaultMapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
+        String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").array("excludes", "foo.*").endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         assertConflicts(defaultMapping, mapping1, parser, "Cannot update excludes setting for [_source]");
         assertConflicts(mapping1, defaultMapping, parser, "Cannot update excludes setting for [_source]");

-        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").array("excludes", "foo.*", "bar.*").endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         assertConflicts(mapping1, mapping2, parser, "Cannot update excludes setting for [_source]");

         // not changing is ok
@@ -261,27 +263,27 @@ public void testExcludesNotUpdateable() throws Exception {

     public void testComplete() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());

-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").field("enabled", false).endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());

-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").array("includes", "foo.*").endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());

-        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").array("excludes", "foo.*").endObject()
-            .endObject().endObject().string();
+            .endObject().endObject());
         assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
     }

     public void testSourceObjectContainsExtraTokens() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());

         DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         try {
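The hunks above and below all apply the same mechanical migration: the removed XContentBuilder accessors builder.string() and builder.bytes() are replaced by the static helpers Strings.toString(builder) and BytesReference.bytes(builder), which both appear throughout this patch. A minimal sketch of the two substitutions, for orientation only (this snippet is not part of the patch, and the class name MigrationSketch is made up):

    import java.io.IOException;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class MigrationSketch {

        // was: builder.string() -- serialize the builder's content to a JSON string
        static String asJsonString() throws IOException {
            XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject().field("field", "value").endObject();
            return Strings.toString(builder);
        }

        // was: builder.bytes() -- serialize the builder's content to a BytesReference
        static BytesReference asBytes() throws IOException {
            XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject().field("field", "value").endObject();
            return BytesReference.bytes(builder);
        }
    }

Every call site changed in this series is one of these two forms, with the helper wrapped around the existing builder chain.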
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
index 2ff2bda01df25..95ffc373e6b8f 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
@@ -24,15 +24,14 @@
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
-import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.test.ESSingleNodeTestCase;

 import java.util.Collections;
@@ -43,41 +42,41 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {

     public void testBytesAndNumericRepresentation() throws Exception {
         IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));

-        String mapping = XContentFactory.jsonBuilder()
-                .startObject()
-                    .startObject("type")
-                        .startObject("properties")
-                            .startObject("field1").field("type", "byte").field("store", true).endObject()
-                            .startObject("field2").field("type", "short").field("store", true).endObject()
-                            .startObject("field3").field("type", "integer").field("store", true).endObject()
-                            .startObject("field4").field("type", "float").field("store", true).endObject()
-                            .startObject("field5").field("type", "long").field("store", true).endObject()
-                            .startObject("field6").field("type", "double").field("store", true).endObject()
-                            .startObject("field7").field("type", "ip").field("store", true).endObject()
-                            .startObject("field8").field("type", "ip").field("store", true).endObject()
-                            .startObject("field9").field("type", "date").field("store", true).endObject()
-                            .startObject("field10").field("type", "boolean").field("store", true).endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-                .string();
+        String mapping = Strings
+            .toString(XContentFactory.jsonBuilder()
+                .startObject()
+                    .startObject("type")
+                        .startObject("properties")
+                            .startObject("field1").field("type", "byte").field("store", true).endObject()
+                            .startObject("field2").field("type", "short").field("store", true).endObject()
+                            .startObject("field3").field("type", "integer").field("store", true).endObject()
+                            .startObject("field4").field("type", "float").field("store", true).endObject()
+                            .startObject("field5").field("type", "long").field("store", true).endObject()
+                            .startObject("field6").field("type", "double").field("store", true).endObject()
+                            .startObject("field7").field("type", "ip").field("store", true).endObject()
+                            .startObject("field8").field("type", "ip").field("store", true).endObject()
+                            .startObject("field9").field("type", "date").field("store", true).endObject()
+                            .startObject("field10").field("type", "boolean").field("store", true).endObject()
+                        .endObject()
+                    .endObject()
+                .endObject());
         MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                    .field("field1", 1)
-                    .field("field2", 1)
-                    .field("field3", 1)
-
.field("field4", 1.1) - .startArray("field5").value(1).value(2).value(3).endArray() - .field("field6", 1.1) - .field("field7", "192.168.1.1") - .field("field8", "2001:db8::2:1") - .field("field9", "2016-04-05") - .field("field10", true) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", 1) + .field("field2", 1) + .field("field3", 1) + .field("field4", 1.1) + .startArray("field5").value(1).value(2).value(3).endArray() + .field("field6", 1.1) + .field("field7", "192.168.1.1") + .field("field8", "2001:db8::2:1") + .field("field9", "2016-04-05") + .field("field10", true) + .endObject()), XContentType.JSON)); writer.addDocument(doc.rootDoc()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index d96c8f6ed742f..37c82ccc94616 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -28,12 +28,13 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.ToXContent; @@ -81,19 +82,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -113,19 +114,19 @@ public void testDefaults() throws IOException { } public void testEnableStore() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = 
mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -134,19 +135,19 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -154,22 +155,22 @@ public void testDisableIndex() throws IOException { } public void testDisableNorms() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("norms", false) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -188,7 +189,7 @@ public void testIndexOptions() throws IOException { for (String option : supportedOptions.keySet()) { mappingBuilder.startObject(option).field("type", "text").field("index_options", option).endObject(); } - String mapping = mappingBuilder.endObject().endObject().endObject().string(); + String mapping = Strings.toString(mappingBuilder.endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -196,7 +197,7 @@ public void testIndexOptions() throws IOException { for (String option : supportedOptions.keySet()) { jsonDoc.field(option, "1234"); } - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.endObject().bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()), XContentType.JSON)); for (Map.Entry entry : supportedOptions.entrySet()) { @@ -209,20 +210,20 @@ public 
void testIndexOptions() throws IOException { } public void testDefaultPositionIncrementGap() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .array("field", new String[] {"a", "b"}) - .endObject() - .bytes(), + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[] {"a", "b"}) + .endObject()), XContentType.JSON); ParsedDocument doc = mapper.parse(sourceToParse); @@ -248,23 +249,23 @@ public void testDefaultPositionIncrementGap() throws IOException { public void testPositionIncrementGap() throws IOException { final int positionIncrementGap = randomIntBetween(1, 1000); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("position_increment_gap", positionIncrementGap) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .array("field", new String[]{"a", "b"}) - .endObject() - .bytes(), + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[]{"a", "b"}) + .endObject()), XContentType.JSON); ParsedDocument doc = mapper.parse(sourceToParse); @@ -289,62 +290,62 @@ public void testPositionIncrementGap() throws IOException { } public void testSearchAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "standard") .field("search_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "default") .field("search_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = 
parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .field("search_analyzer", "default") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -352,14 +353,14 @@ public void testSearchAnalyzerSerialization() throws IOException { mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); builder.endObject(); - String mappingString = builder.string(); + String mappingString = Strings.toString(builder); assertTrue(mappingString.contains("analyzer")); assertTrue(mappingString.contains("search_analyzer")); assertTrue(mappingString.contains("search_quote_analyzer")); } public void testSearchQuoteAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") @@ -367,13 +368,13 @@ public void testSearchQuoteAnalyzerSerialization() throws IOException { .field("search_analyzer", "standard") .field("search_quote_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index/search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") @@ -381,14 +382,14 @@ public void testSearchQuoteAnalyzerSerialization() throws IOException { .field("search_analyzer", "default") .field("search_quote_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, 
mapper.mappingSource().toString()); } public void testTermVectors() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") @@ -415,20 +416,20 @@ public void testTermVectors() throws IOException { .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "1234") - .field("field2", "1234") - .field("field3", "1234") - .field("field4", "1234") - .field("field5", "1234") - .field("field6", "1234") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", "1234") + .field("field2", "1234") + .field("field3", "1234") + .field("field4", "1234") + .field("field5", "1234") + .field("field6", "1234") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); @@ -463,12 +464,12 @@ public void testTermVectors() throws IOException { } public void testEagerGlobalOrdinals() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("eager_global_ordinals", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -477,11 +478,11 @@ public void testEagerGlobalOrdinals() throws IOException { } public void testFielddata() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, disabledMapper.mappingSource().toString()); @@ -489,32 +490,32 @@ public void testFielddata() throws IOException { () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test")); assertThat(e.getMessage(), containsString("Fielddata is disabled")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("fielddata", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, enabledMapper.mappingSource().toString()); enabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test"); // no exception this time - String illegalMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", false) .field("fielddata", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(illegalMapping))); assertThat(ex.getMessage(), containsString("Cannot enable fielddata on a [text] field that is not indexed")); } public void testFrequencyFilter() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("fielddata", true) @@ -523,7 +524,7 @@ public void testFrequencyFilter() throws IOException { .field("min_segment_size", 1000) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -535,7 +536,7 @@ public void testFrequencyFilter() throws IOException { } public void testNullConfigValuesFail() throws MapperParsingException, IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -543,19 +544,19 @@ public void testNullConfigValuesFail() throws MapperParsingException, IOExceptio .field("analyzer", (String) null) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertEquals("[analyzer] must not have a [null] value", e.getMessage()); } public void testNotIndexedFieldPositionIncrement() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", false) .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -564,12 +565,12 @@ public void testNotIndexedFieldPositionIncrement() throws IOException { public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { for (String indexOptions : Arrays.asList("docs", "freqs")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index_options", indexOptions) .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -578,14 +579,14 @@ public void 
testAnalyzedFieldPositionIncrementWithoutPositions() throws IOExcept } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("") .field("type", "text") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -602,7 +603,7 @@ public void testIndexPrefixMapping() throws IOException { }, null); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -611,7 +612,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); @@ -624,11 +625,11 @@ public void testIndexPrefixMapping() throws IOException { CONSTANT_SCORE_REWRITE, queryShardContext); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "Some English text that is going to be very useful") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "Some English text that is going to be very useful") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field._index_prefix"); @@ -636,13 +637,13 @@ public void testIndexPrefixMapping() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); CompressedXContent json = new CompressedXContent(mapping); DocumentMapper mapper = parser.parse("type", json); @@ -661,7 +662,7 @@ public void testIndexPrefixMapping() throws IOException { indexService.mapperService().merge("type", json, MergeReason.MAPPING_UPDATE); - String badUpdate = XContentFactory.jsonBuilder().startObject().startObject("type") + String badUpdate = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -670,7 +671,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { indexService.mapperService() @@ -680,7 +681,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String illegalMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -692,7 +693,7 @@ public void testIndexPrefixMapping() throws IOException { .startObject("_index_prefix").field("type", "text").endObject() .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { indexService.mapperService() @@ -703,7 +704,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -712,7 +713,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -720,7 +721,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -729,7 +730,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -737,7 +738,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -746,7 +747,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 25) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -754,13 +755,13 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .field("index_prefix", (String) null) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -768,13 +769,13 @@ public void testIndexPrefixMapping() 
throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", "false") .startObject("index_prefix").endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index c21fffc1bb167..fd9c2e2b375e2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -76,7 +78,7 @@ protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mappi CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping try { - indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), MapperService.MergeReason.MAPPING_UPDATE); + indexService.mapperService().merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { // expected @@ -97,14 +99,14 @@ public void testConflictSameType() throws Exception { .endObject().endObject().endObject(); try { - mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); } try { - mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); @@ -125,7 +127,7 @@ public void testConflictNewType() throws Exception { .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { // expected @@ -133,7 +135,7 @@ public void testConflictNewType() throws Exception { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), 
MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { // expected @@ -154,15 +156,15 @@ public void testConflictNewTypeUpdate() throws Exception { MapperService mapperService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()).mapperService(); - mapperService.merge("type1", new CompressedXContent(mapping1.string()), MapperService.MergeReason.MAPPING_UPDATE); - mapperService.merge("type2", new CompressedXContent(mapping2.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type1", new CompressedXContent(Strings.toString(mapping1)), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type2", new CompressedXContent(Strings.toString(mapping2)), MapperService.MergeReason.MAPPING_UPDATE); XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties").startObject("foo").field("type", "double").endObject() .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { // expected @@ -170,7 +172,7 @@ public void testConflictNewTypeUpdate() throws Exception { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { // expected @@ -190,14 +192,14 @@ public void testReuseMetaField() throws IOException { MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService(); try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); @@ -205,22 +207,22 @@ public void testReuseMetaField() throws IOException { } public void testRejectFieldDefinedTwice() throws IOException { - String mapping1 = XContentFactory.jsonBuilder().startObject() + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type1") .startObject("properties") .startObject("foo") .field("type", "object") .endObject() .endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject() + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type2") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService1 = createIndex("test1").mapperService(); mapperService1.merge("type1", new 
CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 96d756f008366..99713c140c9e0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; @@ -121,7 +122,7 @@ protected GetResponse executeGet(GetRequest getRequest) { builder.field(expectedShapePath, indexedShapeToReturn); builder.field(randomAlphaOfLengthBetween(10, 20), "something"); builder.endObject(); - json = builder.string(); + json = Strings.toString(builder); } catch (IOException ex) { throw new ElasticsearchException("boom", ex); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e8699897d667d..05a4b99f19d1a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; @@ -350,10 +351,10 @@ public void testExceptionUsingAnalyzerOnNumericField() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge("_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef( + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef( "_doc", "string_boost", "type=text,boost=4", "string_no_pos", - "type=text,index_options=docs").string() + "type=text,index_options=docs")) ), MapperService.MergeReason.MAPPING_UPDATE); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 922aa9a682f45..de044d5879312 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -327,7 +328,7 @@ public void testItemCopy() throws IOException { public void testItemFromXContent() throws IOException { Item expectedItem = generateRandomItem(); - 
String json = expectedItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string(); + String json = Strings.toString(expectedItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); XContentParser parser = createParser(JsonXContent.jsonXContent, json); Item newItem = Item.parse(parser, new Item()); assertEquals(expectedItem, newItem); diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index c199bf02dd264..46e10bc7f224c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; @@ -58,7 +59,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertTrue(e.getMessage().contains("spanFirst must have [end] set")); } { @@ -68,7 +69,7 @@ public void testParseEnd() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertTrue(e.getMessage().contains("spanFirst must have [match] span query clause")); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 961d6092d76e5..0536dae6dfa39 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanNotQuery; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.internal.SearchContext; @@ -107,7 +108,7 @@ public void testParseDist() throws IOException { builder.field("dist", 3); builder.endObject(); builder.endObject(); - SpanNotQueryBuilder query = (SpanNotQueryBuilder)parseQuery(builder.string()); + SpanNotQueryBuilder query = (SpanNotQueryBuilder)parseQuery(Strings.toString(builder)); assertThat(query.pre(), equalTo(3)); assertThat(query.post(), equalTo(3)); assertNotNull(query.includeQuery()); @@ -128,7 +129,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot must have [include]")); } { @@ -142,7 +143,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - 
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot must have [exclude]")); } { @@ -159,7 +160,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)")); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index c945e595213fd..baacd13809b22 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -191,7 +192,7 @@ public GetResponse executeGet(GetRequest getRequest) { builder.startObject(); builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()])); builder.endObject(); - json = builder.string(); + json = Strings.toString(builder); } catch (IOException ex) { throw new ElasticsearchException("boom", ex); } @@ -226,9 +227,9 @@ public void testNumeric() throws IOException { } public void testTermsQueryWithMultipleFields() throws IOException { - String query = XContentFactory.jsonBuilder().startObject() + String query = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("terms").array("foo", 123).array("bar", 456).endObject() - .endObject().string(); + .endObject()); ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query)); assertEquals("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 91de39ecffff4..e445eb1411748 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -21,7 +21,6 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; @@ -38,6 +37,7 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.ScriptDocValues; import 
org.elasticsearch.index.mapper.MapperService;
@@ -74,9 +74,9 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
 @Override
 protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
 String docType = "_doc";
- mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType,
+ mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
 "m_s_m", "type=long"
- ).string()), MapperService.MergeReason.MAPPING_UPDATE);
+ ))), MapperService.MergeReason.MAPPING_UPDATE);
 }

 @Override
diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
index a2865cfa129b5..40f6605edf11d 100644
--- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
@@ -25,6 +25,7 @@
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -487,7 +488,7 @@ public void testProperErrorMessageWhenMissingFunction() throws IOException {

 public void testWeight1fStillProducesWeightFunction() throws IOException {
 assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
- String queryString = jsonBuilder().startObject()
+ String queryString = Strings.toString(jsonBuilder().startObject()
 .startObject("function_score")
 .startArray("functions")
 .startObject()
@@ -498,7 +499,7 @@ public void testWeight1fStillProducesWeightFunction() throws IOException {
 .endObject()
 .endArray()
 .endObject()
- .endObject().string();
+ .endObject());
 QueryBuilder query = parseQuery(queryString);
 assertThat(query, instanceOf(FunctionScoreQueryBuilder.class));
 FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) query;
@@ -523,20 +524,20 @@ public void testWeight1fStillProducesWeightFunction() throws IOException {
 }

 public void testProperErrorMessagesForMisplacedWeightsAndFunctions() throws IOException {
- String query = jsonBuilder().startObject().startObject("function_score")
+ String query = Strings.toString(jsonBuilder().startObject().startObject("function_score")
 .startArray("functions")
 .startObject().startObject("script_score").field("script", "3").endObject().endObject()
 .endArray()
 .field("weight", 2)
- .endObject().endObject().string();
+ .endObject().endObject());
 expectParsingException(query, "[you can either define [functions] array or a single function, not both. already " +
 "found [functions] array, now encountering [weight].]");
- query = jsonBuilder().startObject().startObject("function_score")
+ query = Strings.toString(jsonBuilder().startObject().startObject("function_score")
 .field("weight", 2)
 .startArray("functions")
 .startObject().endObject()
 .endArray()
- .endObject().endObject().string();
+ .endObject().endObject());
 expectParsingException(query, "[you can either define [functions] array or a single function, not both. already found " +
 "[weight], now encountering [functions].]");
 }
diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
index f4d4ea790bc50..109f9cbd686c5 100644
--- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
+++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.index.reindex;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -68,7 +69,7 @@ public void testXContentRepresentationOfUnlimitedRequestsPerSecond() throws IOEx
 BulkByScrollTask.Status status = new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0),
 Float.POSITIVE_INFINITY, null, timeValueMillis(0));
 status.toXContent(builder, ToXContent.EMPTY_PARAMS);
- assertThat(builder.string(), containsString("\"requests_per_second\":-1"));
+ assertThat(Strings.toString(builder), containsString("\"requests_per_second\":-1"));
 }

 public void testXContentRepresentationOfUnfinishedSlices() throws IOException {
@@ -78,7 +79,7 @@ public void testXContentRepresentationOfUnfinishedSlices() throws IOException {
 BulkByScrollTask.Status status = new BulkByScrollTask.Status(
 Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(completedStatus)), null);
 status.toXContent(builder, ToXContent.EMPTY_PARAMS);
- assertThat(builder.string(), containsString("\"slices\":[null,null,{\"slice_id\":2"));
+ assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"slice_id\":2"));
 }

 public void testXContentRepresentationOfSliceFailures() throws IOException {
@@ -87,7 +88,7 @@ public void testXContentRepresentationOfSliceFailures() throws IOException {
 BulkByScrollTask.Status status = new BulkByScrollTask.Status(Arrays.asList(null, null,
 new BulkByScrollTask.StatusOrException(e)), null);
 status.toXContent(builder, ToXContent.EMPTY_PARAMS);
- assertThat(builder.string(), containsString("\"slices\":[null,null,{\"type\":\"exception\""));
+ assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"type\":\"exception\""));
 }

 public void testMergeStatuses() {
diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
index 15fdbe828b009..4f410dc6d2690 100644
--- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.search.geo;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -94,7 +95,7 @@ public void testInvalidPointEmbeddedObject() throws IOException {
 content.endObject();
 content.endObject();

- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();
 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
 assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
@@ -106,7 +107,7 @@ public void testInvalidPointLatHashMix() throws IOException {
 content.field("lat", 0).field("geohash", stringEncode(0d, 0d));
 content.endObject();

- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();

 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
@@ -119,7 +120,7 @@ public void testInvalidPointLonHashMix() throws IOException {
 content.field("lon", 0).field("geohash", stringEncode(0d, 0d));
 content.endObject();

- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();

 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
@@ -132,7 +133,7 @@ public void testInvalidField() throws IOException {
 content.field("lon", 0).field("lat", 0).field("test", 0);
 content.endObject();

- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();

 Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
@@ -144,7 +145,7 @@ private XContentParser objectLatLon(double lat, double lon) throws IOException {
 content.startObject();
 content.field("lat", lat).field("lon", lon);
 content.endObject();
- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();
 return parser;
 }
@@ -152,7 +153,7 @@ private XContentParser objectLatLon(double lat, double lon) throws IOException {
 private XContentParser arrayLatLon(double lat, double lon) throws IOException {
 XContentBuilder content = JsonXContent.contentBuilder();
 content.startArray().value(lon).value(lat).endArray();
- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();
 return parser;
 }
@@ -160,7 +161,7 @@ private XContentParser arrayLatLon(double lat, double lon) throws IOException {
 private XContentParser stringLatLon(double lat, double lon) throws IOException {
 XContentBuilder content = JsonXContent.contentBuilder();
 content.value(Double.toString(lat) + ", " + Double.toString(lon));
- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();
 return parser;
 }
@@ -168,7 +169,7 @@ private XContentParser stringLatLon(double lat, double lon) throws IOException {
 private XContentParser geohash(double lat, double lon) throws IOException {
 XContentBuilder content = JsonXContent.contentBuilder();
 content.value(stringEncode(lon, lat));
- XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content));
 parser.nextToken();
 return parser;
 }
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index a72f53eae927e..6bd378bdb8529 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -52,6 +52,7 @@
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -1156,7 +1157,7 @@ public void testShardStats() throws IOException {
 builder.startObject();
 stats.toXContent(builder, EMPTY_PARAMS);
 builder.endObject();
- String xContent = builder.string();
+ String xContent = Strings.toString(builder);
 StringBuilder expectedSubSequence = new StringBuilder("\"shard_path\":{\"state_path\":\"");
 expectedSubSequence.append(shard.shardPath().getRootStatePath().toString());
 expectedSubSequence.append("\",\"data_path\":\"");
@@ -2365,12 +2366,12 @@ public void testEstimateTotalDocSize() throws Exception {
 int numDoc = randomIntBetween(100, 200);
 for (int i = 0; i < numDoc; i++) {
- String doc = XContentFactory.jsonBuilder()
+ String doc = Strings.toString(XContentFactory.jsonBuilder()
 .startObject()
 .field("count", randomInt())
 .field("point", randomFloat())
 .field("description", randomUnicodeOfCodepointLength(100))
- .endObject().string();
+ .endObject());
 indexDoc(indexShard, "doc", Integer.toString(i), doc);
 }
diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
index 433f662062735..12c3804a1a7b0 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.action.resync.ResyncReplicationResponse;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -192,7 +193,7 @@ public void testStatusReportsCorrectNumbers() throws IOException {
 PrimaryReplicaSyncer.ResyncTask.Status status = task.getStatus();
 XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
 status.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS);
- String jsonString = jsonBuilder.string();
+ String jsonString = Strings.toString(jsonBuilder);
 assertThat(jsonString, containsString("\"phase\":\"" + task.getPhase() + "\""));
 assertThat(jsonString, containsString("\"totalOperations\":" + task.getTotalOperations()));
 assertThat(jsonString, containsString("\"resyncedOperations\":" + task.getResyncedOperations()));
diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
index 3e7f4650c3e6d..2ab905a2dd526 100644
--- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
+++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
@@ -33,6 +33,7 @@
 import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity;
 import org.apache.lucene.search.similarities.LambdaTTF;
 import org.apache.lucene.search.similarities.NormalizationH2;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -66,11 +67,11 @@ public void testResolveDefaultSimilarities() {
 }

 public void testResolveSimilaritiesFromMapping_classic() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "classic")
@@ -85,11 +86,11 @@ public void testResolveSimilaritiesFromMapping_classic() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_bm25() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "BM25")
@@ -108,11 +109,11 @@ public void testResolveSimilaritiesFromMapping_bm25() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_boolean() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "boolean").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 IndexService indexService = createIndex("foo", Settings.EMPTY);
 DocumentMapper documentMapper = indexService.mapperService()
@@ -123,11 +124,11 @@ public void testResolveSimilaritiesFromMapping_boolean() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_DFR() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "DFR")
@@ -148,11 +149,11 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_IB() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "IB")
@@ -173,11 +174,11 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_DFI() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "DFI")
@@ -192,11 +193,11 @@ public void testResolveSimilaritiesFromMapping_DFI() throws IOException {
 }

 public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "LMDirichlet")
@@ -211,11 +212,11 @@ public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException
 }

 public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 Settings indexSettings = Settings.builder()
 .put("index.similarity.my_similarity.type", "LMJelinekMercer")
@@ -230,11 +231,11 @@ public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOExcept
 }

 public void testResolveSimilaritiesFromMapping_Unknown() throws IOException {
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
 .startObject("field1").field("type", "text").field("similarity", "unknown_similarity").endObject()
 .endObject()
- .endObject().endObject().string();
+ .endObject().endObject());

 IndexService indexService = createIndex("foo");
 try {
diff --git a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java
index 8a63c237e90d5..626b2b0e0e2b8 100644
--- a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java
+++ b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java
@@ -57,7 +57,7 @@ public void testToFromXContent() throws IOException {
 BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size);
 XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
 BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS);
- byte[] xcontent = BytesReference.toBytes(shuffleXContent(builder).bytes());
+ byte[] xcontent = BytesReference.toBytes(BytesReference.bytes(shuffleXContent(builder)));
 final BlobStoreIndexShardSnapshot.FileInfo parsedInfo;
 try (XContentParser parser = createParser(JsonXContent.jsonXContent, xcontent)) {
@@ -116,7 +116,7 @@ public void testInvalidFieldsInFromXContent() throws IOException {
 builder.field(FileInfo.WRITTEN_BY, Version.LATEST.toString());
 builder.field(FileInfo.CHECKSUM, "666");
 builder.endObject();
- byte[] xContent = BytesReference.toBytes(builder.bytes());
+ byte[] xContent = BytesReference.toBytes(BytesReference.bytes(builder));

 if (failure == null) {
 // No failures should read as usual
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index 30843589a3463..c18784873a472 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -36,6 +36,7 @@
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Randomness;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -467,7 +468,7 @@ public void testStats() throws IOException {
 builder.startObject();
 copy.toXContent(builder, ToXContent.EMPTY_PARAMS);
 builder.endObject();
- assertThat(builder.string(), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + expectedSizeInBytes
+ assertThat(Strings.toString(builder), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + expectedSizeInBytes
 + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" + expectedSizeInBytes
 + ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}"));
 }
diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
index 87747990b9db9..5970e1121bdee 100644
--- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -224,12 +225,12 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception {
 responseBuilder.startObject();
 response.toXContent(responseBuilder, new ToXContent.MapParams(params));
 responseBuilder.endObject();
- String responseStrings = responseBuilder.string();
+ String responseStrings = Strings.toString(responseBuilder);

 XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
 prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings));
- assertThat(responseStrings, equalTo(prettyJsonBuilder.string()));
+ assertThat(responseStrings, equalTo(Strings.toString(prettyJsonBuilder)));

 params.put("pretty", "false");
@@ -238,11 +239,11 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception {
 responseBuilder.startObject();
 response.toXContent(responseBuilder, new ToXContent.MapParams(params));
 responseBuilder.endObject();
- responseStrings = responseBuilder.string();
+ responseStrings = Strings.toString(responseBuilder);

 prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
 prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings));
- assertThat(responseStrings, not(equalTo(prettyJsonBuilder.string())));
+ assertThat(responseStrings, not(equalTo(Strings.toString(prettyJsonBuilder))));
 }
diff --git a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index 4ab4cab52cf10..31ee22200a219 100644
--- a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -81,22 +82,22 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc
 assertThat("Breaker is not set to 0", node.getBreaker().getStats(CircuitBreaker.FIELDDATA).getEstimated(), equalTo(0L));
 }

- String mapping = XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type")
- .startObject("properties")
- .startObject("test-str")
- .field("type", "keyword")
- .field("doc_values", randomBoolean())
- .endObject() // test-str
- .startObject("test-num")
- // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double"
- .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer")))
- .endObject() // test-num
- .endObject() // properties
- .endObject() // type
- .endObject() // {}
- .string();
+ String mapping = Strings // {}
+ .toString(XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject("test-str")
+ .field("type", "keyword")
+ .field("doc_values", randomBoolean())
+ .endObject() // test-str
+ .startObject("test-num")
+ // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double"
+ .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer")))
+ .endObject() // test-num
+ .endObject() // properties
+ .endObject() // type
+ .endObject());
 final double topLevelRate;
 final double lowLevelRate;
 if (frequently()) {
diff --git a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
index 02191bc22fa69..2a867915b0fe6 100644
--- a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -332,7 +333,7 @@ private void checkIndexState(IndexMetaData.State expectedState, String... indice
 }

 public void testOpenCloseWithDocs() throws IOException, ExecutionException, InterruptedException {
- String mapping = XContentFactory.jsonBuilder().
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().
 startObject().
 startObject("type").
 startObject("properties").
@@ -341,7 +342,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte
 .endObject().
 endObject().
 endObject()
- .endObject().string();
+ .endObject());

 assertAcked(client().admin().indices().prepareCreate("test")
 .addMapping("type", mapping, XContentType.JSON));
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
index d27b05d1e7b29..9e97e9bbfd449 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
@@ -21,7 +21,6 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
@@ -48,10 +47,8 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ExecutionException;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.notNullValue;
@@ -76,7 +73,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
 }

 public void testSimulate() throws Exception {
- BytesReference pipelineSource = jsonBuilder().startObject()
+ BytesReference pipelineSource = BytesReference.bytes(jsonBuilder().startObject()
 .field("description", "my_pipeline")
 .startArray("processors")
 .startObject()
@@ -84,7 +81,7 @@ public void testSimulate() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON)
 .get();
 GetPipelineResponse getResponse = client().admin().cluster().prepareGetPipeline("_id")
 .get();
@@ -93,7 +90,7 @@ public void testSimulate() throws Exception {
 assertThat(getResponse.pipelines().size(), equalTo(1));
 assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id"));

- BytesReference bytes = jsonBuilder().startObject()
+ BytesReference bytes = BytesReference.bytes(jsonBuilder().startObject()
 .startArray("docs")
 .startObject()
 .field("_index", "index")
@@ -105,7 +102,7 @@ public void testSimulate() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 SimulatePipelineResponse response;
 if (randomBoolean()) {
 response = client().admin().cluster().prepareSimulatePipeline(bytes, XContentType.JSON)
 .get();
@@ -136,7 +133,7 @@ public void testSimulate() throws Exception {

 public void testBulkWithIngestFailures() throws Exception {
 createIndex("index");

- BytesReference source = jsonBuilder().startObject()
+ BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
 .field("description", "my_pipeline")
 .startArray("processors")
 .startObject()
@@ -144,7 +141,7 @@ public void testBulkWithIngestFailures() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON);
 client().admin().cluster().putPipeline(putPipelineRequest).get();
@@ -182,7 +179,7 @@ public void testBulkWithIngestFailures() throws Exception {

 public void testBulkWithUpsert() throws Exception {
 createIndex("index");

- BytesReference source = jsonBuilder().startObject()
+ BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
 .field("description", "my_pipeline")
 .startArray("processors")
 .startObject()
@@ -190,7 +187,7 @@ public void testBulkWithUpsert() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON);
 client().admin().cluster().putPipeline(putPipelineRequest).get();
@@ -217,7 +214,7 @@ public void testBulkWithUpsert() throws Exception {
 }

 public void test() throws Exception {
- BytesReference source = jsonBuilder().startObject()
+ BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
 .field("description", "my_pipeline")
 .startArray("processors")
 .startObject()
@@ -225,7 +222,7 @@ public void test() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON);
 client().admin().cluster().putPipeline(putPipelineRequest).get();
@@ -258,7 +255,7 @@ public void test() throws Exception {
 }

 public void testPutWithPipelineFactoryError() throws Exception {
- BytesReference source = jsonBuilder().startObject()
+ BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
 .field("description", "my_pipeline")
 .startArray("processors")
 .startObject()
@@ -267,7 +264,7 @@ public void testPutWithPipelineFactoryError() throws Exception {
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id2", source, XContentType.JSON);
 Exception e = expectThrows(ElasticsearchParseException.class, () ->
 client().admin().cluster().putPipeline(putPipelineRequest).actionGet());
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
index 03777b98ab73e..84d9327a0910a 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
@@ -45,14 +45,14 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase {
 private volatile boolean installPlugin;

 public IngestProcessorNotInstalledOnAllNodesIT() throws IOException {
- pipelineSource = jsonBuilder().startObject()
+ pipelineSource = BytesReference.bytes(jsonBuilder().startObject()
 .startArray("processors")
 .startObject()
 .startObject("test")
 .endObject()
 .endObject()
 .endArray()
- .endObject().bytes();
+ .endObject());
 }

 @Override
diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
index eb1b7814ab851..7b134879cda45 100644
--- a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java
@@ -58,7 +58,7 @@ public void testParser() throws IOException {
 try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
 new PipelineConfiguration("1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
 .toXContent(builder, ToXContent.EMPTY_PARAMS);
- bytes = builder.bytes();
+ bytes = BytesReference.bytes(builder);
 }

 XContentParser xContentParser = xContentType.xContent()
diff --git a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
index 3c30e7610dd26..07dcabf396b59 100644
--- a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
+++ b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
@@ -155,11 +155,11 @@ public void testThatParentPerDocumentIsSupported() throws Exception {
 @SuppressWarnings("unchecked")
 public void testThatSourceFilteringIsSupported() throws Exception {
 assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
- BytesReference sourceBytesRef = jsonBuilder().startObject()
+ BytesReference sourceBytesRef = BytesReference.bytes(jsonBuilder().startObject()
 .array("field", "1", "2")
 .startObject("included").field("field", "should be seen").field("hidden_field", "should not be seen").endObject()
 .field("excluded", "should not be seen")
- .endObject().bytes();
+ .endObject());
 for (int i = 0; i < 100; i++) {
 client().prepareIndex("test", "type", Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get();
 }
diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
index 4da927459e55a..107ac38400e0d 100644
--- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
+++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
@@ -101,7 +102,7 @@ private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOEx
 param2Builder.startObject();
 param2.toXContent(param2Builder, params);
 param2Builder.endObject();
- assertThat(param1Builder.string(), equalTo(param2Builder.string()));
+ assertThat(Strings.toString(param1Builder), equalTo(Strings.toString(param2Builder)));
 }

 private static NodeInfo createNodeInfo() {
diff --git a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
index 51167d862fdfd..cd902bd438fe4 100644
--- a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.repositories;

 import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -68,7 +69,7 @@ public void testXContent() throws IOException {
 IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
 XContentBuilder builder = JsonXContent.contentBuilder();
 indexId.toXContent(builder, ToXContent.EMPTY_PARAMS);
- XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
 assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
 String name = null;
 String id = null;
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java
index 40ff1bad9767f..8c1e242b3262f 100644
--- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.repositories;

 import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -57,7 +58,7 @@ public void testXContent() throws IOException {
 RepositoryData repositoryData = generateRandomRepoData();
 XContentBuilder builder = JsonXContent.contentBuilder();
 repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS);
- XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+ XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
 long gen = (long) randomIntBetween(0, 500);
 RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen);
 assertEquals(repositoryData, fromXContent);
diff --git a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
index c658f06637ea0..96106125f19ef 100644
--- a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
@@ -338,7 +338,7 @@ public void testNoErrorFromXContent() throws IOException {
 builder.field("status", randomFrom(RestStatus.values()).getStatus());
 builder.endObject();

- try (XContentParser parser = createParser(builder.contentType().xContent(), builder.bytes())) {
+ try (XContentParser parser = createParser(builder.contentType().xContent(), BytesReference.bytes(builder))) {
 BytesRestResponse.errorFromXContent(parser);
 }
 }
diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index 08cab9ea2e92b..cb2d51f6a675e 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -25,7 +25,6 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.path.PathTrie;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
@@ -412,7 +411,8 @@ public boolean supportsContentStream() {
 public void testNonStreamingXContentCausesErrorResponse() throws IOException {
 FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY)
- .withContent(YamlXContent.contentBuilder().startObject().endObject().bytes(), XContentType.YAML).withPath("/foo").build();
+ .withContent(BytesReference.bytes(YamlXContent.contentBuilder().startObject().endObject()),
+ XContentType.YAML).withPath("/foo").build();
 AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE);
 restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() {
 @Override
diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java
index 656b020d53279..668d1095c36ff 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java
@@ -93,7 +93,7 @@ public void testGetResponse() throws Exception {
 responseBuilder.prettyPrint().lfAtEnd();
 }
 mainResponse.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS);
- BytesReference xcontentBytes = responseBuilder.bytes();
+ BytesReference xcontentBytes = BytesReference.bytes(responseBuilder);
 assertEquals(xcontentBytes, response.content());
 }
 }
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java
index 801ed758cb228..d5769cd192b75 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -39,7 +40,8 @@ public void testFromXContentLoading() throws Exception {
 XContentBuilder builder = XContentFactory.jsonBuilder();
 builder.startObject().field("lang0#id0", "script0").field("lang1#id0", "script1").endObject();
 XContentParser parser0 = XContentType.JSON.xContent()
- .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput());
+ .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ BytesReference.bytes(builder).streamInput());
 expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser0));

 // failure to load a new namespace script and old namespace script with the same id but different langs
@@ -47,7 +49,8 @@ public void testFromXContentLoading() throws Exception {
 builder.startObject().field("lang0#id0", "script0")
 .startObject("id0").field("lang", "lang1").field("source", "script1").endObject().endObject();
 XContentParser parser1 = XContentType.JSON.xContent()
- .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput());
+ .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ BytesReference.bytes(builder).streamInput());
 expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser1));

 // failure to load a new namespace script and old namespace script with the same id but different langs with additional scripts
@@ -56,7 +59,8 @@ public void testFromXContentLoading() throws Exception {
 builder.startObject().field("lang0#id0", "script0")
 .startObject("id1").field("lang", "lang0").field("source", "script0").endObject()
 .startObject("id0").field("lang", "lang1").field("source", "script1").endObject().endObject();
 XContentParser parser2 = XContentType.JSON.xContent()
- .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput());
+ .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ BytesReference.bytes(builder).streamInput());
 expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser2));

 // okay to load the same script from the new and old namespace if the lang is the same
@@ -64,7 +68,8 @@ public void testFromXContentLoading() throws Exception {
 builder.startObject().field("lang0#id0", "script0")
 .startObject("id0").field("lang", "lang0").field("source", "script1").endObject().endObject();
 XContentParser parser3 = XContentType.JSON.xContent()
- .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput());
+ .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+ BytesReference.bytes(builder).streamInput());
 ScriptMetaData.fromXContent(parser3);
 }
@@ -73,15 +78,15 @@ public void testGetScript() throws Exception {
 XContentBuilder sourceBuilder = XContentFactory.jsonBuilder();
 sourceBuilder.startObject().startObject("template").field("field", "value").endObject().endObject();
- builder.storeScript("template", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType()));
+ builder.storeScript("template", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));

 sourceBuilder = XContentFactory.jsonBuilder();
 sourceBuilder.startObject().field("template", "value").endObject();
- builder.storeScript("template_field", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType()));
+ builder.storeScript("template_field", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));

 sourceBuilder = XContentFactory.jsonBuilder();
 sourceBuilder.startObject().startObject("script").field("lang", "_lang").field("source", "_source").endObject().endObject();
- builder.storeScript("script", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType()));
+ builder.storeScript("script", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));

 ScriptMetaData scriptMetaData = builder.build();
 assertEquals("_source", scriptMetaData.getStoredScript("script").getSource());
@@ -134,7 +139,7 @@ private ScriptMetaData randomScriptMetaData(XContentType sourceContentType, int
 .field("lang", randomAlphaOfLength(4)).field("source", randomAlphaOfLength(10))
 .endObject().endObject();
 builder.storeScript(randomAlphaOfLength(i + 1),
- StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType()));
+ StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));
 }
 return builder.build();
 }
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index 42a4c2f6abb1a..fb140462086b2 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -263,14 +263,14 @@ public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException {
 }

 public void testStoreScript() throws Exception {
- BytesReference script = XContentFactory.jsonBuilder()
+ BytesReference script = BytesReference.bytes(XContentFactory.jsonBuilder()
 .startObject()
 .field("script")
 .startObject()
 .field("lang", "_lang")
 .field("source", "abc")
 .endObject()
- .endObject().bytes();
+ .endObject());
 ScriptMetaData scriptMetaData = ScriptMetaData.putStoredScript(null, "_id", StoredScriptSource.parse(script, XContentType.JSON));
 assertNotNull(scriptMetaData);
 assertEquals("abc", scriptMetaData.getStoredScript("_id").getSource());
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTests.java
index 0459be255e57f..6e578ed910d40 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.script;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.settings.Settings;
@@ -70,7 +71,7 @@ private Script createScript() throws IOException {
 builder.startObject();
 builder.field("field", randomAlphaOfLengthBetween(1, 5));
 builder.endObject();
- script = builder.string();
+ script = Strings.toString(builder);
 }
 } else {
 script = randomAlphaOfLengthBetween(1, 5);
diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java
index 9174943e48b06..168ec4fc553b9 100644
--- a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java
@@ -19,6 +19,8 @@
 package org.elasticsearch.script;

+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -48,7 +50,7 @@ protected StoredScriptSource createTestInstance() {
 if (randomBoolean()) {
 options.put(Script.CONTENT_TYPE_OPTION, xContentType.mediaType());
 }
- return StoredScriptSource.parse(template.bytes(), xContentType);
+ return StoredScriptSource.parse(BytesReference.bytes(template), xContentType);
 } catch (IOException e) {
 throw new AssertionError("Failed to create test instance", e);
 }
@@ -82,7 +84,7 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws
 switch (between(0, 3)) {
 case 0:
- source = newTemplate.string();
+ source = Strings.toString(newTemplate);
 break;
 case 1:
 lang = randomAlphaOfLengthBetween(1, 20);
@@ -93,7 +95,7 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws
 break;
 case 3:
 default:
- return new StoredScriptSource(newTemplate.string());
+ return new StoredScriptSource(Strings.toString(newTemplate));
 }
 return new StoredScriptSource(lang, source, options);
 }
diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java
index f66f4b68b55b1..2bf0216c546ec 100644
--- a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java
+++ b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.script;

 import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -57,7 +59,7 @@ public void testSourceParsing() throws Exception {
 try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
 builder.startObject().startObject("script").field("lang", "lang").field("source", "code").endObject().endObject();

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -67,7 +69,7 @@ public void testSourceParsing() throws Exception {
 try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
 builder.startObject().field("template", "code").endObject();

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("mustache", "code", Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -79,10 +81,10 @@ public void testSourceParsing() throws Exception {

 String code;
 try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) {
- code = cb.startObject().field("query", "code").endObject().string();
+ code = Strings.toString(cb.startObject().field("query", "code").endObject());
 }

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -94,10 +96,10 @@ public void testSourceParsing() throws Exception {

 String code;
 try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) {
- code = cb.startObject().field("query", "code").endObject().string();
+ code = Strings.toString(cb.startObject().field("query", "code").endObject());
 }

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -111,10 +113,10 @@ public void testSourceParsing() throws Exception {

 String code;
 try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) {
- code = cb.startObject().field("query", "code").endObject().string();
+ code = Strings.toString(cb.startObject().field("query", "code").endObject());
 }

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("mustache", code,
 Collections.singletonMap("content_type", "application/json; charset=UTF-8"));
@@ -125,7 +127,7 @@ public void testSourceParsing() throws Exception {
 try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
 builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code").endObject().endObject();

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -135,7 +137,7 @@ public void testSourceParsing() throws Exception {
 try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
 builder.startObject().field("script").startObject().field("lang", "lang").field("code", "code").endObject().endObject();

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -147,7 +149,7 @@ public void testSourceParsing() throws Exception {
 builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code")
 .field("options").startObject().endObject().endObject().endObject();

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap());

 assertThat(parsed, equalTo(source));
@@ -155,15 +157,16 @@ public void testSourceParsing() throws Exception {

 // complex script with embedded template
 try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
- builder.startObject().field("script").startObject().field("lang", "lang").startObject("source").field("query", "code")
- .endObject().startObject("options").endObject().endObject().endObject().string();
+ Strings.toString(builder.startObject().field("script").startObject().field("lang", "lang")
+ .startObject("source").field("query", "code")
+ .endObject().startObject("options").endObject().endObject().endObject());

 String code;
 try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) {
- code = cb.startObject().field("query", "code").endObject().string();
+ code = Strings.toString(cb.startObject().field("query", "code").endObject());
 }

- StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON);
+ StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON);
 StoredScriptSource source = new StoredScriptSource("lang", code,
 Collections.singletonMap(Script.CONTENT_TYPE_OPTION, builder.contentType().mediaType()));
@@ -177,7 +180,7 @@ public void testSourceParsingErrors() throws Exception {
 builder.startObject().field("script").startObject().field("source", "code").endObject().endObject();

 IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
- StoredScriptSource.parse(builder.bytes(), XContentType.JSON));
+ StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON));
 assertThat(iae.getMessage(), equalTo("must specify lang for stored script"));
 }
@@ -186,7 +189,7 @@ public void testSourceParsingErrors() throws Exception {
 builder.startObject().field("script").startObject().field("lang", "lang").endObject().endObject();

 IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
- StoredScriptSource.parse(builder.bytes(), XContentType.JSON));
+ StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON));
 assertThat(iae.getMessage(), equalTo("must specify source for stored script"));
stored script")); } @@ -196,7 +199,7 @@ public void testSourceParsingErrors() throws Exception { .startObject("options").field("option", "option").endObject().endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - StoredScriptSource.parse(builder.bytes(), XContentType.JSON)); + StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("illegal compiler options [{option=option}] specified")); } } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index 3e5943951aec2..b0eb9e907618f 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -76,7 +77,7 @@ public void testToXContent() throws IOException { " \"field\" : \"foo\",\n" + " \"offset\" : 5\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); nestedIdentity = new NestedIdentity("foo", 5, new NestedIdentity("bar", 3, null)); builder = JsonXContent.contentBuilder(); @@ -94,7 +95,7 @@ public void testToXContent() throws IOException { " \"offset\" : 3\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 382f50ff6d947..97dfad4645447 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -204,7 +205,7 @@ public void testToXContent() throws IOException { searchHit.score(1.5f); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", builder.string()); + assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); } public void testSerializeShardTarget() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index decfe804a4284..075d5bc2aa3df 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; @@ -112,7 +113,7 @@ public void testToXContent() throws IOException { builder.endObject(); assertEquals("{\"hits\":{\"total\":1000,\"max_score\":1.5," + "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":\"-Infinity\"},"+ - 
"{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", builder.string()); + "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index 316ef3d455984..d1a9a15a3937c 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -88,7 +89,7 @@ public void testToXContent() throws IOException { builder.startObject(); sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"sort\":[1,\"foo\",3.0]}", builder.string()); + assertEquals("{\"sort\":[1,\"foo\",3.0]}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 29c187f59a88a..29d8e327d5cd7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -252,7 +252,7 @@ public void testParsingExceptionOnUnknownAggregation() throws IOException { builder.endObject(); } builder.endObject(); - BytesReference originalBytes = builder.bytes(); + BytesReference originalBytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); ParsingException ex = expectThrows(ParsingException.class, () -> Aggregations.fromXContent(parser)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 884e732c39107..642092507fed9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -260,7 +260,7 @@ public void testRewrite() throws Exception { builder.endObject(); } builder.endObject(); - bytesReference = builder.bytes(); + bytesReference = BytesReference.bytes(builder); } FilterAggregationBuilder filterAggBuilder = new FilterAggregationBuilder("titles", new WrapperQueryBuilder(bytesReference)); BucketScriptPipelineAggregationBuilder pipelineAgg = new BucketScriptPipelineAggregationBuilder("const", new Script("1")); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index ce5e4a694f279..679941437f029 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -56,7 +56,7 @@ public void testWrapperQueryIsRewritten() throws IOException { builder.endObject(); } builder.endObject(); - bytesReference = builder.bytes(); + bytesReference = BytesReference.bytes(builder); } 
FiltersAggregationBuilder builder = new FiltersAggregationBuilder("titles", new FiltersAggregator.KeyedFilter("titleterms", new WrapperQueryBuilder(bytesReference))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index db5a0a1cd8ec2..8b00c42311add 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; @@ -1220,7 +1221,7 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { } public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { - String mappingJson = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy").endObject().endObject().endObject().endObject().string(); + String mappingJson = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy").endObject().endObject().endObject().endObject()); prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).execute().actionGet(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 737cd3513001e..79d0a0ad17e69 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -334,7 +335,7 @@ public void testXContentResponse() throws Exception { + "\"score\":0.75," + "\"bg_count\":4" + "}]}}]}}"; - assertThat(responseBuilder.string(), equalTo(result)); + assertThat(Strings.toString(responseBuilder), equalTo(result)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java index dddfee7d094d2..86ddd4843a75b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -162,7 +162,7 @@ public void testXContentRoundTrip() throws Exception { 
orig.toXContent(out, ToXContent.EMPTY_PARAMS); out.endObject(); - try (XContentParser in = createParser(JsonXContent.jsonXContent, out.bytes())) { + try (XContentParser in = createParser(JsonXContent.jsonXContent, BytesReference.bytes(out))) { XContentParser.Token token = in.currentToken(); assertNull(token); @@ -176,7 +176,7 @@ public void testXContentRoundTrip() throws Exception { ExtendedBounds read = ExtendedBounds.PARSER.apply(in, null); assertEquals(orig, read); } catch (Exception e) { - throw new Exception("Error parsing [" + out.bytes().utf8ToString() + "]", e); + throw new Exception("Error parsing [" + BytesReference.bytes(out).utf8ToString() + "]", e); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 796355ebfb190..b8c9825d9b5a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -280,7 +281,7 @@ protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry> getMockPlugins() { } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). startObject("type"). startObject("properties"). @@ -80,7 +81,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .endObject(). endObject(). endObject() - .endObject().string(); + .endObject()); final double lowLevelRate; final double topLevelRate; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 835b980d6653e..931f940658893 100644 --- a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; @@ -54,7 +55,7 @@ protected Collection> nodePlugins() { } public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). startObject("type"). startObject("properties"). @@ -63,7 +64,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc .endObject(). endObject(). 
endObject() - .endObject().string(); + .endObject()); final double exceptionRate; final double exceptionOnOpenRate; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index a72c78f79d2d9..66d6f68b8a4aa 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -345,7 +345,7 @@ public void testToXContent() throws IOException { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); XContentBuilder builder = XContentFactory.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - BytesReference source = builder.bytes(); + BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); assertEquals(0, sourceAsMap.size()); } @@ -354,7 +354,7 @@ public void testToXContent() throws IOException { searchSourceBuilder.query(RandomQueryBuilder.createQuery(random())); XContentBuilder builder = XContentFactory.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - BytesReference source = builder.bytes(); + BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); assertEquals(1, sourceAsMap.size()); assertEquals("query", sourceAsMap.keySet().iterator().next()); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java index 800b863138753..5cc4e2ddc68a7 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java @@ -103,7 +103,8 @@ private FetchSubPhase.HitContext hitExecute(XContentBuilder source, boolean fetc private FetchSubPhase.HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes) { FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes); - SearchContext searchContext = new FetchSourceSubPhaseTestSearchContext(fetchSourceContext, source == null ? null : source.bytes()); + SearchContext searchContext = new FetchSourceSubPhaseTestSearchContext(fetchSourceContext, + source == null ? 
null : BytesReference.bytes(source)); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); hitContext.reset(new SearchHit(1, null, null, null), null, 1, null); FetchSourceSubPhase phase = new FetchSourceSubPhase(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index b4bf2950b7d07..7b27cf78ec65a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.text.Text; @@ -86,7 +87,7 @@ public void testToXContent() throws IOException { " \"bar\",\n" + " \"baz\"\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); field = new HighlightField("foo", null); builder = JsonXContent.contentBuilder(); @@ -97,7 +98,7 @@ public void testToXContent() throws IOException { assertEquals( "{\n" + " \"foo\" : null\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index cc23deda2d856..7f61655a09273 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; @@ -2724,7 +2725,7 @@ public void testKeywordFieldHighlighting() throws IOException { } public void testACopyFieldWithNestedQuery() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") .field("type", "nested") .startObject("properties") @@ -2739,7 +2740,7 @@ public void testACopyFieldWithNestedQuery() throws Exception { .field("term_vector", "with_positions_offsets") .field("store", true) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().startArray("foo") @@ -2872,7 +2873,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { } public void testWithNestedQuery() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("text") .field("type", "text") .field("index_options", "offsets") @@ -2886,7 +2887,7 @@ public void 
testWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject() diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 71db66c7fb208..d7480c2b6fb2e 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.fields; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -160,12 +160,12 @@ static Object docScript(Map vars, String fieldName) { public void testStoredFields() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() .startObject("field2").field("type", "text").field("store", false).endObject() .startObject("field3").field("type", "text").field("store", true).endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -253,9 +253,9 @@ public void testStoredFields() throws Exception { public void testScriptDocAndFields() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("num1").field("type", "double").field("store", true).endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -514,53 +514,53 @@ public void testPartialFields() throws Exception { public void testStoredFieldsWithoutSource() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("byte_field") - .field("type", "byte") - .field("store", true) - .endObject() - .startObject("short_field") - .field("type", "short") - .field("store", true) - .endObject() - .startObject("integer_field") - .field("type", "integer") - .field("store", true) - .endObject() - .startObject("long_field") - .field("type", "long") - .field("store", true) - .endObject() - .startObject("float_field") - .field("type", "float") - .field("store", true) - .endObject() - .startObject("double_field") - .field("type", "double") - .field("store", true) - 
.endObject() - .startObject("date_field") - .field("type", "date") - .field("store", true) - .endObject() - .startObject("boolean_field") - .field("type", "boolean") - .field("store", true) - .endObject() - .startObject("binary_field") - .field("type", "binary") - .field("store", true) + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("byte_field") + .field("type", "byte") + .field("store", true) + .endObject() + .startObject("short_field") + .field("type", "short") + .field("store", true) + .endObject() + .startObject("integer_field") + .field("type", "integer") + .field("store", true) + .endObject() + .startObject("long_field") + .field("type", "long") + .field("store", true) + .endObject() + .startObject("float_field") + .field("type", "float") + .field("store", true) + .endObject() + .startObject("double_field") + .field("type", "double") + .field("store", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("store", true) + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .field("store", true) + .endObject() + .startObject("binary_field") + .field("type", "binary") + .field("store", true) + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -670,7 +670,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject()) .get(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") @@ -691,7 +691,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); client().prepareIndex("my-index", "doc", "1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get(); @@ -722,54 +722,54 @@ public void testSingleValueFieldDatatField() throws ExecutionException, Interrup public void testFieldsPulledFromFieldData() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("text_field") - .field("type", "text") - .field("fielddata", true) - .endObject() - .startObject("keyword_field") - .field("type", "keyword") - .endObject() - .startObject("byte_field") - .field("type", "byte") - .endObject() - .startObject("short_field") - .field("type", "short") - .endObject() - .startObject("integer_field") - .field("type", "integer") - .endObject() - .startObject("long_field") - .field("type", "long") - .endObject() - .startObject("float_field") - .field("type", "float") - .endObject() - .startObject("double_field") - .field("type", "double") - .endObject() - .startObject("date_field") - .field("type", "date") - .endObject() - .startObject("boolean_field") - .field("type", "boolean") - .endObject() - .startObject("binary_field") - .field("type", "binary") - .endObject() - .startObject("ip_field") - .field("type", "ip") + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_source") + .field("enabled", false) + 
.endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("keyword_field") + .field("type", "keyword") + .endObject() + .startObject("byte_field") + .field("type", "byte") + .endObject() + .startObject("short_field") + .field("type", "short") + .endObject() + .startObject("integer_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + .startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .endObject() + .startObject("binary_field") + .field("type", "binary") + .endObject() + .startObject("ip_field") + .field("type", "ip") + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 123109cb08c01..0038ef368c150 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; @@ -202,7 +203,7 @@ public void testShapeRelations() throws Exception { assertTrue("Disjoint relation is not supported", disjointSupport); assertTrue("within relation is not supported", withinSupport); - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("polygon") .startObject("properties") @@ -212,7 +213,7 @@ public void testShapeRelations() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes") .addMapping("polygon", mapping, XContentType.JSON); @@ -229,7 +230,7 @@ public void testShapeRelations() throws Exception { .coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()))) .polygon(new PolygonBuilder( new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - BytesReference data = jsonBuilder().startObject().field("area", polygon).endObject().bytes(); + BytesReference data = BytesReference.bytes(jsonBuilder().startObject().field("area", polygon).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -292,7 +293,7 @@ public void testShapeRelations() throws Exception { .hole(new LineStringBuilder( new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - data = jsonBuilder().startObject().field("area", inverse).endObject().bytes(); + data = 
BytesReference.bytes(jsonBuilder().startObject().field("area", inverse).endObject()); client().prepareIndex("shapes", "polygon", "2").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -326,7 +327,7 @@ public void testShapeRelations() throws Exception { builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()); - data = jsonBuilder().startObject().field("area", builder).endObject().bytes(); + data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -335,7 +336,7 @@ public void testShapeRelations() throws Exception { .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()) .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(175, -5).coordinate(185, -5).coordinate(185, 5).coordinate(175, 5).close())); - data = jsonBuilder().startObject().field("area", builder).endObject().bytes(); + data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index d56a98c2ea9ef..a8f559ce35e4c 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -42,22 +43,22 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { */ public void testOrientationPersistence() throws Exception { String idxName = "orientation"; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("shape") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "left") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); // create index assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); - mapping = XContentFactory.jsonBuilder().startObject().startObject("shape") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "right") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate(idxName+"2").addMapping("shape", mapping, XContentType.JSON)); ensureGreen(idxName, idxName+"2"); @@ -100,7 +101,7 @@ public void testIgnoreMalformed() throws Exception { ensureGreen(); // test self crossing ccw poly not crossing 
dateline - String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -112,7 +113,7 @@ public void testIgnoreMalformed() throws Exception { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 5ca4193da46fb..c877cb3be180c 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; @@ -57,16 +58,15 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class GeoShapeQueryTests extends ESSingleNodeTestCase { public void testNullShape() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -77,12 +77,12 @@ public void testNullShape() throws Exception { } public void testIndexPointsFilterRectangle() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -124,12 +124,12 @@ public void testIndexPointsFilterRectangle() throws Exception { } public void testEdgeCases() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -161,12 +161,12 @@ public void testEdgeCases() throws Exception { } public void testIndexedShapeReference() throws Exception { 
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); createIndex("shapes"); ensureGreen(); @@ -237,9 +237,9 @@ public void testReusableBuilder() throws IOException { } private void assertUnmodified(ShapeBuilder builder) throws IOException { - String before = jsonBuilder().startObject().field("area", builder).endObject().string(); + String before = Strings.toString(jsonBuilder().startObject().field("area", builder).endObject()); builder.build(); - String after = jsonBuilder().startObject().field("area", builder).endObject().string(); + String after = Strings.toString(jsonBuilder().startObject().field("area", builder).endObject()); assertThat(before, equalTo(after)); } @@ -438,7 +438,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { } public void testPointsOnly() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", randomBoolean() ? "quadtree" : "geohash") @@ -446,7 +446,7 @@ public void testPointsOnly() throws Exception { .field("distance_error_pct", "0.01") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -471,7 +471,7 @@ public void testPointsOnly() throws Exception { } public void testPointsOnlyExplicit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", randomBoolean() ? 
"quadtree" : "geohash") @@ -479,7 +479,7 @@ public void testPointsOnlyExplicit() throws Exception { .field("distance_error_pct", "0.01") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index 782a16f793b91..c2016ceb02ce7 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -146,7 +146,7 @@ public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOExc XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - return new CompressedXContent(builder.string()); + return new CompressedXContent(Strings.toString(builder)); } private IndexMetaData remove(IndexMetaData indexMetaData, String alias) { diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 49676486588d9..185ec53f3b4e3 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -161,10 +162,10 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String aliasName = "foo_name"; String typeName = "bar"; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate(indexName).addMapping(typeName, mapping, XContentType.JSON).get(); client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); @@ -183,10 +184,10 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { public void testMoreLikeThisIssue2197() throws Exception { Client client = client(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).execute().actionGet(); client().prepareIndex("foo", "bar", "1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) @@ -206,10 +207,10 @@ public void testMoreLikeThisIssue2197() throws Exception { // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { 
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -227,10 +228,10 @@ public void testMoreLikeWithCustomRouting() throws Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)) .addMapping("bar", mapping, XContentType.JSON)); diff --git a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java index 5174267815b84..3ff7e057da735 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.profile; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -135,7 +136,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); result.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -168,7 +169,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 12345678L), Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -181,7 +182,7 @@ public void testToXContent() throws IOException { " \"breakdown\" : {\n" + " \"key1\" : 12345678\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 1234567890L), Collections.emptyList()); @@ -195,6 +196,6 @@ public void testToXContent() throws IOException { " \"breakdown\" : {\n" + " \"key1\" : 1234567890\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index 10bf8e2a30013..3d88f8696b1cc 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.profile.query; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -107,7 +108,7 @@ public void testToXContent() throws IOException { " \"time_in_nanos\" : 123356\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); result.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -130,7 +131,7 @@ public void testToXContent() throws IOException { " \"time_in_nanos\" : 123356\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new CollectorResult("collectorName", "some reason", 12345678L, Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -140,7 +141,7 @@ public void testToXContent() throws IOException { " \"reason\" : \"some reason\",\n" + " \"time\" : \"12.3ms\",\n" + " \"time_in_nanos\" : 12345678\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new CollectorResult("collectorName", "some reason", 1234567890L, Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -150,6 +151,6 @@ public void testToXContent() throws IOException { " \"reason\" : \"some reason\",\n" + " \"time\" : \"1.2s\",\n" + " \"time_in_nanos\" : 1234567890\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 136ed61bbbb20..6e4a1b7d618ea 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; @@ -127,7 +128,7 @@ public void testExists() throws Exception { SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).execute().actionGet(); assertSearchResponse(resp); try { - assertEquals(String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, mapping.string(), resp), count, resp.getHits().getTotalHits()); + assertEquals(String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, Strings.toString(mapping), resp), count, resp.getHits().getTotalHits()); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { final String index = searchHit.getIndex(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 9deca71a2a9f9..7aef2d208ecc5 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -19,11 +19,11 @@ package org.elasticsearch.search.query; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -306,7 +306,7 @@ public void testLenientFlagBeingTooLenient() throws Exception { } public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, InterruptedException, IOException { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -316,7 +316,7 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1") .addMapping("type1", mapping, XContentType.JSON); @@ -356,7 +356,7 @@ public void testSimpleQueryStringOnIndexMetaField() throws Exception { public void testEmptySimpleQueryStringWithAnalysis() throws Exception { // https://github.com/elastic/elasticsearch/issues/18202 - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -366,7 +366,7 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices() .prepareCreate("test1") diff --git a/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java index b030043faf746..96582025e1af7 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -599,7 +600,7 @@ public void testInvalidScrollKeepAlive() throws IOException { private void assertToXContentResponse(ClearScrollResponse response, boolean succeed, int numFreed) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); response.toXContent(builder, ToXContent.EMPTY_PARAMS); - Map map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); assertThat(map.get("succeeded"), is(succeed)); assertThat(map.get("num_freed"), equalTo(numFreed)); } diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index edcfdc2155507..53bd9da2ff1de 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -135,7 +136,7 @@ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, jsonBuilder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder)); parser.nextToken(); parser.nextToken(); parser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index a5962dca5951b..b9f73869ba7ab 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,7 +51,7 @@ public class SearchSliceIT extends ESIntegTestCase { private static final int NUM_DOCS = 1000; private int setupIndex(boolean withDocs) throws IOException, ExecutionException, InterruptedException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject() .startObject("type") .startObject("properties") @@ -68,7 +69,7 @@ private int setupIndex(boolean withDocs) throws IOException, ExecutionException, .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); int numberOfShards = randomIntBetween(1, 7); assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) diff --git a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index c9b4fd80a2936..aa49bed6975b1 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.xcontent.XContentType; @@ -224,7 +225,7 @@ public void testSimpleSorts() throws Exception { } public void testSortMinValueScript() throws IOException { - String mapping = jsonBuilder() + String mapping = Strings.toString(jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -242,7 +243,7 @@ public void testSortMinValueScript() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); ensureGreen(); @@ -343,7 +344,7 @@ public void testDocumentsWithNullValue() throws Exception { // TODO: sort shouldn't fail when sort field is mapped dynamically // We have to specify mapping explicitly because 
by the time search is performed dynamic mapping might not // be propagated to all nodes yet and sort operation fail when the sort field is not defined - String mapping = jsonBuilder() + String mapping = Strings.toString(jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -355,7 +356,7 @@ public void testDocumentsWithNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index 06f5ccf696ce4..f267dec2a8623 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -183,7 +184,7 @@ public void testRandomSortBuilders() throws IOException { xContentBuilder.endArray(); } xContentBuilder.endObject(); - List> parsedSort = parseSort(xContentBuilder.string()); + List> parsedSort = parseSort(Strings.toString(xContentBuilder)); assertEquals(testBuilders.size(), parsedSort.size()); Iterator> iterator = testBuilders.iterator(); for (SortBuilder parsedBuilder : parsedSort) { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 01b16bb9fb698..deae6bf1a7ef7 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.FieldMemoryStats; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -1113,17 +1114,17 @@ public void testIssue5930() throws IOException { // see issue #6399 public void testIndexingUnrelatedNullValue() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(jsonBuilder() + .startObject() + .startObject(TYPE) + .startObject("properties") + .startObject(FIELD) + .field("type", "completion") + .endObject() + .endObject() + .endObject() + .endObject()); assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping, XContentType.JSON).get()); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 3b1c88cfc5779..feb15044438ec 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -25,6 +25,7 @@ import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -1079,13 +1080,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 10, "title"); // suggest with collate - String filterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("{{field}}", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterString = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("{{field}}", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); @@ -1098,13 +1099,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE NumShards numShards = getNumShards("test"); // collate suggest with bad query - String incorrectFilterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String incorrectFilterString = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); Map> namedSuggestion = new HashMap<>(); namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); @@ -1116,13 +1117,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // suggest with collation - String filterStringAsFilter = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterStringAsFilter = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", @@ -1130,13 +1131,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 2, "title"); // collate suggest with bad query - String filterStr = XContentFactory.jsonBuilder() - .startObject() - .startObject("pprefix") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterStr = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("pprefix") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); try { @@ -1147,13 +1148,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // collate script failure due to no additional params - String collateWithParams = 
-        String collateWithParams = XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("{{query_type}}")
-                .field("{{query_field}}", "{{suggestion}}")
-                .endObject()
-                .endObject()
-                .string();
+        String collateWithParams = Strings
+                .toString(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .startObject("{{query_type}}")
+                        .field("{{query_field}}", "{{suggestion}}")
+                        .endObject()
+                        .endObject());
         PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams);
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java
index d53cbfdab6e80..d54fa0f705f0d 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java
@@ -183,7 +183,7 @@ public void testParsingExceptionOnUnknownSuggestion() throws IOException {
             builder.endArray();
         }
         builder.endObject();
-        BytesReference originalBytes = builder.bytes();
+        BytesReference originalBytes = BytesReference.bytes(builder);
         try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) {
             assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
             ParsingException ex = expectThrows(ParsingException.class, () -> Suggest.fromXContent(parser));
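The second half of the migration is the binary twin of the first: the deprecated XContentBuilder.bytes() instance method becomes the static BytesReference.bytes(XContentBuilder), which dominates the remaining hunks. A minimal sketch, again assuming only classes these hunks import (encodeBody is an illustrative name):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    import java.io.IOException;

    public class BytesReferenceSketch {
        // Serialize a small document to its binary (BytesReference) form.
        static BytesReference encodeBody() throws IOException {
            XContentBuilder builder = jsonBuilder()
                    .startObject()
                    .field("field", "value")
                    .endObject();
            // Before this patch the call site was: builder.bytes()
            return BytesReference.bytes(builder);
        }
    }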
.field("type", "completion") .startArray("contexts") @@ -71,36 +70,36 @@ public void testIndexingWithNoContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion1", "suggestion2") - .field("weight", 3) - .endObject() - .startObject() - .array("input", "suggestion3", "suggestion4") - .field("weight", 4) - .endObject() - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion1", "suggestion2") + .field("weight", 3) + .endObject() + .startObject() + .array("input", "suggestion3", "suggestion4") + .field("weight", 4) + .endObject() + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } public void testIndexingWithSimpleContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -110,31 +109,31 @@ public void testIndexingWithSimpleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .field("ctx", "ctx1") - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", "ctx1") + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleNumberContexts() throws Exception { - String mapping = 
jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -144,31 +143,31 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .field("ctx", 100) - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", 100) + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleBooleanContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -178,31 +177,31 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .field("ctx", true) - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", true) + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleNULLContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = 
Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -212,7 +211,7 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); XContentBuilder builder = jsonBuilder() @@ -229,12 +228,12 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { .endObject(); Exception e = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), XContentType.JSON))); + () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]", e.getCause().getMessage()); } public void testIndexingWithContextList() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -244,29 +243,29 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .array("ctx", "ctx1", "ctx2", "ctx3") - .endObject() - .field("weight", 5) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .array("ctx", "ctx1", "ctx2", "ctx3") + .endObject() + .field("weight", 5) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithMixedTypeContextList() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -276,29 +275,29 @@ public void testIndexingWithMixedTypeContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); 
MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .array("ctx", "ctx1", true, 100) - .endObject() - .field("weight", 5) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .array("ctx", "ctx1", true, 100) + .endObject() + .field("weight", 5) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -308,7 +307,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); XContentBuilder builder = jsonBuilder() @@ -323,12 +322,12 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject(); Exception e = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), XContentType.JSON))); + () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage()); } public void testIndexingWithMultipleContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -342,7 +341,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -360,7 +359,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); @@ -368,7 +367,7 @@ public void testIndexingWithMultipleContexts() throws 
Exception { public void testQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("context1"); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -379,7 +378,7 @@ public void testQueryContextParsingBasic() throws Exception { public void testBooleanQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(true); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -390,7 +389,7 @@ public void testBooleanQueryContextParsingBasic() throws Exception { public void testNumberQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(10); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -401,7 +400,7 @@ public void testNumberQueryContextParsingBasic() throws Exception { public void testNULLQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().nullValue(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -413,7 +412,7 @@ public void testQueryContextParsingArray() throws Exception { .value("context1") .value("context2") .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(2)); @@ -432,7 +431,7 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception { .value(true) .value(10) .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(4)); @@ -458,7 +457,7 @@ public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Excep .value(10) .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, 
BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -471,7 +470,7 @@ public void testQueryContextParsingObject() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -486,7 +485,7 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -501,7 +500,7 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -516,7 +515,7 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -536,7 +535,7 @@ public void testQueryContextParsingObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(2)); @@ -571,7 +570,7 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(4)); @@ -617,7 +616,7 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + 
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         Exception e = expectThrows(ElasticsearchParseException.class,
             () -> mapping.parseQueryContext(parser));
@@ -471,7 +470,7 @@ public void testQueryContextParsingObject() throws Exception {
                 .field("boost", 10)
                 .field("prefix", true)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1));
@@ -486,7 +485,7 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception {
                 .field("boost", 10)
                 .field("prefix", true)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1));
@@ -501,7 +500,7 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception {
                 .field("boost", 10)
                 .field("prefix", true)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1));
@@ -516,7 +515,7 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception {
                 .field("boost", 10)
                 .field("prefix", true)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         Exception e = expectThrows(ElasticsearchParseException.class,
             () -> mapping.parseQueryContext(parser));
@@ -536,7 +535,7 @@ public void testQueryContextParsingObjectArray() throws Exception {
                 .field("prefix", false)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(2));
@@ -571,7 +570,7 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception {
                 .field("prefix", false)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(4));
@@ -617,7 +616,7 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep
                 .field("prefix", false)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         Exception e = expectThrows(ElasticsearchParseException.class,
             () -> mapping.parseQueryContext(parser));
@@ -640,7 +639,7 @@ public void testQueryContextParsingMixed() throws Exception {
                 .field("prefix", true)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(4));
@@ -674,7 +673,7 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception {
                 .endObject()
                 .nullValue()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         CategoryContextMapping mapping = ContextBuilder.category("cat").build();
         Exception e = expectThrows(ElasticsearchParseException.class,
             () -> mapping.parseQueryContext(parser));
@@ -682,7 +681,7 @@
     }
     public void testUnknownQueryContextParsing() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("completion")
                 .field("type", "completion")
                 .startArray("contexts")
@@ -696,7 +695,7 @@ public void testUnknownQueryContextParsing() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
                 new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
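Nearly every parsing test above shares one shape: serialize a value with a builder, then feed the resulting bytes back through an XContentParser. A minimal sketch of that round trip, assuming only the xcontent classes these hunks import (the createParser used in the tests is an ESTestCase helper; here the parser is constructed directly with the same arguments the patch uses elsewhere):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    import java.io.IOException;

    public class RoundTripSketch {
        // Build a scalar value, then parse it back the way the tests above do.
        static String roundTrip() throws IOException {
            XContentBuilder builder = JsonXContent.contentBuilder().value("context1");
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    BytesReference.bytes(builder).streamInput())) {
                parser.nextToken();   // advance to the scalar value
                return parser.text(); // "context1"
            }
        }
    }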
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
index 9e22ad64d5c1e..2d179f3dbe6c3 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.search.suggest.completion;
 import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -48,7 +50,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
     public void testIndexingWithNoContexts() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("completion")
                 .field("type", "completion")
                 .startArray("contexts")
@@ -58,36 +60,36 @@ public void testIndexingWithNoContexts() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
                 new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
-        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder()
-                .startObject()
-                .startArray("completion")
-                .startObject()
-                .array("input", "suggestion1", "suggestion2")
-                .field("weight", 3)
-                .endObject()
-                .startObject()
-                .array("input", "suggestion3", "suggestion4")
-                .field("weight", 4)
-                .endObject()
-                .startObject()
-                .array("input", "suggestion5", "suggestion6", "suggestion7")
-                .field("weight", 5)
-                .endObject()
-                .endArray()
-                .endObject()
-                .bytes(),
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(jsonBuilder()
+                        .startObject()
+                        .startArray("completion")
+                        .startObject()
+                        .array("input", "suggestion1", "suggestion2")
+                        .field("weight", 3)
+                        .endObject()
+                        .startObject()
+                        .array("input", "suggestion3", "suggestion4")
+                        .field("weight", 4)
+                        .endObject()
+                        .startObject()
+                        .array("input", "suggestion5", "suggestion6", "suggestion7")
+                        .field("weight", 5)
+                        .endObject()
+                        .endArray()
+                        .endObject()),
                 XContentType.JSON));
         IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 7);
     }
     public void testIndexingWithSimpleContexts() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("completion")
                 .field("type", "completion")
                 .startArray("contexts")
@@ -98,34 +100,34 @@ public void testIndexingWithSimpleContexts() throws Exception {
                 .endArray()
                 .endObject()
                 .endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
                 new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
-        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder()
-                .startObject()
-                .startArray("completion")
-                .startObject()
-                .array("input", "suggestion5", "suggestion6", "suggestion7")
-                .startObject("contexts")
-                .startObject("ctx")
-                .field("lat", 43.6624803)
-                .field("lon", -79.3863353)
-                .endObject()
-                .endObject()
-                .field("weight", 5)
-                .endObject()
-                .endArray()
-                .endObject()
-                .bytes(),
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(jsonBuilder()
+                        .startObject()
+                        .startArray("completion")
+                        .startObject()
+                        .array("input", "suggestion5", "suggestion6", "suggestion7")
+                        .startObject("contexts")
+                        .startObject("ctx")
+                        .field("lat", 43.6624803)
+                        .field("lon", -79.3863353)
+                        .endObject()
+                        .endObject()
+                        .field("weight", 5)
+                        .endObject()
+                        .endArray()
+                        .endObject()),
                 XContentType.JSON));
         IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
     public void testIndexingWithContextList() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("completion")
                 .field("type", "completion")
                 .startArray("contexts")
@@ -135,38 +137,38 @@ public void testIndexingWithContextList() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
                 new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
         MappedFieldType completionFieldType = fieldMapper.fieldType();
-        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder()
-                .startObject()
-                .startObject("completion")
-                .array("input", "suggestion5", "suggestion6", "suggestion7")
-                .startObject("contexts")
-                .startArray("ctx")
-                .startObject()
-                .field("lat", 43.6624803)
-                .field("lon", -79.3863353)
-                .endObject()
-                .startObject()
-                .field("lat", 43.6624718)
-                .field("lon", -79.3873227)
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+                .bytes(jsonBuilder()
+                        .startObject()
+                        .startObject("completion")
+                        .array("input", "suggestion5", "suggestion6", "suggestion7")
+                        .startObject("contexts")
+                        .startArray("ctx")
+                        .startObject()
+                        .field("lat", 43.6624803)
+                        .field("lon", -79.3863353)
+                        .endObject()
+                        .startObject()
+                        .field("lat", 43.6624718)
+                        .field("lon", -79.3873227)
+                        .endObject()
+                        .endArray()
                 .endObject()
-                .endArray()
-                .endObject()
-                .field("weight", 5)
-                .endObject()
-                .endObject()
-                .bytes(),
+                        .field("weight", 5)
+                        .endObject()
+                        .endObject()),
                 XContentType.JSON));
         IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
     public void testIndexingWithMultipleContexts() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1")
+        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("completion")
                 .field("type", "completion")
                 .startArray("contexts")
@@ -180,7 +182,7 @@ public void testIndexingWithMultipleContexts() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject().endObject()
-                .endObject().endObject().string();
+                .endObject().endObject());
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
                 new CompressedXContent(mapping));
         FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
@@ -198,7 +200,7 @@ public void testIndexingWithMultipleContexts() throws Exception {
                 .endObject()
                 .endArray()
                 .endObject();
-        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(),
+        ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder),
                 XContentType.JSON));
         IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
@@ -206,7 +208,7 @@ public void testIndexingWithMultipleContexts() throws Exception {
     public void testParsingQueryContextBasic() throws Exception {
         XContentBuilder builder = jsonBuilder().value("ezs42e44yx96");
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         GeoContextMapping mapping = ContextBuilder.geo("geo").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1 + 8));
@@ -225,7 +227,7 @@ public void testParsingQueryContextGeoPoint() throws Exception {
                 .field("lat", 23.654242)
                 .field("lon", 90.047153)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         GeoContextMapping mapping = ContextBuilder.geo("geo").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1 + 8));
@@ -248,7 +250,7 @@ public void testParsingQueryContextObject() throws Exception {
                 .field("boost", 10)
                 .array("neighbours", 1, 2, 3)
                 .endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         GeoContextMapping mapping = ContextBuilder.geo("geo").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
@@ -286,7 +288,7 @@ public void testParsingQueryContextObjectArray() throws Exception {
                 .array("neighbours", 5)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         GeoContextMapping mapping = ContextBuilder.geo("geo").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8));
@@ -329,7 +331,7 @@ public void testParsingQueryContextMixed() throws Exception {
                 .field("lon", 92.112583)
                 .endObject()
                 .endArray();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes());
+        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
         GeoContextMapping mapping = ContextBuilder.geo("geo").build();
         List internalQueryContexts = mapping.parseQueryContext(parser);
         assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 89d3e2bf0029b..3d4b6d3128a75 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -580,7 +580,7 @@ public void testIncludeGlobalState() throws Exception {
         if(testPipeline) {
             logger.info("--> creating test pipeline");
-            BytesReference pipelineSource = jsonBuilder().startObject()
+            BytesReference pipelineSource = BytesReference.bytes(jsonBuilder().startObject()
                 .field("description", "my_pipeline")
                 .startArray("processors")
                 .startObject()
                 .startObject("test")
@@ -588,7 +588,7 @@ public void testIncludeGlobalState() throws Exception {
                 .endObject()
                 .endObject()
                 .endArray()
-                .endObject().bytes();
+                .endObject());
             assertAcked(client().admin().cluster().preparePutPipeline("barbaz", pipelineSource, XContentType.JSON).get());
         }
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
index f3d8bba3edb68..44fe0d4dd5c58 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
@@ -81,7 +81,7 @@ public void testRestoreSnapshotRequestParsing() throws IOException {
             builder.field("ignore_unavailable", indicesOptions.ignoreUnavailable());
         }
-        BytesReference bytes = builder.endObject().bytes();
+        BytesReference bytes = BytesReference.bytes(builder.endObject());
         request.source(XContentHelper.convertToMap(bytes, true, builder.contentType()).v2());
@@ -146,7 +146,7 @@ public void testCreateSnapshotRequestParsing() throws IOException {
             builder.field("ignore_unavailable", indicesOptions.ignoreUnavailable());
         }
-        BytesReference bytes = builder.endObject().bytes();
+        BytesReference bytes = BytesReference.bytes(builder.endObject());
         request.source(XContentHelper.convertToMap(bytes, true, builder.contentType()).v2());
diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
index d4da4f8f1c5cb..7a481100f1372 100644
--- a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
+++ b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.tasks;
 import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -151,7 +152,7 @@ private static RawTaskStatus randomRawTaskStatus() throws IOException {
                 builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5));
             }
             builder.endObject();
-            return new RawTaskStatus(builder.bytes());
+            return new RawTaskStatus(BytesReference.bytes(builder));
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
index 33047b2d12a6a..fcd80b191b842 100644
--- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
+++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.threadpool;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
@@ -78,7 +79,7 @@ public void testThatToXContentWritesOutUnboundedCorrectly() throws Exception {
         info.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();
-        Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
+        Map<String, Object> map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
         assertThat(map, hasKey("foo"));
         map = (Map) map.get("foo");
         assertThat(map, hasKey("queue_size"));
@@ -100,7 +101,7 @@ public void testThatToXContentWritesInteger() throws Exception {
         info.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();
-        Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
+        Map<String, Object> map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
         assertThat(map, hasKey("foo"));
         map = (Map) map.get("foo");
         assertThat(map, hasKey("queue_size"));
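The ThreadPoolSerializationTests hunks above lean on XContentHelper.convertToMap, which pairs the detected content type with the parsed map in a Tuple; .v2() extracts the map half. A minimal sketch of the call shape, assuming only the helpers these hunks already use (the boolean argument mirrors the tests above):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentHelper;
    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    import java.io.IOException;
    import java.util.Map;

    public class ConvertToMapSketch {
        static Map<String, Object> asMap() throws IOException {
            XContentBuilder builder = jsonBuilder()
                    .startObject()
                    .field("queue_size", 42)
                    .endObject();
            // v2() is the Map half of the (contentType, map) Tuple.
            return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
        }
    }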
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
index 80da56e351b4b..0d8a469981966 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
@@ -40,6 +40,7 @@
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -887,7 +888,7 @@ public void testRenderConnectionInfoXContent() throws IOException {
         builder.endObject();
         assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"http_addresses\":[\"0.0.0.0:80\"],\"connected\":true," +
             "\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," +
-            "\"skip_unavailable\":true}}", builder.string());
+            "\"skip_unavailable\":true}}", Strings.toString(builder));
         stats = new RemoteConnectionInfo("some_other_cluster",
             Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1),
                 new TransportAddress(TransportAddress.META_ADDRESS,2)),
@@ -900,7 +901,7 @@ public void testRenderConnectionInfoXContent() throws IOException {
         builder.endObject();
         assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],\"http_addresses\":[\"0.0.0.0:80\",\"0.0.0.0:81\"]," +
             "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," +
-            "\"skip_unavailable\":false}}", builder.string());
+            "\"skip_unavailable\":false}}", Strings.toString(builder));
     }
     private RemoteConnectionInfo getRemoteConnectionInfo(RemoteClusterConnection connection) throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
index ae232c2c6872b..17fb21441e21d 100644
--- a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
+++ b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -240,7 +241,7 @@ private void updateAndCheckSource(long expectedVersion, XContentBuilder xContent
     private void updateAndCheckSource(long expectedVersion, Boolean detectNoop, XContentBuilder xContentBuilder) {
         UpdateResponse updateResponse = update(detectNoop, expectedVersion, xContentBuilder);
-        assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), xContentBuilder.bytes().utf8ToString());
+        assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), BytesReference.bytes(xContentBuilder).utf8ToString());
     }
     private UpdateResponse update(Boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) {
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java
index d2db8a50b8dc4..b732c6b5b42bf 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.index.mapper;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
@@ -112,9 +113,9 @@ public void testNullValue() throws IOException {
     public void testEmptyName() throws IOException {
         // after version 5
         for (String type : TYPES) {
-            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                     .startObject("properties").startObject("").field("type", type).endObject().endObject()
-                    .endObject().endObject().string();
+                    .endObject().endObject());
             IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> parser.parse("type", new CompressedXContent(mapping))
diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
index 3697b4ee2438d..fa851e9c6d802 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
@@ -312,7 +313,7 @@ public static SearchSourceBuilder randomSearchSourceBuilder(
             jsonBuilder.endObject();
             XContentParser parser = XContentFactory.xContent(XContentType.JSON)
                 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
-                    jsonBuilder.bytes().streamInput());
+                    BytesReference.bytes(jsonBuilder).streamInput());
             parser.nextToken();
             parser.nextToken();
             parser.nextToken();
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
index 3d681f60f7616..0037c23656f6c 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
@@ -964,9 +964,9 @@ public static void checkGeneratedJson(String expected, QueryBuilder source) thro
         XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
         source.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertEquals(
-                msg(expected, builder.string()),
+                msg(expected, Strings.toString(builder)),
                 expected.replaceAll("\\s+", ""),
-                builder.string().replaceAll("\\s+", ""));
+                Strings.toString(builder).replaceAll("\\s+", ""));
     }
     private static String msg(String left, String right) {
@@ -1077,7 +1077,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
         });
         for (String type : currentTypes) {
-            mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type,
+            mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type,
                     STRING_FIELD_NAME, "type=text",
                     STRING_FIELD_NAME_2, "type=keyword",
                     INT_FIELD_NAME, "type=integer",
@@ -1089,7 +1089,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
                     OBJECT_FIELD_NAME, "type=object",
                     GEO_POINT_FIELD_NAME, "type=geo_point",
                     GEO_SHAPE_FIELD_NAME, "type=geo_shape"
-            ).string()), MapperService.MergeReason.MAPPING_UPDATE);
+            ))), MapperService.MergeReason.MAPPING_UPDATE);
             // also add mappings for two inner field in the object field
             mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\","
                     + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" +
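The AbstractQueryTestCase hunk above shows the migration applied one level removed: PutMappingRequest.buildFromSimplifiedDef returns an XContentBuilder (it was previously consumed with .string()), so the whole call is now wrapped in Strings.toString before being handed to CompressedXContent. A minimal sketch of that wrapping, assuming the classes named in the hunk and an illustrative field name:

    import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.compress.CompressedXContent;

    import java.io.IOException;

    public class CompressedMappingSketch {
        // Build a one-field mapping from name/definition pairs and compress it.
        static CompressedXContent simpleMapping() throws IOException {
            return new CompressedXContent(Strings.toString(
                    PutMappingRequest.buildFromSimplifiedDef("type", "my_field", "type=text")));
        }
    }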
"[_all]" : indices, - segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS).string()); + Strings.toString(segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS))); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e5efa0da8c08d..a65b8b430e681 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -943,7 +943,7 @@ protected static BytesReference toShuffledXContent(ToXContent toXContent, XConte BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, params, humanReadable); try (XContentParser parser = parserFunction.apply(xContentType.xContent(), bytes)) { try (XContentBuilder builder = shuffleXContent(parser, rarely(), exceptFieldNames)) { - return builder.bytes(); + return BytesReference.bytes(builder); } } } @@ -1128,8 +1128,8 @@ public static void assertEqualsWithErrorMessageFromXConte expectedJson.endObject(); NotEqualMessageBuilder message = new NotEqualMessageBuilder(); message.compareMaps( - XContentHelper.convertToMap(actualJson.bytes(), false).v2(), - XContentHelper.convertToMap(expectedJson.bytes(), false).v2()); + XContentHelper.convertToMap(BytesReference.bytes(actualJson), false).v2(), + XContentHelper.convertToMap(BytesReference.bytes(expectedJson), false).v2()); throw new AssertionError("Didn't match expected value:\n" + message); } catch (IOException e) { throw new AssertionError("IOException while building failure message", e); @@ -1141,7 +1141,7 @@ public static void assertEqualsWithErrorMessageFromXConte */ protected final XContentParser createParser(XContentBuilder builder) throws IOException { return builder.generator().contentType().xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, builder.bytes().streamInput()); + .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 1868fc34a991f..6cdd3ac7796dc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo.Failure; import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContent; @@ -128,14 +127,14 @@ public static Tuple, List> randomStoredFieldValues(Random r break; case 8: byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8); - BytesArray randomBytesArray = new BytesArray(randomBytes); - originalValues.add(randomBytesArray); if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { //JSON and YAML write the base64 format expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes)); + originalValues.add(Base64.getEncoder().encodeToString(randomBytes)); } else { //SMILE and CBOR write the 
                     //SMILE and CBOR write the original bytes as they support binary format
-                    expectedParsedValues.add(randomBytesArray);
+                    expectedParsedValues.add(randomBytes);
+                    originalValues.add(randomBytes);
                 }
                 break;
             default:
@@ -176,7 +175,7 @@ public static BytesReference randomSource(Random random, XContentType xContentTy
             builder.startObject();
             addFields(random, builder, minNumFields, 0);
             builder.endObject();
-            return builder.bytes();
+            return BytesReference.bytes(builder);
         } catch(IOException e) {
             throw new RuntimeException(e);
         }
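The RandomObjects hunk above encodes a real behavioural difference between the content types: JSON and YAML have no binary type, so a byte[] value comes back from parsing as its Base64 string, while SMILE and CBOR preserve the raw bytes. A minimal sketch of the JSON side, assuming only the builder API used throughout this patch:

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class BinaryFieldSketch {
        static void show() throws IOException {
            byte[] raw = "some bytes".getBytes(StandardCharsets.UTF_8);
            XContentBuilder builder = jsonBuilder().startObject().field("blob", raw).endObject();
            // JSON cannot carry raw bytes, so the field is emitted as Base64 text.
            String json = Strings.toString(builder);
            assert json.contains(Base64.getEncoder().encodeToString(raw));
        }
    }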
BytesReference.bytes(builder.map(finalBodyAsMap)).toBytesRef(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index 245e7956595c1..3383d3bb21d04 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -22,6 +22,7 @@ import org.apache.http.client.methods.HttpHead; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -119,7 +120,7 @@ public String getBodyAsString() { .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, body)) { jsonBuilder.copyCurrentStructure(parser); } - bodyAsString = jsonBuilder.string(); + bodyAsString = Strings.toString(jsonBuilder); } catch (IOException e) { throw new UncheckedIOException("unable to convert response body to a string format", e); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java index 4d03e8e1bb005..62857fee9addb 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -73,7 +74,7 @@ public void testGetInsertPaths() throws IOException { builder.endObject(); try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) { + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder), builder.contentType())) { parser.nextToken(); List insertPaths = XContentTestUtils.getInsertPaths(parser, new Stack<>()); assertEquals(5, insertPaths.size()); @@ -90,16 +91,16 @@ public void testInsertIntoXContent() throws IOException { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); builder.endObject(); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""), - () -> "inn.er1", () -> new HashMap<>()); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""), - () -> "field1", () -> "value1"); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), + Collections.singletonList(""), () -> "inn.er1", () -> new HashMap<>()); + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), + Collections.singletonList(""), () -> "field1", () -> "value1"); + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), 
Collections.singletonList("inn\\.er1"), () -> "inner2", () -> new HashMap<>()); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), Collections.singletonList("inn\\.er1"), () -> "field2", () -> "value2"); try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) { + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder), builder.contentType())) { Map map = parser.map(); assertEquals(2, map.size()); assertEquals("value1", map.get("field1")); @@ -148,7 +149,7 @@ public void testInsertRandomXContent() throws IOException { Map resultMap; try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), null, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), null, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); @@ -162,7 +163,7 @@ public void testInsertRandomXContent() throws IOException { Predicate pathsToExclude = path -> path.endsWith("foo1"); try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), pathsToExclude, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); @@ -176,7 +177,7 @@ public void testInsertRandomXContent() throws IOException { pathsToExclude = path -> path.contains("foo1"); try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), pathsToExclude, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java index b111024c77ad0..705f86fbb0797 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.hamcrest; import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -83,11 +84,11 @@ public void testAssertXContentEquivalent() throws IOException { original.endObject(); try (XContentBuilder copy = JsonXContent.contentBuilder(); - XContentParser parser = createParser(original.contentType().xContent(), original.bytes())) { + XContentParser parser = createParser(original.contentType().xContent(), BytesReference.bytes(original))) { parser.nextToken(); XContentHelper.copyCurrentStructure(copy.generator(), parser); try (XContentBuilder copyShuffled = shuffleXContent(copy) ) { - assertToXContentEquivalent(original.bytes(), copyShuffled.bytes(), original.contentType()); + assertToXContentEquivalent(BytesReference.bytes(original), 
BytesReference.bytes(copyShuffled), original.contentType()); } } } @@ -118,7 +119,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("f2: expected [value2] but not found")); } { @@ -146,7 +148,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("f2: expected [value2] but was [differentValue2]")); } { @@ -178,7 +181,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { otherBuilder.field("f1", "value"); otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("2: expected [three] but was [four]")); } { @@ -207,7 +211,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("expected [1] more entries")); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java index c377b500ccce3..79d6d42092a85 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.rest.yaml; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -50,7 +51,8 @@ public void testEvaluateObjectPathEscape() throws Exception { xContentBuilder.field("field2.field3", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2\\.field3"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -63,7 +65,8 @@ public void testEvaluateObjectPathWithDots() throws Exception { xContentBuilder.field("field2", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); 
- ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1..field2"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -82,7 +85,8 @@ public void testEvaluateInteger() throws Exception { xContentBuilder.field("field2", 333); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2"); assertThat(object, instanceOf(Integer.class)); assertThat(object, equalTo(333)); @@ -95,7 +99,8 @@ public void testEvaluateDouble() throws Exception { xContentBuilder.field("field2", 3.55); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2"); assertThat(object, instanceOf(Double.class)); assertThat(object, equalTo(3.55)); @@ -108,7 +113,8 @@ public void testEvaluateArray() throws Exception { xContentBuilder.array("array1", "value1", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.array1"); assertThat(object, instanceOf(List.class)); List list = (List) object; @@ -137,7 +143,8 @@ public void testEvaluateArrayElementObject() throws Exception { xContentBuilder.endArray(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.array1.1.element"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -164,7 +171,8 @@ public void testEvaluateObjectKeys() throws Exception { xContentBuilder.endObject(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("metadata.templates"); assertThat(object, instanceOf(Map.class)); Map map = (Map)object; @@ -182,7 +190,8 @@ public void testEvaluateStashInPropertyName() throws Exception { xContentBuilder.endObject(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath 
objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); try { objectPath.evaluate("field1.$placeholder.element1"); fail("evaluate should have failed due to unresolved placeholder"); @@ -246,7 +255,7 @@ public void testEvaluateArrayAsRoot() throws Exception { xContentBuilder.endObject(); xContentBuilder.endArray(); ObjectPath objectPath = ObjectPath.createFromXContent( - XContentFactory.xContent(xContentBuilder.contentType()), xContentBuilder.bytes()); + XContentFactory.xContent(xContentBuilder.contentType()), BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate(""); assertThat(object, notNullValue()); assertThat(object, instanceOf(List.class)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java index 65f52352289f0..0705eb32fc294 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java @@ -119,7 +119,7 @@ public void testShuffleXContentExcludeFields() throws IOException { } } builder.endObject(); - BytesReference bytes = builder.bytes(); + BytesReference bytes = BytesReference.bytes(builder); final LinkedHashMap initialMap; try (XContentParser parser = createParser(xContentType.xContent(), bytes)) { initialMap = (LinkedHashMap)parser.mapOrdered(); From bf65cb49149ee5b203b20c965f5fc809ccfebb61 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 14 Mar 2018 20:59:47 +0100 Subject: [PATCH 29/89] Untangle Engine Constructor logic (#28245) Currently we have fairly complicated logic in the engine constructor to deal with all the various ways we want to mutate the lucene index and translog we're opening. We can: 1) Create an empty index 2) Use the lucene index but create a new translog 3) Use both 4) Force a new history uuid in all cases. This leads to complicated code flows which make it harder and harder to make sure we cover all the corner cases. This PR tries to take another approach. Constructing an InternalEngine always opens things as they are and all needed modifications are done by static methods directly on the directory, one at a time.
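To make the new flow concrete, here is a rough sketch (illustrative only, not code from this change) of how a recovery path prepares the on-disk state with the EngineDiskUtils helpers added below and then opens the engine. The prepareAndOpen wrapper and its parameters are stand-ins for what StoreRecovery actually wires together:

    // illustrative sketch; assumes the caller supplies these values, as StoreRecovery does
    static void prepareAndOpen(Store store, ShardPath shardPath, ShardId shardId,
                               RecoverySource.Type recoveryType, IndexShard indexShard) throws IOException {
        final Directory dir = store.directory();
        final Path translogPath = shardPath.resolveTranslog();
        if (recoveryType == RecoverySource.Type.EMPTY_STORE) {
            // brand-new shard: write an empty lucene index and an empty translog
            EngineDiskUtils.createEmpty(dir, translogPath, shardId);
        } else if (recoveryType == RecoverySource.Type.LOCAL_SHARDS) {
            // keep the copied lucene files but start a fresh history uuid and translog
            EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(dir, translogPath, shardId);
        }
        // the engine constructor then simply opens whatever is on disk
        indexShard.openEngineAndRecoverFromTranslog();
    }

Each mutation is an explicit, self-contained step on the directory, so the corner cases can be reasoned about and tested one static method at a time instead of through combinations of constructor flags.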
--- docs/reference/indices/flush.asciidoc | 4 +- .../index/engine/CombinedDeletionPolicy.java | 28 +- .../index/engine/EngineConfig.java | 41 +- .../index/engine/EngineDiskUtils.java | 144 +++++++ .../index/engine/InternalEngine.java | 180 +++----- .../elasticsearch/index/shard/IndexShard.java | 91 ++-- .../index/shard/StoreRecovery.java | 27 +- .../org/elasticsearch/index/store/Store.java | 4 +- .../recovery/PeerRecoveryTargetService.java | 4 +- ...ryPrepareForTranslogOperationsRequest.java | 18 +- .../indices/recovery/RecoveryTarget.java | 20 +- .../recovery/RecoveryTargetHandler.java | 4 +- .../recovery/RemoteRecoveryTargetHandler.java | 4 +- .../engine/CombinedDeletionPolicyTests.java | 20 +- .../index/engine/EngineDiskUtilsTests.java | 207 ++++++++++ .../index/engine/InternalEngineTests.java | 387 ++++-------------- .../index/shard/IndexShardIT.java | 14 +- .../index/shard/IndexShardTests.java | 14 +- .../index/shard/RefreshListenersTests.java | 16 +- .../index/engine/EngineTestCase.java | 79 ++-- 20 files changed, 620 insertions(+), 686 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/engine/EngineDiskUtils.java create mode 100644 server/src/test/java/org/elasticsearch/index/engine/EngineDiskUtilsTests.java diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index e172b53f1a83c..91fac0908ef7f 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -93,12 +93,12 @@ which returns something similar to: { "commit" : { "id" : "3M3zkw2GHMo2Y4h4/KFKCg==", - "generation" : 2, + "generation" : 3, "user_data" : { "translog_uuid" : "hnOG3xFcTDeoI_kvvvOdNA", "history_uuid" : "XP7KDJGiS1a2fHYiFL5TXQ", "local_checkpoint" : "-1", - "translog_generation" : "2", + "translog_generation" : "3", "max_seq_no" : "-1", "sync_id" : "AVvFY-071siAOuFGEO9P", <1> "max_unsafe_auto_id_timestamp" : "-1" diff --git a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java index f26a24c47a6f3..6f06c310e4cd5 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java @@ -46,16 +46,14 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { private final Logger logger; private final TranslogDeletionPolicy translogDeletionPolicy; - private final EngineConfig.OpenMode openMode; private final LongSupplier globalCheckpointSupplier; private final IndexCommit startingCommit; private final ObjectIntHashMap snapshottedCommits; // Number of snapshots held against each commit point. private volatile IndexCommit safeCommit; // the most recent safe commit point - its max_seqno at most the persisted global checkpoint. 
private volatile IndexCommit lastCommit; // the most recent commit point - CombinedDeletionPolicy(EngineConfig.OpenMode openMode, Logger logger, TranslogDeletionPolicy translogDeletionPolicy, + CombinedDeletionPolicy(Logger logger, TranslogDeletionPolicy translogDeletionPolicy, LongSupplier globalCheckpointSupplier, IndexCommit startingCommit) { - this.openMode = openMode; this.logger = logger; this.translogDeletionPolicy = translogDeletionPolicy; this.globalCheckpointSupplier = globalCheckpointSupplier; @@ -65,25 +63,11 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { @Override public synchronized void onInit(List commits) throws IOException { - switch (openMode) { - case CREATE_INDEX_AND_TRANSLOG: - assert startingCommit == null : "CREATE_INDEX_AND_TRANSLOG must not have starting commit; commit [" + startingCommit + "]"; - break; - case OPEN_INDEX_CREATE_TRANSLOG: - case OPEN_INDEX_AND_TRANSLOG: - assert commits.isEmpty() == false : "index is opened, but we have no commits"; - assert startingCommit != null && commits.contains(startingCommit) : "Starting commit not in the existing commit list; " - + "startingCommit [" + startingCommit + "], commit list [" + commits + "]"; - keepOnlyStartingCommitOnInit(commits); - // OPEN_INDEX_CREATE_TRANSLOG can open an index commit from other shard with a different translog history, - // We therefore should not use that index commit to update the translog deletion policy. - if (openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - updateTranslogDeletionPolicy(); - } - break; - default: - throw new IllegalArgumentException("unknown openMode [" + openMode + "]"); - } + assert commits.isEmpty() == false : "index is opened, but we have no commits"; + assert startingCommit != null && commits.contains(startingCommit) : "Starting commit not in the existing commit list; " + + "startingCommit [" + startingCommit + "], commit list [" + commits + "]"; + keepOnlyStartingCommitOnInit(commits); + updateTranslogDeletionPolicy(); } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 30743c18cfe10..352c3ba3e6280 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -75,7 +75,6 @@ public final class EngineConfig { private final List internalRefreshListener; @Nullable private final Sort indexSort; - private final boolean forceNewHistoryUUID; private final TranslogRecoveryRunner translogRecoveryRunner; @Nullable private final CircuitBreakerService circuitBreakerService; @@ -113,24 +112,20 @@ public final class EngineConfig { Property.IndexScope, Property.Dynamic); private final TranslogConfig translogConfig; - private final OpenMode openMode; /** * Creates a new {@link org.elasticsearch.index.engine.EngineConfig} */ - public EngineConfig(OpenMode openMode, ShardId shardId, String allocationId, ThreadPool threadPool, + public EngineConfig(ShardId shardId, String allocationId, ThreadPool threadPool, IndexSettings indexSettings, Engine.Warmer warmer, Store store, MergePolicy mergePolicy, Analyzer analyzer, Similarity similarity, CodecService codecService, Engine.EventListener eventListener, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, - boolean forceNewHistoryUUID, TranslogConfig translogConfig, TimeValue flushMergesAfter, + TranslogConfig translogConfig, TimeValue flushMergesAfter, List 
externalRefreshListener, List internalRefreshListener, Sort indexSort, TranslogRecoveryRunner translogRecoveryRunner, CircuitBreakerService circuitBreakerService, LongSupplier globalCheckpointSupplier) { - if (openMode == null) { - throw new IllegalArgumentException("openMode must not be null"); - } this.shardId = shardId; this.allocationId = allocationId; this.indexSettings = indexSettings; @@ -151,8 +146,6 @@ public EngineConfig(OpenMode openMode, ShardId shardId, String allocationId, Thr this.queryCachingPolicy = queryCachingPolicy; this.translogConfig = translogConfig; this.flushMergesAfter = flushMergesAfter; - this.openMode = openMode; - this.forceNewHistoryUUID = forceNewHistoryUUID; this.externalRefreshListener = externalRefreshListener; this.internalRefreshListener = internalRefreshListener; this.indexSort = indexSort; @@ -315,22 +308,6 @@ public TranslogConfig getTranslogConfig() { */ public TimeValue getFlushMergesAfter() { return flushMergesAfter; } - /** - * Returns the {@link OpenMode} for this engine config. - */ - public OpenMode getOpenMode() { - return openMode; - } - - - /** - * Returns true if a new history uuid must be generated. If false, a new uuid will only be generated if no existing - * one is found. - */ - public boolean getForceNewHistoryUUID() { - return forceNewHistoryUUID; - } - @FunctionalInterface public interface TranslogRecoveryRunner { int run(Engine engine, Translog.Snapshot snapshot) throws IOException; @@ -343,20 +320,6 @@ public TranslogRecoveryRunner getTranslogRecoveryRunner() { return translogRecoveryRunner; } - /** - * Engine open mode defines how the engine should be opened or in other words what the engine should expect - * to recover from. We either create a brand new engine with a new index and translog or we recover from an existing index. - * If the index exists we also have the ability open only the index and create a new transaction log which happens - * during remote recovery since we have already transferred the index files but the translog is replayed from remote. The last - * and safest option opens the lucene index as well as it's referenced transaction log for a translog recovery. - * See also {@link Engine#recoverFromTranslog()} - */ - public enum OpenMode { - CREATE_INDEX_AND_TRANSLOG, - OPEN_INDEX_CREATE_TRANSLOG, - OPEN_INDEX_AND_TRANSLOG; - } - /** * The refresh listeners to add to Lucene for externally visible refreshes */ diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineDiskUtils.java b/server/src/main/java/org/elasticsearch/index/engine/EngineDiskUtils.java new file mode 100644 index 0000000000000..f7f3aa8e9fe1d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/engine/EngineDiskUtils.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.store.Directory; +import org.elasticsearch.Assertions; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + + +/** + * This class contains utility methods for mutating the shard lucene index and translog in preparation for being opened. + */ +public abstract class EngineDiskUtils { + + /** + * Creates an empty lucene index and a corresponding empty translog. Any existing data will be deleted. + */ + public static void createEmpty(final Directory dir, final Path translogPath, final ShardId shardId) throws IOException { + try (IndexWriter writer = newIndexWriter(true, dir)) { + final String translogUuid = Translog.createEmptyTranslog(translogPath, SequenceNumbers.NO_OPS_PERFORMED, shardId); + final Map<String, String> map = new HashMap<>(); + map.put(Translog.TRANSLOG_GENERATION_KEY, "1"); + map.put(Translog.TRANSLOG_UUID_KEY, translogUuid); + map.put(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID()); + map.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); + map.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); + map.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, "-1"); + updateCommitData(writer, map); + } + } + + + /** + * Takes an existing lucene index and marks it with a new history uuid. Also creates a new empty translog file. + * This is used to make sure no existing shard will recover from this index using ops based recovery. + */ + public static void bootstrapNewHistoryFromLuceneIndex(final Directory dir, final Path translogPath, final ShardId shardId) + throws IOException { + try (IndexWriter writer = newIndexWriter(false, dir)) { + final Map<String, String> userData = getUserData(writer); + final long maxSeqNo = Long.parseLong(userData.get(SequenceNumbers.MAX_SEQ_NO)); + final String translogUuid = Translog.createEmptyTranslog(translogPath, maxSeqNo, shardId); + final Map<String, String> map = new HashMap<>(); + map.put(Translog.TRANSLOG_GENERATION_KEY, "1"); + map.put(Translog.TRANSLOG_UUID_KEY, translogUuid); + map.put(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID()); + map.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(maxSeqNo)); + updateCommitData(writer, map); + } + } + + /** + * Creates a new empty translog and associates it with an existing lucene index. 
+ */ + public static void createNewTranslog(final Directory dir, final Path translogPath, long initialGlobalCheckpoint, final ShardId shardId) + throws IOException { + if (Assertions.ENABLED) { + final List<IndexCommit> existingCommits = DirectoryReader.listCommits(dir); + assert existingCommits.size() == 1 : "creating a new translog should have one commit, commits[" + existingCommits + "]"; + SequenceNumbers.CommitInfo commitInfo = Store.loadSeqNoInfo(existingCommits.get(0)); + assert commitInfo.localCheckpoint >= initialGlobalCheckpoint : + "trying to create a shard whose local checkpoint [" + commitInfo.localCheckpoint + "] is < global checkpoint [" + + initialGlobalCheckpoint + "]"; + } + + try (IndexWriter writer = newIndexWriter(false, dir)) { + final String translogUuid = Translog.createEmptyTranslog(translogPath, initialGlobalCheckpoint, shardId); + final Map<String, String> map = new HashMap<>(); + map.put(Translog.TRANSLOG_GENERATION_KEY, "1"); + map.put(Translog.TRANSLOG_UUID_KEY, translogUuid); + updateCommitData(writer, map); + } + } + + + /** + * Checks that the Lucene index contains a history uuid marker. If not, a new one is generated and committed. + */ + public static void ensureIndexHasHistoryUUID(final Directory dir) throws IOException { + try (IndexWriter writer = newIndexWriter(false, dir)) { + final Map<String, String> userData = getUserData(writer); + if (userData.containsKey(Engine.HISTORY_UUID_KEY) == false) { + updateCommitData(writer, Collections.singletonMap(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID())); + } + } + } + + private static void updateCommitData(IndexWriter writer, Map<String, String> keysToUpdate) throws IOException { + final Map<String, String> userData = getUserData(writer); + userData.putAll(keysToUpdate); + writer.setLiveCommitData(userData.entrySet()); + writer.commit(); + } + + private static Map<String, String> getUserData(IndexWriter writer) { + final Map<String, String> userData = new HashMap<>(); + writer.getLiveCommitData().forEach(e -> userData.put(e.getKey(), e.getValue())); + return userData; + } + + private static IndexWriter newIndexWriter(final boolean create, final Directory dir) throws IOException { + IndexWriterConfig iwc = new IndexWriterConfig(null) + .setCommitOnClose(false) + // we don't want merges to happen here - we call maybe merge on the engine + // later once we started it up otherwise we would need to wait for it here + // we also don't specify a codec here and merges should use the engines for this index + .setMergePolicy(NoMergePolicy.INSTANCE) + .setOpenMode(create ? 
IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND); + return new IndexWriter(dir, iwc); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 4ac399eac9022..49be68efcad5d 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -49,7 +49,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.LoggerInfoStream; @@ -72,6 +71,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogCorruptedException; @@ -133,7 +133,6 @@ public class InternalEngine extends Engine { // are falling behind and when writing indexing buffer to disk is too slow. When this is 0, there is no throttling, else we throttling // incoming indexing ops to a single thread: private final AtomicInteger throttleRequestCount = new AtomicInteger(); - private final EngineConfig.OpenMode openMode; private final AtomicBoolean pendingTranslogRecovery = new AtomicBoolean(false); public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1); @@ -157,7 +156,6 @@ public InternalEngine(EngineConfig engineConfig) { final EngineConfig engineConfig, final BiFunction localCheckpointTrackerSupplier) { super(engineConfig); - openMode = engineConfig.getOpenMode(); if (engineConfig.isAutoGeneratedIDsOptimizationEnabled() == false) { maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE); } @@ -183,22 +181,15 @@ public InternalEngine(EngineConfig engineConfig) { assert translog.getGeneration() != null; this.translog = translog; final IndexCommit startingCommit = getStartingCommitPoint(); - assert openMode != EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG || startingCommit != null : - "Starting commit should be non-null; mode [" + openMode + "]; startingCommit [" + startingCommit + "]"; + assert startingCommit != null : "Starting commit should be non-null"; this.localCheckpointTracker = createLocalCheckpointTracker(localCheckpointTrackerSupplier, startingCommit); - this.combinedDeletionPolicy = new CombinedDeletionPolicy(openMode, logger, translogDeletionPolicy, + this.combinedDeletionPolicy = new CombinedDeletionPolicy(logger, translogDeletionPolicy, translog::getLastSyncedGlobalCheckpoint, startingCommit); - writer = createWriter(openMode == EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, startingCommit); + writer = createWriter(startingCommit); updateMaxUnsafeAutoIdTimestampFromWriter(writer); - assert engineConfig.getForceNewHistoryUUID() == false - || openMode == EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG - || openMode == EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG - : "OpenMode must be either CREATE_INDEX_AND_TRANSLOG or OPEN_INDEX_CREATE_TRANSLOG if forceNewHistoryUUID; " + - "openMode [" + openMode + "], 
forceNewHistoryUUID [" + engineConfig.getForceNewHistoryUUID() + "]"; - historyUUID = loadOrGenerateHistoryUUID(writer, engineConfig.getForceNewHistoryUUID()); + historyUUID = loadOrGenerateHistoryUUID(writer); Objects.requireNonNull(historyUUID, "history uuid should not be null"); indexWriter = writer; - updateWriterOnOpen(); } catch (IOException | TranslogCorruptedException e) { throw new EngineCreationFailureException(shardId, "failed to create engine", e); } catch (AssertionError e) { @@ -217,7 +208,7 @@ public InternalEngine(EngineConfig engineConfig) { internalSearcherManager.addListener(versionMap); assert pendingTranslogRecovery.get() == false : "translog recovery can't be pending before we set it"; // don't allow commits until we are done with recovering - pendingTranslogRecovery.set(openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); + pendingTranslogRecovery.set(true); for (ReferenceManager.RefreshListener listener: engineConfig.getExternalRefreshListener()) { this.externalSearcherManager.addListener(listener); } @@ -241,20 +232,10 @@ private LocalCheckpointTracker createLocalCheckpointTracker( BiFunction localCheckpointTrackerSupplier, IndexCommit startingCommit) throws IOException { final long maxSeqNo; final long localCheckpoint; - switch (openMode) { - case CREATE_INDEX_AND_TRANSLOG: - maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; - localCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; - break; - case OPEN_INDEX_AND_TRANSLOG: - case OPEN_INDEX_CREATE_TRANSLOG: - final SequenceNumbers.CommitInfo seqNoStats = store.loadSeqNoInfo(startingCommit); - maxSeqNo = seqNoStats.maxSeqNo; - localCheckpoint = seqNoStats.localCheckpoint; - logger.trace("recovered maximum sequence number [{}] and local checkpoint [{}]", maxSeqNo, localCheckpoint); - break; - default: throw new IllegalArgumentException("unknown type: " + openMode); - } + final SequenceNumbers.CommitInfo seqNoStats = Store.loadSeqNoInfo(startingCommit); + maxSeqNo = seqNoStats.maxSeqNo; + localCheckpoint = seqNoStats.localCheckpoint; + logger.trace("recovered maximum sequence number [{}] and local checkpoint [{}]", maxSeqNo, localCheckpoint); return localCheckpointTrackerSupplier.apply(maxSeqNo, localCheckpoint); } @@ -380,9 +361,6 @@ public InternalEngine recoverFromTranslog() throws IOException { flushLock.lock(); try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); - if (openMode != EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - throw new IllegalStateException("Can't recover from translog with open mode: " + openMode); - } if (pendingTranslogRecovery.get() == false) { throw new IllegalStateException("Engine has already been recovered"); } @@ -405,50 +383,31 @@ public InternalEngine recoverFromTranslog() throws IOException { @Override public void skipTranslogRecovery() { - if (openMode != EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - throw new IllegalStateException("Can't skip translog recovery with open mode: " + openMode); - } assert pendingTranslogRecovery.get() : "translogRecovery is not pending but should be"; pendingTranslogRecovery.set(false); // we are good - now we can commit } private IndexCommit getStartingCommitPoint() throws IOException { final IndexCommit startingIndexCommit; - final List existingCommits; - switch (openMode) { - case CREATE_INDEX_AND_TRANSLOG: - startingIndexCommit = null; - break; - case OPEN_INDEX_CREATE_TRANSLOG: - // Use the last commit - existingCommits = DirectoryReader.listCommits(store.directory()); - startingIndexCommit = 
existingCommits.get(existingCommits.size() - 1); - break; - case OPEN_INDEX_AND_TRANSLOG: - // Use the safe commit - final long lastSyncedGlobalCheckpoint = translog.getLastSyncedGlobalCheckpoint(); - final long minRetainedTranslogGen = translog.getMinFileGeneration(); - existingCommits = DirectoryReader.listCommits(store.directory()); - // We may not have a safe commit if an index was create before v6.2; and if there is a snapshotted commit whose translog - // are not retained but max_seqno is at most the global checkpoint, we may mistakenly select it as a starting commit. - // To avoid this issue, we only select index commits whose translog are fully retained. - if (engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_6_2_0)) { - final List recoverableCommits = new ArrayList<>(); - for (IndexCommit commit : existingCommits) { - if (minRetainedTranslogGen <= Long.parseLong(commit.getUserData().get(Translog.TRANSLOG_GENERATION_KEY))) { - recoverableCommits.add(commit); - } - } - assert recoverableCommits.isEmpty() == false : "No commit point with translog found; " + - "commits [" + existingCommits + "], minRetainedTranslogGen [" + minRetainedTranslogGen + "]"; - startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(recoverableCommits, lastSyncedGlobalCheckpoint); - } else { - // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint. - startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); + final long lastSyncedGlobalCheckpoint = translog.getLastSyncedGlobalCheckpoint(); + final long minRetainedTranslogGen = translog.getMinFileGeneration(); + final List<IndexCommit> existingCommits = DirectoryReader.listCommits(store.directory()); + // We may not have a safe commit if an index was created before v6.2; and if there is a snapshotted commit whose translog + // are not retained but max_seqno is at most the global checkpoint, we may mistakenly select it as a starting commit. + // To avoid this issue, we only select index commits whose translog are fully retained. + if (engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_6_2_0)) { + final List<IndexCommit> recoverableCommits = new ArrayList<>(); + for (IndexCommit commit : existingCommits) { + if (minRetainedTranslogGen <= Long.parseLong(commit.getUserData().get(Translog.TRANSLOG_GENERATION_KEY))) { + recoverableCommits.add(commit); } - break; - default: - throw new IllegalArgumentException("unknown mode: " + openMode); + } + assert recoverableCommits.isEmpty() == false : "No commit point with translog found; " + + "commits [" + existingCommits + "], minRetainedTranslogGen [" + minRetainedTranslogGen + "]"; + startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(recoverableCommits, lastSyncedGlobalCheckpoint); + } else { + // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint. + startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); } return startingIndexCommit; } @@ -469,58 +428,20 @@ private void recoverFromTranslogInternal() throws IOException { if (opsRecovered > 0) { logger.trace("flushing post recovery from translog. ops recovered [{}]. committed translog id [{}]. current id [{}]", opsRecovered, translogGeneration == null ? 
null : translogGeneration.translogFileGeneration, translog.currentFileGeneration()); - flush(true, true); - refresh("translog_recovery"); - } else if (translog.isCurrent(translogGeneration) == false) { - commitIndexWriter(indexWriter, translog, lastCommittedSegmentInfos.getUserData().get(Engine.SYNC_COMMIT_ID)); + commitIndexWriter(indexWriter, translog, null); refreshLastCommittedSegmentInfos(); + refresh("translog_recovery"); } - // clean up what's not needed translog.trimUnreferencedReaders(); } private Translog openTranslog(EngineConfig engineConfig, TranslogDeletionPolicy translogDeletionPolicy, LongSupplier globalCheckpointSupplier) throws IOException { - assert openMode != null; final TranslogConfig translogConfig = engineConfig.getTranslogConfig(); - final String translogUUID; - switch (openMode) { - case CREATE_INDEX_AND_TRANSLOG: - case OPEN_INDEX_CREATE_TRANSLOG: - translogUUID = - Translog.createEmptyTranslog(translogConfig.getTranslogPath(), globalCheckpointSupplier.getAsLong(), shardId); - break; - case OPEN_INDEX_AND_TRANSLOG: - translogUUID = loadTranslogUUIDFromLastCommit(); - break; - default: - throw new AssertionError("Unknown openMode " + openMode); - } + final String translogUUID = loadTranslogUUIDFromLastCommit(); + // We expect that this shard already exists, so it must already have an existing translog else something is badly wrong! return new Translog(translogConfig, translogUUID, translogDeletionPolicy, globalCheckpointSupplier); } - /** If needed, updates the metadata in the index writer to match the potentially new translog and history uuid */ - private void updateWriterOnOpen() throws IOException { - Objects.requireNonNull(historyUUID); - final Map commitUserData = commitDataAsMap(indexWriter); - boolean needsCommit = false; - if (historyUUID.equals(commitUserData.get(HISTORY_UUID_KEY)) == false) { - needsCommit = true; - } else { - assert config().getForceNewHistoryUUID() == false : "config forced a new history uuid but it didn't change"; - assert openMode != EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG : "new index but it already has an existing history uuid"; - } - if (translog.getTranslogUUID().equals(commitUserData.get(Translog.TRANSLOG_UUID_KEY)) == false) { - needsCommit = true; - } else { - assert openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG : "translog uuid didn't change but open mode is " + openMode; - } - if (needsCommit) { - commitIndexWriter(indexWriter, translog, openMode == EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG - ? 
commitUserData.get(SYNC_COMMIT_ID) : null); - } - } - - @Override public Translog getTranslog() { ensureOpen(); @@ -564,31 +485,20 @@ public long getWritingBytes() { */ @Nullable private String loadTranslogUUIDFromLastCommit() throws IOException { - assert openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG : - "Only reuse existing translogUUID with OPEN_INDEX_AND_TRANSLOG; openMode = [" + openMode + "]"; final Map commitUserData = store.readLastCommittedSegmentsInfo().getUserData(); - if (commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY)) { - if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY) == false) { - throw new IllegalStateException("commit doesn't contain translog generation id"); - } - return commitUserData.get(Translog.TRANSLOG_UUID_KEY); - } else { - return null; + if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY) == false) { + throw new IllegalStateException("commit doesn't contain translog generation id"); } + return commitUserData.get(Translog.TRANSLOG_UUID_KEY); } /** - * Reads the current stored history ID from the IW commit data. Generates a new UUID if not found or if generation is forced. + * Reads the current stored history ID from the IW commit data. */ - private String loadOrGenerateHistoryUUID(final IndexWriter writer, boolean forceNew) throws IOException { - String uuid = commitDataAsMap(writer).get(HISTORY_UUID_KEY); - if (uuid == null || forceNew) { - assert - forceNew || // recovery from a local store creates an index that doesn't have yet a history_uuid - openMode == EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG || - config().getIndexSettings().getIndexVersionCreated().before(Version.V_6_0_0_rc1) : - "existing index was created after 6_0_0_rc1 but has no history uuid"; - uuid = UUIDs.randomBase64UUID(); + private String loadOrGenerateHistoryUUID(final IndexWriter writer) throws IOException { + final String uuid = commitDataAsMap(writer).get(HISTORY_UUID_KEY); + if (uuid == null) { + throw new IllegalStateException("commit doesn't contain history uuid"); } return uuid; } @@ -1530,6 +1440,8 @@ public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineExcepti // we need to refresh in order to clear older version values refresh("version_table_flush", SearcherScope.INTERNAL); translog.trimUnreferencedReaders(); + } catch (AlreadyClosedException e) { + throw e; } catch (Exception e) { throw new FlushFailedEngineException(shardId, e); } @@ -1898,9 +1810,9 @@ private long loadCurrentVersionFromIndex(Term uid) throws IOException { } } - private IndexWriter createWriter(boolean create, IndexCommit startingCommit) throws IOException { + private IndexWriter createWriter(IndexCommit startingCommit) throws IOException { try { - final IndexWriterConfig iwc = getIndexWriterConfig(create, startingCommit); + final IndexWriterConfig iwc = getIndexWriterConfig(startingCommit); return createWriter(store.directory(), iwc); } catch (LockObtainFailedException ex) { logger.warn("could not lock IndexWriter", ex); @@ -1913,10 +1825,10 @@ IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOEx return new IndexWriter(directory, iwc); } - private IndexWriterConfig getIndexWriterConfig(boolean create, IndexCommit startingCommit) { + private IndexWriterConfig getIndexWriterConfig(IndexCommit startingCommit) { final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer()); iwc.setCommitOnClose(false); // we by default don't commit on close - iwc.setOpenMode(create ? 
IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND); + iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND); iwc.setIndexCommit(startingCommit); iwc.setIndexDeletionPolicy(combinedDeletionPolicy); // with tests.verbose, lucene sets this up: plumb to align with filesystem stream diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8a45246034ab1..13708add48124 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1283,44 +1283,12 @@ int runTranslogRecovery(Engine engine, Translog.Snapshot snapshot) throws IOExce return opsRecovered; } - /** creates an empty index and translog and opens the engine **/ - public void createIndexAndTranslog() throws IOException { - assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.EMPTY_STORE; - assert shardRouting.primary() && shardRouting.isRelocationTarget() == false; - // note: these are set when recovering from the translog - final RecoveryState.Translog translogStats = recoveryState().getTranslog(); - translogStats.totalOperations(0); - translogStats.totalOperationsOnStart(0); - replicationTracker.updateGlobalCheckpointOnReplica(SequenceNumbers.NO_OPS_PERFORMED, "index created"); - innerOpenEngineAndTranslog(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, false); - } - - /** opens the engine on top of the existing lucene engine but creates an empty translog **/ - public void openIndexAndCreateTranslog(boolean forceNewHistoryUUID, long globalCheckpoint) throws IOException { - if (Assertions.ENABLED) { - assert recoveryState.getRecoverySource().getType() != RecoverySource.Type.EMPTY_STORE && - recoveryState.getRecoverySource().getType() != RecoverySource.Type.EXISTING_STORE; - SequenceNumbers.CommitInfo commitInfo = store.loadSeqNoInfo(null); - assert commitInfo.localCheckpoint >= globalCheckpoint : - "trying to create a shard whose local checkpoint [" + commitInfo.localCheckpoint + "] is < global checkpoint [" - + globalCheckpoint + "]"; - // This assertion is only guaranteed if all nodes are on 6.2+. - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) { - final List existingCommits = DirectoryReader.listCommits(store.directory()); - assert existingCommits.size() == 1 : "Open index create translog should have one commit, commits[" + existingCommits + "]"; - } - } - replicationTracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "opening index with a new translog"); - innerOpenEngineAndTranslog(EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG, forceNewHistoryUUID); - } - /** * opens the engine on top of the existing lucene engine and translog. * Operations from the translog will be replayed to bring lucene up to date. **/ - public void openIndexAndRecoveryFromTranslog() throws IOException { - assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.EXISTING_STORE; - innerOpenEngineAndTranslog(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, false); + public void openEngineAndRecoverFromTranslog() throws IOException { + innerOpenEngineAndTranslog(); getEngine().recoverFromTranslog(); } @@ -1328,13 +1296,12 @@ public void openIndexAndRecoveryFromTranslog() throws IOException { * Opens the engine on top of the existing lucene engine and translog. * The translog is kept but its operations won't be replayed. 
*/ - public void openIndexAndSkipTranslogRecovery() throws IOException { - assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.PEER; - innerOpenEngineAndTranslog(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, false); + public void openEngineAndSkipTranslogRecovery() throws IOException { + innerOpenEngineAndTranslog(); getEngine().skipTranslogRecovery(); } - private void innerOpenEngineAndTranslog(final EngineConfig.OpenMode openMode, final boolean forceNewHistoryUUID) throws IOException { + private void innerOpenEngineAndTranslog() throws IOException { if (state != IndexShardState.RECOVERING) { throw new IndexShardNotRecoveringException(shardId, state); } @@ -1349,29 +1316,25 @@ private void innerOpenEngineAndTranslog(final EngineConfig.OpenMode openMode, fi } recoveryState.setStage(RecoveryState.Stage.TRANSLOG); - assert openMode == EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG || assertMaxUnsafeAutoIdInCommit(); - - - final EngineConfig config = newEngineConfig(openMode, forceNewHistoryUUID); + final EngineConfig config = newEngineConfig(); // we disable deletes since we allow for operations to be executed against the shard while recovering // but we need to make sure we don't loose deletes until we are done recovering config.setEnableGcDeletes(false); - if (openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - // we have to set it before we open an engine and recover from the translog because - // acquiring a snapshot from the translog causes a sync which causes the global checkpoint to be pulled in, - // and an engine can be forced to close in ctor which also causes the global checkpoint to be pulled in. - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - final long globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); - replicationTracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "read from translog checkpoint"); - } + // we have to set it before we open an engine and recover from the translog because + // acquiring a snapshot from the translog causes a sync which causes the global checkpoint to be pulled in, + // and an engine can be forced to close in ctor which also causes the global checkpoint to be pulled in. 
+ final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); + final long globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); + replicationTracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "read from translog checkpoint"); + + assertMaxUnsafeAutoIdInCommit(); + createNewEngine(config); verifyNotClosed(); - if (openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - // We set active because we are now writing operations to the engine; this way, if we go idle after some time and become inactive, - // we still give sync'd flush a chance to run: - active.set(true); - } + // We set active because we are now writing operations to the engine; this way, if we go idle after some time and become inactive, + // we still give sync'd flush a chance to run: + active.set(true); assertSequenceNumbersInCommit(); assert recoveryState.getStage() == RecoveryState.Stage.TRANSLOG : "TRANSLOG stage expected but was: " + recoveryState.getStage(); } @@ -1388,15 +1351,9 @@ private boolean assertSequenceNumbersInCommit() throws IOException { private boolean assertMaxUnsafeAutoIdInCommit() throws IOException { final Map userData = SegmentInfos.readLatestCommit(store.directory()).getUserData(); - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_5_0) && - // TODO: LOCAL_SHARDS need to transfer this information - recoveryState().getRecoverySource().getType() != RecoverySource.Type.LOCAL_SHARDS) { - // as of 5.5.0, the engine stores the maxUnsafeAutoIdTimestamp in the commit point. - // This should have baked into the commit by the primary we recover from, regardless of the index age. - assert userData.containsKey(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : - "opening index which was created post 5.5.0 but " + InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID - + " is not found in commit"; - } + assert userData.containsKey(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : + "opening index which was created post 5.5.0 but " + InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID + + " is not found in commit"; return true; } @@ -2189,12 +2146,12 @@ private DocumentMapperForType docMapper(String type) { return mapperService.documentMapperWithAutoCreate(type); } - private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode, final boolean forceNewHistoryUUID) { + private EngineConfig newEngineConfig() { Sort indexSort = indexSortSupplier.get(); - return new EngineConfig(openMode, shardId, shardRouting.allocationId().getId(), + return new EngineConfig(shardId, shardRouting.allocationId().getId(), threadPool, indexSettings, warmer, store, indexSettings.getMergePolicy(), mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, - indexCache.query(), cachingPolicy, forceNewHistoryUUID, translogConfig, + indexCache.query(), cachingPolicy, translogConfig, IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), Collections.singletonList(refreshListeners), Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index c3b4525924ae3..224ae60a420d1 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -40,6 +40,7 @@ import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.engine.EngineDiskUtils; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.mapper.MapperService; @@ -389,8 +390,8 @@ private void internalRecoverFromStore(IndexShard indexShard) throws IndexShardRe recoveryState.getIndex().updateVersion(version); if (recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { assert indexShouldExists; - indexShard.openIndexAndCreateTranslog(true, store.loadSeqNoInfo(null).localCheckpoint); - } else { + EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(store.directory(), indexShard.shardPath().resolveTranslog(), shardId); + } else if (indexShouldExists) { // since we recover from local, just fill the files and size try { final RecoveryState.Index index = recoveryState.getIndex(); @@ -400,13 +401,11 @@ private void internalRecoverFromStore(IndexShard indexShard) throws IndexShardRe } catch (IOException e) { logger.debug("failed to list file details", e); } - if (indexShouldExists) { - indexShard.openIndexAndRecoveryFromTranslog(); - indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm()); - } else { - indexShard.createIndexAndTranslog(); - } + } else { + EngineDiskUtils.createEmpty(store.directory(), indexShard.shardPath().resolveTranslog(), shardId); } + indexShard.openEngineAndRecoverFromTranslog(); + indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("post recovery from shard_store"); } catch (EngineException | IOException e) { @@ -446,16 +445,10 @@ private void restore(final IndexShard indexShard, final Repository repository, f } final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName); repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState()); - final Store store = indexShard.store(); - final long localCheckpoint; - store.incRef(); - try { - localCheckpoint = store.loadSeqNoInfo(null).localCheckpoint; - } finally { - store.decRef(); - } - indexShard.openIndexAndCreateTranslog(true, localCheckpoint); + EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(indexShard.store().directory(), indexShard.shardPath().resolveTranslog(), + shardId); assert indexShard.shardRouting.primary() : "only primary shards can recover from store"; + indexShard.openEngineAndRecoverFromTranslog(); indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done"); diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 7b73a945d6e31..be9164cec5744 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -218,8 +218,8 @@ private static SegmentInfos readSegmentsInfo(IndexCommit commit, Directory direc * @return {@link SequenceNumbers.CommitInfo} containing information about the last commit * @throws IOException if an I/O exception occurred reading the latest Lucene commit point from disk */ - public SequenceNumbers.CommitInfo loadSeqNoInfo(final IndexCommit commit) throws IOException { - final Map userData = commit != null ? 
commit.getUserData() : SegmentInfos.readLatestCommit(directory).getUserData(); + public static SequenceNumbers.CommitInfo loadSeqNoInfo(final IndexCommit commit) throws IOException { + final Map userData = commit.getUserData(); return SequenceNumbers.loadSeqNoInfoFromLuceneCommit(userData.entrySet()); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 57aa4cf140392..73764249ce128 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -364,7 +364,7 @@ public static long getStartingSeqNo(final Logger logger, final RecoveryTarget re final long globalCheckpoint = Translog.readGlobalCheckpoint(recoveryTarget.translogLocation(), translogUUID); final List existingCommits = DirectoryReader.listCommits(store.directory()); final IndexCommit safeCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, globalCheckpoint); - final SequenceNumbers.CommitInfo seqNoStats = store.loadSeqNoInfo(safeCommit); + final SequenceNumbers.CommitInfo seqNoStats = Store.loadSeqNoInfo(safeCommit); if (logger.isTraceEnabled()) { final StringJoiner descriptionOfExistingCommits = new StringJoiner(","); for (IndexCommit commit : existingCommits) { @@ -406,7 +406,7 @@ class PrepareForTranslogOperationsRequestHandler implements TransportRequestHand public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { - recoveryRef.target().prepareForTranslogOperations(request.createNewTranslog(), request.totalTranslogOps()); + recoveryRef.target().prepareForTranslogOperations(request.isFileBasedRecovery(), request.totalTranslogOps()); } channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java index 28df2897d9778..65ccb078c94a0 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java @@ -33,13 +33,13 @@ class RecoveryPrepareForTranslogOperationsRequest extends TransportRequest { private final long recoveryId; private final ShardId shardId; private final int totalTranslogOps; - private final boolean createNewTranslog; + private final boolean fileBasedRecovery; - RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps, boolean createNewTranslog) { + RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps, boolean fileBasedRecovery) { this.recoveryId = recoveryId; this.shardId = shardId; this.totalTranslogOps = totalTranslogOps; - this.createNewTranslog = createNewTranslog; + this.fileBasedRecovery = fileBasedRecovery; } RecoveryPrepareForTranslogOperationsRequest(StreamInput in) throws IOException { @@ -51,9 +51,9 @@ class RecoveryPrepareForTranslogOperationsRequest extends TransportRequest { in.readLong(); // maxUnsafeAutoIdTimestamp } if (in.getVersion().onOrAfter(Version.V_6_2_0)) { - 
createNewTranslog = in.readBoolean(); + fileBasedRecovery = in.readBoolean(); } else { - createNewTranslog = true; + fileBasedRecovery = true; } } @@ -70,10 +70,10 @@ public int totalTranslogOps() { } /** - * Whether or not the recover target should create a new local translog + * Whether or not the recovery is file based */ - boolean createNewTranslog() { - return createNewTranslog; + public boolean isFileBasedRecovery() { + return fileBasedRecovery; } @Override @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); // maxUnsafeAutoIdTimestamp } if (out.getVersion().onOrAfter(Version.V_6_2_0)) { - out.writeBoolean(createNewTranslog); + out.writeBoolean(fileBasedRecovery); } } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 41df6ec73e020..1b1a2802b52bd 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRefIterator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; @@ -40,6 +41,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineDiskUtils; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -362,14 +364,9 @@ private void ensureRefCount() { /*** Implementation of {@link RecoveryTargetHandler } */ @Override - public void prepareForTranslogOperations(boolean createNewTranslog, int totalTranslogOps) throws IOException { + public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException { state().getTranslog().totalOperations(totalTranslogOps); - if (createNewTranslog) { - // TODO: Assigns the global checkpoint to the max_seqno of the safe commit if the index version >= 6.2 - indexShard().openIndexAndCreateTranslog(false, SequenceNumbers.UNASSIGNED_SEQ_NO); - } else { - indexShard().openIndexAndSkipTranslogRecovery(); - } + indexShard().openEngineAndSkipTranslogRecovery(); } @Override @@ -440,8 +437,15 @@ public void cleanFiles(int totalTranslogOps, Store.MetadataSnapshot sourceMetaDa // to recover from in case of a full cluster shutdown just when this code executes... renameAllTempFiles(); final Store store = store(); + store.incRef(); try { store.cleanupAndVerify("recovery CleanFilesRequestHandler", sourceMetaData); + if (indexShard.indexSettings().getIndexVersionCreated().before(Version.V_6_0_0_rc1)) { + EngineDiskUtils.ensureIndexHasHistoryUUID(store.directory()); + } + // TODO: Assign the global checkpoint to the max_seqno of the safe commit if the index version >= 6.2 + EngineDiskUtils.createNewTranslog(store.directory(), indexShard.shardPath().resolveTranslog(), + SequenceNumbers.UNASSIGNED_SEQ_NO, shardId); } catch (CorruptIndexException | IndexFormatTooNewException | IndexFormatTooOldException ex) { // this is a fatal exception at this stage. 
            // this means we transferred files from the remote that have not been checksummed and they are
@@ -465,6 +469,8 @@ public void cleanFiles(int totalTranslogOps, Store.MetadataSnapshot sourceMetaDa
                 RecoveryFailedException rfe = new RecoveryFailedException(state(), "failed to clean after recovery", ex);
                 fail(rfe, true);
                 throw rfe;
+            } finally {
+                store.decRef();
             }
         }
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
index 9cedfa8039a3d..4e728a72b300f 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
@@ -32,10 +32,10 @@ public interface RecoveryTargetHandler {
     /**
      * Prepares the target to receive translog operations, after all files have been copied
-     * @param createNewTranslog whether or not to delete the local translog on the target
+     * @param fileBasedRecovery whether or not this call is part of a file-based recovery
      * @param totalTranslogOps total translog operations expected to be sent
      */
-    void prepareForTranslogOperations(boolean createNewTranslog, int totalTranslogOps) throws IOException;
+    void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException;
     /**
      * The finalize request refreshes the engine now that new segments are available, enables garbage collection of tombstone files, and
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
index 966ed426d48b6..edf17595350c4 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
@@ -76,9 +76,9 @@ public RemoteRecoveryTargetHandler(long recoveryId, ShardId shardId, TransportSe
     }
     @Override
-    public void prepareForTranslogOperations(boolean createNewTranslog, int totalTranslogOps) throws IOException {
+    public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException {
         transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG,
-            new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, createNewTranslog),
+            new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, fileBasedRecovery),
             TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(),
             EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
     }
diff --git a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java
index 4588010fe9c63..67fd385955f3e 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java
@@ -38,8 +38,6 @@ import java.util.concurrent.atomic.AtomicLong;
 import static java.util.Collections.singletonList;
-import static org.elasticsearch.index.engine.EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG;
-import static org.elasticsearch.index.engine.EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG;
 import static
org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.doAnswer; @@ -54,8 +52,7 @@ public class CombinedDeletionPolicyTests extends ESTestCase { public void testKeepCommitsAfterGlobalCheckpoint() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_AND_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, null); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, null); final LongArrayList maxSeqNoList = new LongArrayList(); final LongArrayList translogGenList = new LongArrayList(); @@ -94,8 +91,7 @@ public void testAcquireIndexCommit() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(); final UUID translogUUID = UUID.randomUUID(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_AND_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, null); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, null); long lastMaxSeqNo = between(1, 1000); long lastTranslogGen = between(1, 20); int safeIndex = 0; @@ -165,8 +161,7 @@ public void testLegacyIndex() throws Exception { final UUID translogUUID = UUID.randomUUID(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_AND_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, null); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, null); long legacyTranslogGen = randomNonNegativeLong(); IndexCommit legacyCommit = mockLegacyIndexCommit(translogUUID, legacyTranslogGen); @@ -199,8 +194,7 @@ public void testLegacyIndex() throws Exception { public void testDeleteInvalidCommits() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(randomNonNegativeLong()); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_CREATE_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, null); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, null); final int invalidCommits = between(1, 10); final List commitList = new ArrayList<>(); @@ -237,8 +231,7 @@ public void testKeepOnlyStartingCommitOnInit() throws Exception { commitList.add(mockIndexCommit(randomNonNegativeLong(), translogUUID, randomNonNegativeLong())); } final IndexCommit startingCommit = randomFrom(commitList); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_AND_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, startingCommit); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, startingCommit); indexPolicy.onInit(commitList); for (IndexCommit commit : commitList) { if (commit.equals(startingCommit) == false) { @@ -256,8 +249,7 @@ public void testCheckUnreferencedCommits() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); final UUID translogUUID = UUID.randomUUID(); final 
TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy( - OPEN_INDEX_AND_TRANSLOG, logger, translogPolicy, globalCheckpoint::get, null); + CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, globalCheckpoint::get, null); final List commitList = new ArrayList<>(); int totalCommits = between(2, 20); long lastMaxSeqNo = between(1, 1000); diff --git a/server/src/test/java/org/elasticsearch/index/engine/EngineDiskUtilsTests.java b/server/src/test/java/org/elasticsearch/index/engine/EngineDiskUtilsTests.java new file mode 100644 index 0000000000000..c57af9b448671 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/engine/EngineDiskUtilsTests.java @@ -0,0 +1,207 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.search.IndexSearcher; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.codec.CodecService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.IndexSettingsModule; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; + +import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; + +public class EngineDiskUtilsTests extends EngineTestCase { + + + public void testHistoryUUIDIsSetIfMissing() throws IOException { + final int numDocs = randomIntBetween(0, 3); + for (int i = 0; i < numDocs; i++) { + ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); + Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, + Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); + Engine.IndexResult index = engine.index(firstIndexRequest); + 
+            assertThat(index.getVersion(), equalTo(1L));
+        }
+        assertVisibleCount(engine, numDocs);
+        engine.close();
+
+        IndexWriterConfig iwc = new IndexWriterConfig(null)
+            .setCommitOnClose(false)
+            // we don't want merges to happen here - we call maybe merge on the engine
+            // later once we started it up otherwise we would need to wait for it here
+            // we also don't specify a codec here and merges should use the engines for this index
+            .setMergePolicy(NoMergePolicy.INSTANCE)
+            .setOpenMode(IndexWriterConfig.OpenMode.APPEND);
+        try (IndexWriter writer = new IndexWriter(store.directory(), iwc)) {
+            Map<String, String> newCommitData = new HashMap<>();
+            for (Map.Entry<String, String> entry : writer.getLiveCommitData()) {
+                if (entry.getKey().equals(Engine.HISTORY_UUID_KEY) == false) {
+                    newCommitData.put(entry.getKey(), entry.getValue());
+                }
+            }
+            writer.setLiveCommitData(newCommitData.entrySet());
+            writer.commit();
+        }
+
+        EngineDiskUtils.ensureIndexHasHistoryUUID(store.directory());
+
+        final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder()
+            .put(defaultSettings.getSettings())
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0_beta1)
+            .build());
+
+        EngineConfig config = engine.config();
+        EngineConfig newConfig = new EngineConfig(
+            shardId, allocationId.getId(),
+            threadPool, indexSettings, null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(),
+            new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(),
+            IndexSearcher.getDefaultQueryCachingPolicy(), config.getTranslogConfig(), TimeValue.timeValueMinutes(5),
+            config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(),
+            new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED);
+        engine = new InternalEngine(newConfig);
+        engine.recoverFromTranslog();
+        assertVisibleCount(engine, numDocs, false);
+        assertThat(engine.getHistoryUUID(), notNullValue());
+    }
+
+    public void testCurrentTranslogIDisCommitted() throws IOException {
+        final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
+        try (Store store = createStore()) {
+            EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get);
+
+            // create
+            {
+                EngineDiskUtils.createEmpty(store.directory(), config.getTranslogConfig().getTranslogPath(), shardId);
+                ParsedDocument doc = testParsedDocument(Integer.toString(0), null, testDocument(), new BytesArray("{}"), null);
+                Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0,
+                    Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
+
+                try (InternalEngine engine = createEngine(config)) {
+                    engine.index(firstIndexRequest);
+                    globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
+                    expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog());
+                    Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
+                    assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                    assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                }
+            }
+            // open and recover tlog
+            {
+                for (int i = 0; i < 2; i++) {
+                    try (InternalEngine engine = new InternalEngine(config)) {
+                        assertTrue(engine.isRecovering());
+                        Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
+                        if (i == 0) {
+                            assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                        } else {
+                            // creating an empty index will create the first translog gen and commit it
+                            // opening the empty index will make the second translog file but not commit it
+                            // opening the engine again (i=0) will make the third translog file, which will then be committed
+                            assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                        }
+                        assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                        engine.recoverFromTranslog();
+                        userData = engine.getLastCommittedSegmentInfos().getUserData();
+                        assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                        assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                    }
+                }
+            }
+            // open index with new tlog
+            {
+                EngineDiskUtils.createNewTranslog(store.directory(), config.getTranslogConfig().getTranslogPath(),
+                    SequenceNumbers.NO_OPS_PERFORMED, shardId);
+                try (InternalEngine engine = new InternalEngine(config)) {
+                    Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
+                    assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                    assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                    engine.recoverFromTranslog();
+                    assertEquals(2, engine.getTranslog().currentFileGeneration());
+                    assertEquals(0L, engine.getTranslog().uncommittedOperations());
+                }
+            }
+
+            // open and recover tlog with empty tlog
+            {
+                for (int i = 0; i < 2; i++) {
+                    try (InternalEngine engine = new InternalEngine(config)) {
+                        Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
+                        assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                        assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                        engine.recoverFromTranslog();
+                        userData = engine.getLastCommittedSegmentInfos().getUserData();
+                        assertEquals("no changes - nothing to commit", "1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
+                        assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
+                    }
+                }
+            }
+        }
+    }
+
+    public void testHistoryUUIDCanBeForced() throws IOException {
+        final int numDocs = randomIntBetween(0, 3);
+        for (int i = 0; i < numDocs; i++) {
+            ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
+            Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0,
+                Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
+            Engine.IndexResult index = engine.index(firstIndexRequest);
+            assertThat(index.getVersion(), equalTo(1L));
+        }
+        assertVisibleCount(engine, numDocs);
+        final String oldHistoryUUID = engine.getHistoryUUID();
+        engine.close();
+        EngineConfig config = engine.config();
+        EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(store.directory(), config.getTranslogConfig().getTranslogPath(), shardId);
+
+        EngineConfig newConfig = new EngineConfig(
+            shardId, allocationId.getId(),
+            threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(),
+            new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(),
+            IndexSearcher.getDefaultQueryCachingPolicy(), config.getTranslogConfig(), TimeValue.timeValueMinutes(5),
+            config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(),
+            new NoneCircuitBreakerService(), () ->
SequenceNumbers.NO_OPS_PERFORMED); + engine = new InternalEngine(newConfig); + engine.recoverFromTranslog(); + assertVisibleCount(engine, 0, false); + assertThat(engine.getHistoryUUID(), not(equalTo(oldHistoryUUID))); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index f0bd1cc389e51..2488ca79fe482 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -81,7 +81,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver; @@ -119,7 +118,6 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.IndexSettingsModule; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; @@ -133,7 +131,6 @@ import java.util.Base64; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -149,6 +146,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.function.ToLongBiFunction; @@ -641,7 +639,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { InternalEngine engine = createEngine(store, translog); engine.close(); - engine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + engine = new InternalEngine(engine.config()); assertTrue(engine.isRecovering()); engine.recoverFromTranslog(); Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test")); @@ -656,7 +654,7 @@ public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { engine.index(indexForDoc(doc)); engine.close(); - engine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + engine = new InternalEngine(engine.config()); expectThrows(IllegalStateException.class, () -> engine.flush(true, true)); assertTrue(engine.isRecovering()); engine.recoverFromTranslog(); @@ -690,7 +688,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { Engine recoveringEngine = null; try { - recoveringEngine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + recoveringEngine = new InternalEngine(engine.config()); recoveringEngine.recoverFromTranslog(); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); @@ -718,20 +716,19 @@ public void testTranslogRecoveryDoesNotReplayIntoTranslog() throws IOException { Engine recoveringEngine = null; try { - final AtomicBoolean flushed = new AtomicBoolean(); - recoveringEngine = new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)) { + final 
AtomicBoolean committed = new AtomicBoolean(); + recoveringEngine = new InternalEngine(initialEngine.config()) { + @Override - public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException { - assertThat(getTranslog().uncommittedOperations(), equalTo(docs)); - final CommitId commitId = super.flush(force, waitIfOngoing); - flushed.set(true); - return commitId; + protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException { + committed.set(true); + super.commitIndexWriter(writer, translog, syncId); } }; assertThat(recoveringEngine.getTranslog().uncommittedOperations(), equalTo(docs)); recoveringEngine.recoverFromTranslog(); - assertTrue(flushed.get()); + assertTrue(committed.get()); } finally { IOUtils.close(recoveringEngine); } @@ -762,7 +759,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { } } initialEngine.close(); - recoveringEngine = new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + recoveringEngine = new InternalEngine(initialEngine.config()); recoveringEngine.recoverFromTranslog(); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), docs); @@ -1009,9 +1006,9 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { IOUtils.close(engine, store); final Path translogPath = createTempDir(); store = createStore(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); final LongSupplier globalCheckpointSupplier = () -> globalCheckpoint.get(); - engine = new InternalEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier)); + engine = createEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier)); ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); engine.index(indexForDoc(doc)); boolean inSync = randomBoolean(); @@ -1021,17 +1018,17 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { engine.flush(); assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L)); - assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 3L : 2L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 3L : 1L)); assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(3L)); engine.flush(); assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L)); - assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 3L : 2L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 3L : 1L)); assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(3L)); engine.flush(true, true); assertThat(engine.getTranslog().currentFileGeneration(), equalTo(4L)); - assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 4L : 2L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 
4L : 1L)); assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(4L)); globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); @@ -1043,7 +1040,7 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { public void testSyncedFlush() throws IOException { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null))) { + Engine engine = createEngine(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null)) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); engine.index(indexForDoc(doc)); @@ -1069,8 +1066,8 @@ public void testRenewSyncFlush() throws Exception { final int iters = randomIntBetween(2, 5); // run this a couple of times to get some coverage for (int i = 0; i < iters; i++) { try (Store store = createStore(); - InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogDocMergePolicy(), null))) { + InternalEngine engine = + createEngine(config(defaultSettings, store, createTempDir(), new LogDocMergePolicy(), null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); Engine.Index doc1 = indexForDoc(testParsedDocument("1", null, testDocumentWithTextField(), B_1, null)); engine.index(doc1); @@ -1125,7 +1122,7 @@ public void testRenewSyncFlush() throws Exception { } public void testSyncedFlushSurvivesEngineRestart() throws IOException { - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); IOUtils.close(store, engine); store = createStore(); engine = createEngine(store, primaryTranslogDir, globalCheckpoint::get); @@ -1144,12 +1141,13 @@ public void testSyncedFlushSurvivesEngineRestart() throws IOException { } else { engine.flushAndClose(); } - engine = new InternalEngine(copy(config, randomFrom(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG))); - - if (engine.config().getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG && randomBoolean()) { - engine.recoverFromTranslog(); + if (randomBoolean()) { + EngineDiskUtils.createNewTranslog(store.directory(), config.getTranslogConfig().getTranslogPath(), + SequenceNumbers.UNASSIGNED_SEQ_NO, shardId); } - assertEquals(engine.config().getOpenMode().toString(), engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); + engine = new InternalEngine(config); + engine.recoverFromTranslog(); + assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); } public void testSyncedFlushVanishesOnReplay() throws IOException { @@ -1165,7 +1163,7 @@ public void testSyncedFlushVanishesOnReplay() throws IOException { engine.index(indexForDoc(doc)); EngineConfig config = engine.config(); engine.close(); - engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + engine = new InternalEngine(config); engine.recoverFromTranslog(); assertNull("Sync ID must be gone since we have a document to replay", engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID)); } @@ -1270,7 +1268,7 @@ public void testVersioningNewIndex() throws IOException { public void testForceMerge() throws IOException { try (Store 
store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), + Engine engine = createEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null))) { // use log MP here we test some behavior in ESMP int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { @@ -2051,7 +2049,7 @@ public void testSeqNoAndCheckpoints() throws IOException { InternalEngine recoveringEngine = null; try { - recoveringEngine = new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); + recoveringEngine = new InternalEngine(initialEngine.config()); recoveringEngine.recoverFromTranslog(); assertEquals(primarySeqNo, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); @@ -2078,10 +2076,9 @@ public void testSeqNoAndCheckpoints() throws IOException { // this test writes documents to the engine while concurrently flushing/commit // and ensuring that the commit points contain the correct sequence number data public void testConcurrentWritesAndCommits() throws Exception { + List commits = new ArrayList<>(); try (Store store = createStore(); - InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { - final List commits = new ArrayList<>(); - + InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { final int numIndexingThreads = scaledRandomIntBetween(2, 4); final int numDocsPerThread = randomIntBetween(500, 1000); final CyclicBarrier barrier = new CyclicBarrier(numIndexingThreads + 1); @@ -2243,7 +2240,7 @@ public void testIndexWriterIFDInfoStream() throws IllegalAccessException, IOExce public void testEnableGcDeletes() throws Exception { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { + Engine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { engine.config().setEnableGcDeletes(false); final BiFunction searcherFactory = engine::acquireSearcher; @@ -2326,7 +2323,7 @@ public void testFailStart() throws IOException { InternalEngine holder; try { holder = createEngine(store, translogPath); - } catch (EngineCreationFailureException ex) { + } catch (EngineCreationFailureException | IOException ex) { assertEquals(store.refCount(), refCount); continue; } @@ -2372,9 +2369,9 @@ public void testMissingTranslog() throws IOException { } catch (EngineCreationFailureException ex) { // expected } - // now it should be OK. 
- EngineConfig config = copy(config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null), - EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG); + // when a new translog is created it should be ok + EngineDiskUtils.createNewTranslog(store.directory(), primaryTranslogDir, SequenceNumbers.UNASSIGNED_SEQ_NO, shardId); + EngineConfig config = config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null); engine = new InternalEngine(config); } @@ -2421,21 +2418,6 @@ public void testTranslogReplayWithFailure() throws IOException { } } - private static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { - assertVisibleCount(engine, numDocs, true); - } - - private static void assertVisibleCount(InternalEngine engine, int numDocs, boolean refresh) throws IOException { - if (refresh) { - engine.refresh("test"); - } - try (Searcher searcher = engine.acquireSearcher("test")) { - final TotalHitCountCollector collector = new TotalHitCountCollector(); - searcher.searcher().search(new MatchAllDocsQuery(), collector); - assertThat(collector.getTotalHits(), equalTo(numDocs)); - } - } - public void testTranslogCleanUpPostCommitCrash() throws Exception { IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetaData(), defaultSettings.getNodeSettings(), defaultSettings.getScopedSettings()); @@ -2449,8 +2431,9 @@ public void testTranslogCleanUpPostCommitCrash() throws Exception { try (Store store = createStore()) { AtomicBoolean throwErrorOnCommit = new AtomicBoolean(); final Path translogPath = createTempDir(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); final LongSupplier globalCheckpointSupplier = () -> globalCheckpoint.get(); + EngineDiskUtils.createEmpty(store.directory(), translogPath, shardId); try (InternalEngine engine = new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier)) { @@ -2463,6 +2446,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s } } }) { + engine.recoverFromTranslog(); final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc1)); globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); @@ -2495,7 +2479,8 @@ public void testSkipTranslogReplay() throws IOException { } assertVisibleCount(engine, numDocs); engine.close(); - engine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG)); + engine = new InternalEngine(engine.config()); + engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); assertThat(topDocs.totalHits, equalTo(0L)); @@ -2535,7 +2520,7 @@ public void testTranslogReplay() throws IOException { parser.mappingUpdate = dynamicUpdate(); engine.close(); - engine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, inSyncGlobalCheckpointSupplier)); // we need to reuse the engine config unless the parser.mappingModified won't work + engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier)); // we need to reuse the engine config unless the parser.mappingModified won't work engine.recoverFromTranslog(); assertVisibleCount(engine, numDocs, false); @@ 
-2622,10 +2607,10 @@ public void testRecoverFromForeignTranslog() throws IOException { TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE); - EngineConfig brokenConfig = new EngineConfig(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, shardId, allocationId.getId(), + EngineConfig brokenConfig = new EngineConfig(shardId, allocationId.getId(), threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), false, translogConfig, TimeValue.timeValueMinutes(5), + IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO); try { @@ -2638,94 +2623,6 @@ public void testRecoverFromForeignTranslog() throws IOException { assertVisibleCount(engine, numDocs, false); } - public void testHistoryUUIDIsSetIfMissing() throws IOException { - final int numDocs = randomIntBetween(0, 3); - for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); - Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); - Engine.IndexResult index = engine.index(firstIndexRequest); - assertThat(index.getVersion(), equalTo(1L)); - } - assertVisibleCount(engine, numDocs); - engine.close(); - - IndexWriterConfig iwc = new IndexWriterConfig(null) - .setCommitOnClose(false) - // we don't want merges to happen here - we call maybe merge on the engine - // later once we stared it up otherwise we would need to wait for it here - // we also don't specify a codec here and merges should use the engines for this index - .setMergePolicy(NoMergePolicy.INSTANCE) - .setOpenMode(IndexWriterConfig.OpenMode.APPEND); - try (IndexWriter writer = new IndexWriter(store.directory(), iwc)) { - Map newCommitData = new HashMap<>(); - for (Map.Entry entry: writer.getLiveCommitData()) { - if (entry.getKey().equals(Engine.HISTORY_UUID_KEY) == false) { - newCommitData.put(entry.getKey(), entry.getValue()); - } - } - writer.setLiveCommitData(newCommitData.entrySet()); - writer.commit(); - } - - final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() - .put(defaultSettings.getSettings()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0_beta1) - .build()); - - EngineConfig config = engine.config(); - - EngineConfig newConfig = new EngineConfig( - randomBoolean() ? 
EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG : EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG, - shardId, allocationId.getId(), - threadPool, indexSettings, null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), - new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), false, config.getTranslogConfig(), TimeValue.timeValueMinutes(5), - config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), - new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO); - engine = new InternalEngine(newConfig); - if (newConfig.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - engine.recoverFromTranslog(); - assertVisibleCount(engine, numDocs, false); - } else { - assertVisibleCount(engine, 0, false); - } - assertThat(engine.getHistoryUUID(), notNullValue()); - } - - public void testHistoryUUIDCanBeForced() throws IOException { - final int numDocs = randomIntBetween(0, 3); - for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); - Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); - Engine.IndexResult index = engine.index(firstIndexRequest); - assertThat(index.getVersion(), equalTo(1L)); - } - assertVisibleCount(engine, numDocs); - final String oldHistoryUUID = engine.getHistoryUUID(); - engine.close(); - EngineConfig config = engine.config(); - - EngineConfig newConfig = new EngineConfig( - randomBoolean() ? EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG : EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG, - shardId, allocationId.getId(), - threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), - new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), true, config.getTranslogConfig(), TimeValue.timeValueMinutes(5), - config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), - new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO); - if (newConfig.getOpenMode() == EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG) { - Lucene.cleanLuceneIndex(store.directory()); - } - engine = new InternalEngine(newConfig); - if (newConfig.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - engine.recoverFromTranslog(); - assertVisibleCount(engine, numDocs, false); - } else { - assertVisibleCount(engine, 0, false); - } - assertThat(engine.getHistoryUUID(), not(equalTo(oldHistoryUUID))); - } - public void testShardNotAvailableExceptionWhenEngineClosedConcurrently() throws IOException, InterruptedException { AtomicReference exception = new AtomicReference<>(); String operation = randomFrom("optimize", "refresh", "flush"); @@ -2818,74 +2715,6 @@ protected void doRun() throws Exception { } } - public void testCurrentTranslogIDisCommitted() throws IOException { - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); - try (Store store = createStore()) { - EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get); - - // create - { - ParsedDocument doc = 
testParsedDocument(Integer.toString(0), null, testDocument(), new BytesArray("{}"), null); - Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); - - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG))){ - assertFalse(engine.isRecovering()); - engine.index(firstIndexRequest); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); - expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); - Map userData = engine.getLastCommittedSegmentInfos().getUserData(); - assertEquals("2", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - } - } - // open and recover tlog - { - for (int i = 0; i < 2; i++) { - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { - assertTrue(engine.isRecovering()); - Map userData = engine.getLastCommittedSegmentInfos().getUserData(); - if (i == 0) { - assertEquals("2", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - } else { - assertEquals("4", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - } - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(); - userData = engine.getLastCommittedSegmentInfos().getUserData(); - assertEquals("4", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - } - } - } - // open index with new tlog - { - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG))) { - Map userData = engine.getLastCommittedSegmentInfos().getUserData(); - assertEquals("2", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); - assertEquals(2, engine.getTranslog().currentFileGeneration()); - assertEquals(0L, engine.getTranslog().uncommittedOperations()); - } - } - - // open and recover tlog with empty tlog - { - for (int i = 0; i < 2; i++) { - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { - Map userData = engine.getLastCommittedSegmentInfos().getUserData(); - assertEquals("2", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(); - userData = engine.getLastCommittedSegmentInfos().getUserData(); - assertEquals("no changes - nothing to commit", "2", userData.get(Translog.TRANSLOG_GENERATION_KEY)); - assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - } - } - } - } - } - private static class ThrowingIndexWriter extends IndexWriter { private AtomicReference> failureToThrow = new AtomicReference<>(); @@ -3367,21 +3196,22 @@ public void testRetryConcurrently() throws InterruptedException, IOException { public void testEngineMaxTimestampIsInitialized() throws IOException { + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); final long timestamp1 = Math.abs(randomNonNegativeLong()); final Path storeDir = 
createTempDir(); final Path translogDir = createTempDir(); final long timestamp2 = randomNonNegativeLong(); final long maxTimestamp12 = Math.max(timestamp1, timestamp2); - try (Store store = createStore(newFSDirectory(storeDir)); - Engine engine = new InternalEngine(config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, null))) { + final Function configSupplier = + store -> config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get); + try (Store store = createStore(newFSDirectory(storeDir)); Engine engine = createEngine(configSupplier.apply(store))) { assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); engine.index(appendOnlyPrimary(doc, true, timestamp1)); assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); } - try (Store store = createStore(newFSDirectory(storeDir)); - Engine engine = new InternalEngine(config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, null))) { + try (Store store = createStore(newFSDirectory(storeDir)); Engine engine = new InternalEngine(configSupplier.apply(store))) { assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); engine.recoverFromTranslog(); assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); @@ -3389,13 +3219,16 @@ public void testEngineMaxTimestampIsInitialized() throws IOException { new BytesArray("{}".getBytes(Charset.defaultCharset())), null); engine.index(appendOnlyPrimary(doc, true, timestamp2)); assertEquals(maxTimestamp12, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + globalCheckpoint.set(1); // make sure flush cleans up commits for later. 
engine.flush(); } - try (Store store = createStore(newFSDirectory(storeDir)); - Engine engine = new InternalEngine( - copy(config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, null), - randomFrom(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG)))) { - assertEquals(maxTimestamp12, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + try (Store store = createStore(newFSDirectory(storeDir))) { + if (randomBoolean() || true) { + EngineDiskUtils.createNewTranslog(store.directory(), translogDir, SequenceNumbers.NO_OPS_PERFORMED, shardId); + } + try (Engine engine = new InternalEngine(configSupplier.apply(store))) { + assertEquals(maxTimestamp12, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + } } } @@ -3491,7 +3324,7 @@ public void afterRefresh(boolean didRefresh) throws IOException { } } }); - InternalEngine internalEngine = new InternalEngine(config); + InternalEngine internalEngine = createEngine(config); int docId = 0; final ParsedDocument doc = testParsedDocument(Integer.toString(docId), null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); @@ -3662,53 +3495,13 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro IOUtils.close(initialEngine); } - try (Engine recoveringEngine = - new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { + try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) { recoveringEngine.recoverFromTranslog(); recoveringEngine.fillSeqNoGaps(2); assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); } } - public void testSequenceNumberAdvancesToMaxSeqNoOnEngineOpenOnReplica() throws IOException { - final long v = 1; - final VersionType t = VersionType.EXTERNAL; - final long ts = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; - final int docs = randomIntBetween(1, 32); - InternalEngine initialEngine = null; - try { - initialEngine = engine; - for (int i = 0; i < docs; i++) { - final String id = Integer.toString(i); - final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); - final Term uid = newUid(doc); - // create a gap at sequence number 3 * i + 1 - initialEngine.index(new Engine.Index(uid, doc, 3 * i, 1, v, t, REPLICA, System.nanoTime(), ts, false)); - initialEngine.delete(new Engine.Delete("type", id, uid, 3 * i + 2, 1, v, t, REPLICA, System.nanoTime())); - } - - // bake the commit with the local checkpoint stuck at 0 and gaps all along the way up to the max sequence number - assertThat(initialEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) 0)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo((long) (3 * (docs - 1) + 2))); - initialEngine.flush(true, true); - - for (int i = 0; i < docs; i++) { - final String id = Integer.toString(i); - final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); - final Term uid = newUid(doc); - initialEngine.index(new Engine.Index(uid, doc, 3 * i + 1, 1, v, t, REPLICA, System.nanoTime(), ts, false)); - } - } finally { - IOUtils.close(initialEngine); - } - - try (Engine recoveringEngine = - new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { - recoveringEngine.recoverFromTranslog(); - recoveringEngine.fillSeqNoGaps(1); - 
assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), greaterThanOrEqualTo((long) (3 * (docs - 1) + 2 - 1))); - } - } /** java docs */ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOException { @@ -3803,7 +3596,7 @@ public void testNoOps() throws IOException { final BiFunction supplier = (ms, lcp) -> new LocalCheckpointTracker( maxSeqNo, localCheckpoint); - noOpEngine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG), supplier) { + noOpEngine = new InternalEngine(engine.config(), supplier) { @Override protected long doGenerateSeqNoForOperation(Operation operation) { throw new UnsupportedOperationException(); @@ -3950,7 +3743,7 @@ public void markSeqNoAsCompleted(long seqNo) { completedSeqNos.add(seqNo); } }; - actualEngine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG), supplier); + actualEngine = new InternalEngine(engine.config(), supplier); final int operations = randomIntBetween(0, 1024); final Set expectedCompletedSeqNos = new HashSet<>(); for (int i = 0; i < operations; i++) { @@ -4013,15 +3806,14 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { boolean flushed = false; - AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); Engine recoveringEngine = null; try { assertEquals(docs - 1, engine.getLocalCheckpointTracker().getMaxSeqNo()); assertEquals(docs - 1, engine.getLocalCheckpointTracker().getCheckpoint()); assertEquals(maxSeqIDOnReplica, replicaEngine.getLocalCheckpointTracker().getMaxSeqNo()); assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpointTracker().getCheckpoint()); - recoveringEngine = new InternalEngine(copy( - replicaEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, globalCheckpoint::get)); + recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); assertEquals(numDocsOnReplica, recoveringEngine.getTranslog().uncommittedOperations()); recoveringEngine.recoverFromTranslog(); assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); @@ -4054,8 +3846,7 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { // now do it again to make sure we preserve values etc. 
try { - recoveringEngine = new InternalEngine( - copy(replicaEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, globalCheckpoint::get)); + recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); if (flushed) { assertEquals(0, recoveringEngine.getTranslog().uncommittedOperations()); } @@ -4234,10 +4025,11 @@ public void testKeepTranslogAfterGlobalCheckpoint() throws Exception { final Path translogPath = createTempDir(); store = createStore(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); final EngineConfig engineConfig = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, () -> globalCheckpoint.get()); + EngineDiskUtils.createEmpty(store.directory(), translogPath, shardId); try (Engine engine = new InternalEngine(engineConfig) { @Override protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException { @@ -4249,6 +4041,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s super.commitIndexWriter(writer, translog, syncId); } }) { + engine.recoverFromTranslog(); int numDocs = scaledRandomIntBetween(10, 100); final String translogUUID = engine.getTranslog().getTranslogUUID(); for (int docId = 0; docId < numDocs; docId++) { @@ -4340,7 +4133,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, public void testAcquireIndexCommit() throws Exception { IOUtils.close(engine, store); store = createStore(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { int numDocs = between(1, 20); for (int i = 0; i < numDocs; i++) { @@ -4377,10 +4170,10 @@ public void testAcquireIndexCommit() throws Exception { public void testOpenIndexAndTranslogKeepOnlySafeCommit() throws Exception { IOUtils.close(engine); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); - final EngineConfig config = copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, globalCheckpoint::get); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + final EngineConfig config = copy(engine.config(), globalCheckpoint::get); final IndexCommit safeCommit; - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG))) { + try (InternalEngine engine = createEngine(config)) { final int numDocs = between(5, 50); for (int i = 0; i < numDocs; i++) { index(engine, i); @@ -4394,44 +4187,16 @@ public void testOpenIndexAndTranslogKeepOnlySafeCommit() throws Exception { globalCheckpoint.set(Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.MAX_SEQ_NO))); engine.getTranslog().sync(); } - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { + try (InternalEngine engine = new InternalEngine(config)) { final List existingCommits = DirectoryReader.listCommits(engine.store.directory()); - assertThat("OPEN_INDEX_AND_TRANSLOG should keep only safe commit", existingCommits, contains(safeCommit)); - } - } - - public void testOpenIndexCreateTranslogKeepOnlyLastCommit() throws Exception { - IOUtils.close(engine); - final EngineConfig config = 
copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); - final Map lastCommit; - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG))) { - engine.skipTranslogRecovery(); - final int numDocs = between(5, 50); - for (int i = 0; i < numDocs; i++) { - index(engine, i); - if (randomBoolean()) { - engine.flush(); - } - } - final List commits = DirectoryReader.listCommits(engine.store.directory()); - lastCommit = commits.get(commits.size() - 1).getUserData(); - } - try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG))) { - final List existingCommits = DirectoryReader.listCommits(engine.store.directory()); - assertThat("OPEN_INDEX_CREATE_TRANSLOG should keep only last commit", existingCommits, hasSize(1)); - final Map userData = existingCommits.get(0).getUserData(); - assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(lastCommit.get(SequenceNumbers.MAX_SEQ_NO))); - assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(lastCommit.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY))); - // Translog tags should be fresh. - assertThat(userData.get(Translog.TRANSLOG_UUID_KEY), not(equalTo(lastCommit.get(Translog.TRANSLOG_UUID_KEY)))); - assertThat(userData.get(Translog.TRANSLOG_GENERATION_KEY), equalTo("2")); + assertThat("safe commit should be kept", existingCommits, contains(safeCommit)); } } public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception { IOUtils.close(engine, store); store = createStore(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { final int numDocs = scaledRandomIntBetween(10, 100); for (int docId = 0; docId < numDocs; docId++) { @@ -4456,7 +4221,7 @@ public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception { public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { IOUtils.close(engine, store); store = createStore(); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { final int numDocs = scaledRandomIntBetween(10, 100); for (int docId = 0; docId < numDocs; docId++) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 3049c3a45799c..72813cf26372d 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -101,8 +101,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.containsString; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -333,7 +333,7 @@ public void testMaybeFlush() throws Exception { 
assertFalse(shard.shouldPeriodicallyFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder() .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(117 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + new ByteSizeValue(160 /* size of the operation + two generations header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0") .setSource("{}", XContentType.JSON).setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE).get(); assertFalse(shard.shouldPeriodicallyFlush()); @@ -407,15 +407,15 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldPeriodicallyFlush()); - final String key; final boolean flush = randomBoolean(); + final Settings settings; if (flush) { - key = "index.translog.flush_threshold_size"; + // size of the operation plus two generations of overhead. + settings = Settings.builder().put("index.translog.flush_threshold_size", "180b").build(); } else { - key = "index.translog.generation_threshold_size"; + // size of the operation plus header and footer + settings = Settings.builder().put("index.translog.generation_threshold_size", "117b").build(); } - // size of the operation plus header and footer - final Settings settings = Settings.builder().put(key, "117b").build(); client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); client().prepareIndex("test", "test", "0") .setSource("{}", XContentType.JSON) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 6bd378bdb8529..f05fdc60c5cf7 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2111,7 +2111,7 @@ public void testShardActiveDuringInternalRecovery() throws IOException { shard.prepareForIndexRecovery(); // Shard is still inactive since we haven't started recovering yet assertFalse(shard.isActive()); - shard.openIndexAndRecoveryFromTranslog(); + shard.openEngineAndRecoverFromTranslog(); // Shard should now be active since we did recover: assertTrue(shard.isActive()); closeShards(shard); @@ -2138,14 +2138,6 @@ public void testShardActiveDuringPeerRecovery() throws IOException { recoverReplica(replica, primary, (shard, discoveryNode) -> new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { - @Override - public void prepareForTranslogOperations(boolean createNewTranslog, int totalTranslogOps) throws IOException { - super.prepareForTranslogOperations(createNewTranslog, totalTranslogOps); - // Shard is still inactive since we haven't started recovering yet - assertFalse(replica.isActive()); - - } - @Override public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); @@ -2188,8 +2180,8 @@ public void testRefreshListenersDuringPeerRecovery() throws IOException { }) { // we're only checking that listeners are called when the engine is open, before there is no point @Override - public void prepareForTranslogOperations(boolean createNewTranslog, int totalTranslogOps) throws IOException { - 
super.prepareForTranslogOperations(createNewTranslog, totalTranslogOps); + public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException { + super.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps); assertListenerCalled.accept(replica); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 25b307e7d300f..1f9c5ae6df359 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineDiskUtils; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -56,9 +57,9 @@ import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.threadpool.Scheduler.Cancellable; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.Scheduler.Cancellable; import org.elasticsearch.threadpool.ThreadPool.Names; import org.junit.After; import org.junit.Before; @@ -120,13 +121,14 @@ public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; - EngineConfig config = new EngineConfig(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, shardId, allocationId, threadPool, - indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), - eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), false, translogConfig, - TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, null, - new NoneCircuitBreakerService(), - () -> SequenceNumbers.UNASSIGNED_SEQ_NO); + EngineDiskUtils.createEmpty(store.directory(), translogConfig.getTranslogPath(), shardId); + EngineConfig config = new EngineConfig(shardId, allocationId, threadPool, + indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), + eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, + TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, + (e, s) -> 0, new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED); engine = new InternalEngine(config); + engine.recoverFromTranslog(); listeners.setTranslog(engine.getTranslog()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 117dfe430c891..c75e469f7aff4 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -31,8 +31,10 @@ import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; +import 
org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.internal.io.IOUtils; @@ -89,6 +91,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; +import static org.hamcrest.Matchers.equalTo; public abstract class EngineTestCase extends ESTestCase { @@ -109,6 +112,21 @@ public abstract class EngineTestCase extends ESTestCase { protected Path primaryTranslogDir; protected Path replicaTranslogDir; + protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException { + assertVisibleCount(engine, numDocs, true); + } + + protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException { + if (refresh) { + engine.refresh("test"); + } + try (Engine.Searcher searcher = engine.acquireSearcher("test")) { + final TotalHitCountCollector collector = new TotalHitCountCollector(); + searcher.searcher().search(new MatchAllDocsQuery(), collector); + assertThat(collector.getTotalHits(), equalTo(numDocs)); + } + } + @Override @Before public void setUp() throws Exception { @@ -155,24 +173,20 @@ public void setUp() throws Exception { } } - public EngineConfig copy(EngineConfig config, EngineConfig.OpenMode openMode) { - return copy(config, openMode, config.getAnalyzer()); - } - - public EngineConfig copy(EngineConfig config, EngineConfig.OpenMode openMode, LongSupplier globalCheckpointSupplier) { - return new EngineConfig(openMode, config.getShardId(), config.getAllocationId(), config.getThreadPool(), config.getIndexSettings(), + public EngineConfig copy(EngineConfig config, LongSupplier globalCheckpointSupplier) { + return new EngineConfig(config.getShardId(), config.getAllocationId(), config.getThreadPool(), config.getIndexSettings(), config.getWarmer(), config.getStore(), config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), - config.getForceNewHistoryUUID(), config.getTranslogConfig(), config.getFlushMergesAfter(), + config.getTranslogConfig(), config.getFlushMergesAfter(), config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getTranslogRecoveryRunner(), config.getCircuitBreakerService(), globalCheckpointSupplier); } - public EngineConfig copy(EngineConfig config, EngineConfig.OpenMode openMode, Analyzer analyzer) { - return new EngineConfig(openMode, config.getShardId(), config.getAllocationId(), config.getThreadPool(), config.getIndexSettings(), + public EngineConfig copy(EngineConfig config, Analyzer analyzer) { + return new EngineConfig(config.getShardId(), config.getAllocationId(), config.getThreadPool(), config.getIndexSettings(), config.getWarmer(), config.getStore(), config.getMergePolicy(), analyzer, config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), - config.getForceNewHistoryUUID(), config.getTranslogConfig(), config.getFlushMergesAfter(), + config.getTranslogConfig(), config.getFlushMergesAfter(), config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getTranslogRecoveryRunner(), 
config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier()); } @@ -253,9 +267,9 @@ protected Translog createTranslog() throws IOException { protected Translog createTranslog(Path translogPath) throws IOException { TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE); - final String translogUUID = Translog.createEmptyTranslog(translogPath, SequenceNumbers.NO_OPS_PERFORMED, shardId); - return new Translog(translogConfig, translogUUID, createTranslogDeletionPolicy(INDEX_SETTINGS), - () -> SequenceNumbers.NO_OPS_PERFORMED); + String translogUUID = Translog.createEmptyTranslog(translogPath, SequenceNumbers.NO_OPS_PERFORMED, shardId); + return new Translog( + translogConfig, translogUUID, createTranslogDeletionPolicy(INDEX_SETTINGS), () -> SequenceNumbers.NO_OPS_PERFORMED); } protected InternalEngine createEngine(Store store, Path translogPath) throws IOException { @@ -338,10 +352,23 @@ protected InternalEngine createEngine( @Nullable Sort indexSort, @Nullable LongSupplier globalCheckpointSupplier) throws IOException { EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, null, indexSort, globalCheckpointSupplier); - InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); - if (config.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - internalEngine.recoverFromTranslog(); + return createEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); + } + + protected InternalEngine createEngine(EngineConfig config) throws IOException { + return createEngine(null, null, null, config); + } + + private InternalEngine createEngine(@Nullable IndexWriterFactory indexWriterFactory, + @Nullable BiFunction localCheckpointTrackerSupplier, + @Nullable ToLongBiFunction seqNoForOperation, + EngineConfig config) throws IOException { + final Directory directory = config.getStore().directory(); + if (Lucene.indexExists(directory) == false) { + EngineDiskUtils.createEmpty(directory, config.getTranslogConfig().getTranslogPath(), config.getShardId()); } + InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); + internalEngine.recoverFromTranslog(); return internalEngine; } @@ -394,23 +421,13 @@ protected long doGenerateSeqNoForOperation(final Operation operation) { public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, ReferenceManager.RefreshListener refreshListener) { - return config(indexSettings, store, translogPath, mergePolicy, refreshListener, null, () -> SequenceNumbers.UNASSIGNED_SEQ_NO); + return config(indexSettings, store, translogPath, mergePolicy, refreshListener, null, () -> SequenceNumbers.NO_OPS_PERFORMED); } public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, ReferenceManager.RefreshListener refreshListener, Sort indexSort, LongSupplier globalCheckpointSupplier) { IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE); - final EngineConfig.OpenMode openMode; - try { - if (Lucene.indexExists(store.directory()) == false) { - openMode = EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG; - } else { - openMode = EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG; - } - } catch 
(IOException e) { - throw new ElasticsearchException("can't find index?", e); - } Engine.EventListener listener = new Engine.EventListener() { @Override public void onFailedEngine(String reason, @Nullable Exception e) { @@ -421,14 +438,14 @@ public void onFailedEngine(String reason, @Nullable Exception e) { indexSettings.getSettings())); final List refreshListenerList = refreshListener == null ? emptyList() : Collections.singletonList(refreshListener); - EngineConfig config = new EngineConfig(openMode, shardId, allocationId.getId(), threadPool, indexSettings, null, store, + EngineConfig config = new EngineConfig(shardId, allocationId.getId(), threadPool, indexSettings, null, store, mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, - IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), false, translogConfig, + IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), refreshListenerList, Collections.emptyList(), indexSort, handler, new NoneCircuitBreakerService(), globalCheckpointSupplier == null ? - new ReplicationTracker(shardId, allocationId.getId(), indexSettings, - SequenceNumbers.UNASSIGNED_SEQ_NO) : globalCheckpointSupplier); + new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED) : + globalCheckpointSupplier); return config; } From 3d81497f2523f03d5d399a973cab28e49ec63b0b Mon Sep 17 00:00:00 2001 From: olcbean <26058559+olcbean@users.noreply.github.com> Date: Wed, 14 Mar 2018 21:37:50 +0100 Subject: [PATCH 30/89] REST: Clear Indices Cache API remove deprecated url params (#29068) By the time the master branch is released the deprecated url parameters in the `/_cache/clear` API will have been deprecated for a couple of minor releases. Since master will be the next major release we are fine with removing these parameters. --- .../migration/migrate_7_0/indices.asciidoc | 11 +++++++- .../api/indices.clear_cache.json | 8 ------ .../test/indices.clear_cache/10_basic.yml | 26 ------------------- .../indices/RestClearIndicesCacheAction.java | 6 ++--- 4 files changed, 13 insertions(+), 38 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/indices.asciidoc b/docs/reference/migration/migrate_7_0/indices.asciidoc index 16e437b4156e8..db0c0ede466d0 100644 --- a/docs/reference/migration/migrate_7_0/indices.asciidoc +++ b/docs/reference/migration/migrate_7_0/indices.asciidoc @@ -46,7 +46,7 @@ shards the index has. In order to maintain the exact same distribution as a pre `index.number_of_routing_shards` must be set to the `index.number_of_shards` at index creation time. Note: if the number of routing shards equals the number of shards `_split` operations are not supported. -==== Skipped background refresh on search idle shards. +==== Skipped background refresh on search idle shards Shards belonging to an index that does not have an explicit `index.refresh_interval` configured will no longer refresh in the background @@ -55,3 +55,12 @@ traffic for `index.search.idle.after` seconds (defaults to `30s`). Searches that access a search idle shard will be "parked" until the next refresh happens. Indexing requests with `wait_for_refresh` will also trigger a background refresh. 
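(Editorial aside, not part of the original patch: the search-idle behavior above only applies when `index.refresh_interval` is left unset, so an index can opt back into background refreshes by pinning an explicit interval. The settings-update idiom below mirrors test code used elsewhere in this patch series; the index name is a placeholder.)

[source,java]
--------------------------------------------------
// Sketch: any explicitly configured refresh interval disables
// search-idle refresh skipping for that index.
client().admin().indices().prepareUpdateSettings("test")
    .setSettings(Settings.builder()
        .put("index.refresh_interval", "1s"))
    .get();
--------------------------------------------------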
+ +==== Remove deprecated url parameters for Clear Indices Cache API + +The following previously deprecated url parameters have been removed: + +* `filter` - use `query` instead +* `filter_cache` - use `query` instead +* `request_cache` - use `request` instead +* `field_data` - use `fielddata` instead diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 1523c722da31d..d89d90b25d571 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -12,10 +12,6 @@ } }, "params": { - "field_data": { - "type" : "boolean", - "description" : "Clear field data. This is deprecated. Prefer `fielddata`." - }, "fielddata": { "type" : "boolean", "description" : "Clear field data" }, @@ -50,10 +46,6 @@ "type" : "boolean", "description" : "Clear the recycler cache" }, - "request_cache": { - "type" : "boolean", - "description" : "Clear request cache" - }, "request": { "type" : "boolean", "description" : "Clear request cache" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml index b5e98949f03b4..e1c4cac866e3b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml @@ -13,32 +13,6 @@ indices.clear_cache: request: false ---- -"clear_cache with request_cache set to false": - - skip: - version: " - 5.3.99" - reason: request_cache was deprecated in 5.4.0 - features: "warnings" - - - do: - warnings: - - 'Deprecated field [request_cache] used, expected [request] instead' - indices.clear_cache: - request_cache: false - ---- -"clear_cache with field_data set to true": - - skip: - version: " - 6.2.99" - reason: field_data was deprecated in 6.3.0 - features: "warnings" - - - do: - warnings: - - 'Deprecated field [field_data] used, expected [fielddata] instead' - indices.clear_cache: - field_data: true - --- "clear_cache with fielddata set to true": - skip: diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java index d0ec01dc552e4..d9b493ba1f50d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java @@ -99,9 +99,9 @@ public static ClearIndicesCacheRequest fromRequest(final RestRequest request, Cl } public static class Fields { - public static final ParseField QUERY = new ParseField("query", "filter", "filter_cache"); - public static final ParseField REQUEST = new ParseField("request", "request_cache"); - public static final ParseField FIELDDATA = new ParseField("fielddata", "field_data"); + public static final ParseField QUERY = new ParseField("query"); + public static final ParseField REQUEST = new ParseField("request"); + public static final ParseField FIELDDATA = new ParseField("fielddata"); public static final ParseField FIELDS = new ParseField("fields"); } From aa017703028874dbef7642670da6de99338ecd14 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 14 Mar 2018 13:38:24 -0700 Subject: [PATCH 31/89] [DOCS] Add
monitoring upgrade details (#29041) --- docs/reference/upgrade/upgrade-node.asciidoc | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/reference/upgrade/upgrade-node.asciidoc b/docs/reference/upgrade/upgrade-node.asciidoc index db9d352e83184..c97b84ef67004 100644 --- a/docs/reference/upgrade/upgrade-node.asciidoc +++ b/docs/reference/upgrade/upgrade-node.asciidoc @@ -16,8 +16,15 @@ To upgrade using a zip or compressed tarball: .. Set `path.data` in `config/elasticsearch.yml` to point to your external data directory. If you are not using an external `data` directory, copy - your old data directory over to the new installation. + your old data directory over to the new installation. + ++ +-- +IMPORTANT: If you use {monitoring}, re-use the data directory when you upgrade +{es}. Monitoring identifies unique {es} nodes by using the persistent UUID, which +is stored in the data directory. + +-- .. Set `path.logs` in `config/elasticsearch.yml` to point to the location where you want to store your logs. If you do not specify this setting, - logs are stored in the directory you extracted the archive to. \ No newline at end of file + logs are stored in the directory you extracted the archive to. From ae912cbde4ceee4f2d4f33e33364241c18c1c266 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 14 Mar 2018 22:02:06 +0100 Subject: [PATCH 32/89] [Docs] Fix Java Api index administration usage (#28260) The Java API documentation for index administration currently is wrong because the PutMappingRequestBuilder#setSource(Object... source) and CreateIndexRequestBuilder#addMapping(String type, Object... source) methods delegate to methods that check that the input arguments are valid key/value pairs. This changes the docs so that the Java API code examples are included from documentation integration tests, which lets us detect compile and runtime issues earlier.
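As a hedged illustration of the mix-up described above (an editorial sketch, not part of this patch): the `Object...` overloads expect alternating field-name/definition pairs, while a raw JSON string has to go through the `(String, XContentType)` overload that the corrected examples use.

[source,java]
--------------------------------------------------
// Key/value pairs are what the Object... overload validates for:
client.admin().indices().preparePutMapping("twitter")
    .setType("user")
    .setSource("name", "type=text")
    .get();
// A raw JSON string must instead declare its content type explicitly:
client.admin().indices().preparePutMapping("twitter")
    .setType("user")
    .setSource("{\"properties\":{\"name\":{\"type\":\"text\"}}}", XContentType.JSON)
    .get();
--------------------------------------------------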
Closes #28131 --- .../admin/indices/put-mapping.asciidoc | 42 ++---------- .../documentation/IndicesDocumentationIT.java | 67 ++++++++++++++++++- 2 files changed, 71 insertions(+), 38 deletions(-) diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index fa3e72582eec5..3e931dfd7b7e7 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -1,4 +1,5 @@ [[java-admin-indices-put-mapping]] + ==== Put Mapping The PUT mapping API allows you to add a new type while creating an index: @@ -13,32 +14,9 @@ include-tagged::{client-tests}/IndicesDocumentationIT.java[index-with-mapping] The PUT mapping API also allows to add a new type to an existing index: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -client.admin().indices().preparePutMapping("twitter") <1> - .setType("user") <2> - .setSource("{\n" + <3> - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") - .get(); - -// You can also provide the type in the source document -client.admin().indices().preparePutMapping("twitter") - .setType("user") - .setSource("{\n" + - " \"user\":{\n" + <4> - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}") - .get(); +include-tagged::{client-tests}/IndicesDocumentationIT.java[putMapping-request-source] -------------------------------------------------- <1> Puts a mapping on existing index called `twitter` <2> Adds a `user` mapping type. @@ -47,20 +25,12 @@ client.admin().indices().preparePutMapping("twitter") You can use the same API to update an existing mapping: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -client.admin().indices().preparePutMapping("twitter") <1> - .setType("user") <2> - .setSource("{\n" + <3> - " \"properties\": {\n" + - " \"user_name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") - .get(); +include-tagged::{client-tests}/IndicesDocumentationIT.java[putMapping-request-source-append] -------------------------------------------------- <1> Puts a mapping on existing index called `twitter` <2> Updates the `user` mapping type. <3> This `user` has now a new field `user_name` +:base-dir!: diff --git a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java index e52b03082254f..064702170d5bb 100644 --- a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java +++ b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java @@ -19,13 +19,32 @@ package org.elasticsearch.client.documentation; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESIntegTestCase; +/** + * This class is used to generate the Java indices administration documentation. + * You need to wrap your code between two tags like: + * // tag::example[] + * // end::example[] + * + * Where example is your tag name. 
+ * + * Then in the documentation, you can extract what is between tag and end tags + * with ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{client-tests}/IndicesDocumentationIT.java[your-example-tag-here] + * -------------------------------------------------- + */ public class IndicesDocumentationIT extends ESIntegTestCase { - public void createMappings() { + /** + * This test method is used to generate the Put Mapping Java Indices API documentation + * at "docs/java-api/admin/indices/put-mapping.asciidoc" so the documentation gets tested + * so that it compiles and runs without throwing errors at runtime. + */ + public void testPutMappingDocumentation() throws Exception { Client client = client(); // tag::index-with-mapping @@ -39,6 +58,50 @@ public void createMappings() { "}") .get(); // end::index-with-mapping + + // we need to delete in order to create a fresh new index with another type + client.admin().indices().prepareDelete("twitter").get(); + client.admin().indices().prepareCreate("twitter").get(); + + // tag::putMapping-request-source + client.admin().indices().preparePutMapping("twitter") // <1> + .setType("user") // <2> + .setSource("{\n" + // <3> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + + // You can also provide the type in the source document + client.admin().indices().preparePutMapping("twitter") + .setType("user") + .setSource("{\n" + + " \"user\":{\n" + // <4> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + // end::putMapping-request-source + + // tag::putMapping-request-source-append + client.admin().indices().preparePutMapping("twitter") // <1> + .setType("user") // <2> + .setSource("{\n" + // <3> + " \"properties\": {\n" + + " \"user_name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + // end::putMapping-request-source-append } } From 84252575933c4b1bad2b3b5f5cd5f38199ee36cc Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 14 Mar 2018 16:34:40 -0600 Subject: [PATCH 33/89] [TEST] Fix issue parsing response out of order When parsing GetResponse it was possible that the equality check failed because items in the map were in a different order (in the `.equals` implementation). 
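A hedged sketch of the reasoning behind the fix below (only the variable names are taken from the diff; the rest is illustrative):

[source,java]
--------------------------------------------------
// java.util.Map equality compares entries irrespective of iteration order,
// so asserting on the parsed source maps stays stable even when an XContent
// round trip hands the keys back in a different order.
Map<String, Object> expected = expectedGetResponse.getSourceAsMap();
Map<String, Object> parsed = parsedGetResponse.getSourceAsMap();
assertEquals(expected, parsed);
--------------------------------------------------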
--- .../java/org/elasticsearch/action/get/GetResponseTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java index d607a473b9add..ee4be1a5396ce 100644 --- a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java @@ -81,7 +81,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws parsedGetResponse = GetResponse.fromXContent(parser); assertNull(parser.nextToken()); } - assertEquals(expectedGetResponse, parsedGetResponse); + assertEquals(expectedGetResponse.getSourceAsMap(), parsedGetResponse.getSourceAsMap()); //print the parsed object out and test that the output is the same as the original output BytesReference finalBytes = toXContent(parsedGetResponse, xContentType, humanReadable); assertToXContentEquivalent(originalBytes, finalBytes, xContentType); From 0cc1ffdf206eb88df51c3a0521972f24f67337fc Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 14 Mar 2018 16:19:04 -0700 Subject: [PATCH 34/89] Improve error message for installing plugin (#28298) Provide a more actionable error message when installing an offline plugin and the node's `plugins` directory already contains the plugin distribution. Closes #27401 --- .../plugins/InstallPluginCommand.java | 26 +++++++++++++------ .../plugins/InstallPluginCommandTests.java | 2 +- .../elasticsearch/plugins/PluginsService.java | 2 +- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index b7f201b70aa46..44043f1c68545 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -53,6 +53,7 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; @@ -218,17 +219,17 @@ void execute(Terminal terminal, String pluginId, boolean isBatch, Environment en throw new UserException(ExitCodes.USAGE, "plugin id is required"); } - Path pluginZip = download(terminal, pluginId, env.tmpFile()); + Path pluginZip = download(terminal, pluginId, env.tmpFile(), env.pluginsFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to.
*/ - private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Exception { + private Path download(Terminal terminal, String pluginId, Path tmpDir, Path pluginsDir) throws Exception { if (OFFICIAL_PLUGINS.contains(pluginId)) { final String url = getElasticUrl(terminal, getStagingHash(), Version.CURRENT, pluginId, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from elastic"); - return downloadZipAndChecksum(terminal, url, tmpDir, false); + return downloadZipAndChecksum(terminal, url, tmpDir, pluginsDir, false); } // now try as maven coordinates, a valid URL would only have a colon and slash @@ -236,7 +237,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Ex if (coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false) { String mavenUrl = getMavenUrl(terminal, coordinates, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from maven central"); - return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, true); + return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, pluginsDir, true); } // fall back to plain old URL @@ -250,7 +251,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Ex throw new UserException(ExitCodes.USAGE, msg); } terminal.println("-> Downloading " + URLDecoder.decode(pluginId, "UTF-8")); - return downloadZip(terminal, pluginId, tmpDir); + return downloadZip(terminal, pluginId, tmpDir, pluginsDir); } // pkg private so tests can override @@ -324,9 +325,17 @@ private List checkMisspelledPlugin(String pluginId) { /** Downloads a zip from the url, into a temp file under the given temp dir. */ // pkg private for tests @SuppressForbidden(reason = "We use getInputStream to download plugins") - Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { terminal.println(VERBOSE, "Retrieving zip from " + urlString); URL url = new URL(urlString); + if (url.getProtocol().equals("file")) { + Path pluginsFile = Paths.get(url.getFile()); + if (pluginsFile.startsWith(pluginsDir)) { + throw new IllegalStateException("Installation failed! " + + "Make sure the plugins directory [" + pluginsDir + "] can not contain the plugin distribution [" + + pluginsFile + "]; move the distribution to an alternate location!"); + } + } Path zip = Files.createTempFile(tmpDir, null, ".zip"); URLConnection urlConnection = url.openConnection(); urlConnection.addRequestProperty("User-Agent", "elasticsearch-plugin-installer"); @@ -375,8 +384,9 @@ public void onProgress(int percent) { /** Downloads a zip from the url, as well as a SHA512 (or SHA1) checksum, and checks the checksum. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use openStream to download plugins") - private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, boolean allowSha1) throws Exception { - Path zip = downloadZip(terminal, urlString, tmpDir); + private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir, boolean allowSha1) + throws Exception { + Path zip = downloadZip(terminal, urlString, tmpDir, pluginsDir); pathsToDeleteOnShutdown.add(zip); String checksumUrlString = urlString + ".sha512"; URL checksumUrl = openUrl(checksumUrlString); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index d799cb0407f58..0735c579a255f 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -981,7 +981,7 @@ void assertInstallPluginFromUrl(String pluginId, String name, String url, String Path pluginZip = createPlugin(name, pluginDir); InstallPluginCommand command = new InstallPluginCommand() { @Override - Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { assertEquals(url, urlString); Path downloadedPath = tmpDir.resolve("downloaded.zip"); Files.copy(pluginZip, downloadedPath); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 4514691e4bec4..cca85d28aa137 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -328,7 +328,7 @@ public String name() { public Collection bundles() { return bundles; } - + } /** From 85933161d470691962aca1f327600a51811d5070 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 15 Mar 2018 11:49:45 +0100 Subject: [PATCH 35/89] Mute failing GetResultTests and DocumentFieldTests --- .../java/org/elasticsearch/index/get/DocumentFieldTests.java | 1 + .../test/java/org/elasticsearch/index/get/GetResultTests.java | 2 ++ 2 files changed, 3 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index 9d581054f46b8..d3c8af0d0f70e 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -55,6 +55,7 @@ public void testEqualsAndHashcode() { DocumentFieldTests::mutateDocumentField); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomDocumentField(xContentType); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index a38d183299cdd..18b14ac4b0506 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -49,6 +49,7 @@ 
public class GetResultTests extends ESTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); @@ -86,6 +87,7 @@ public void testToXContent() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContentEmbedded() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); From 18d848f218c6457c09d0daef6b65e4e939537743 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Mar 2018 16:39:02 +0100 Subject: [PATCH 36/89] Reenable LiveVersionMapTests.testRamBytesUsed on Java 9. (#29063) I also had to make the test more lenient. This is due to the fact that Lucene's RamUsageTester was changed in order not to reflect `java.*` classes and the way that it estimates ram usage of maps is by assuming it has similar memory usage to an `Object[]` array that stores all keys and values. The implementation in `LiveVersionMap` tries to be slightly more realistic by taking the load factor and linked lists into account, so it usually gives a higher estimate which happens to be closer to reality. Closes #22548 --- .../index/engine/LiveVersionMapTests.java | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index 8bfe256fe0b8a..8c5973e8750fd 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -21,10 +21,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.RamUsageTester; import org.apache.lucene.util.TestUtil; -import org.elasticsearch.Assertions; -import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.test.ESTestCase; @@ -43,7 +42,6 @@ public class LiveVersionMapTests extends ESTestCase { public void testRamBytesUsed() throws Exception { - assumeTrue("Test disabled for JDK 9", JavaVersion.current().compareTo(JavaVersion.parse("9")) < 0); LiveVersionMap map = new LiveVersionMap(); for (int i = 0; i < 100000; ++i) { BytesRefBuilder uid = new BytesRefBuilder(); @@ -72,8 +70,23 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.sizeOf(map); estimatedRamBytesUsed = map.ramBytesUsed(); - // less than 25% off - assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 4); + long tolerance; + if (Constants.JRE_IS_MINIMUM_JAVA9) { + // With Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. + // less than 50% off + tolerance = actualRamBytesUsed / 2; + } else { + // Java 8 is more accurate by doing reflection into the actual JDK classes + // so we give it a lower error bound. 
+ // less than 25% off + tolerance = actualRamBytesUsed / 4; + } + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } private BytesRef uid(String string) { From 312ccc05d5721027a1a9ee4b7c9c0fafe9c9735e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 15 Mar 2018 16:42:26 +0100 Subject: [PATCH 37/89] [Tests] Fix GetResultTests and DocumentFieldTests failures (#29083) Changes made in #28972 seem to have changed some assumptions about how SMILE and CBOR write byte[] values and how this is tested. This changes the generation of the randomized DocumentField values back to BytesArray while expecting the JSON and YAML deserialisation to produce Base64 encoded strings and SMILE and CBOR to parse back BytesArray instances. Closes #29080 --- .../org/elasticsearch/index/get/DocumentFieldTests.java | 1 - .../java/org/elasticsearch/index/get/GetResultTests.java | 2 -- .../main/java/org/elasticsearch/test/RandomObjects.java | 8 +++++--- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index d3c8af0d0f70e..9d581054f46b8 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -55,7 +55,6 @@ public void testEqualsAndHashcode() { DocumentFieldTests::mutateDocumentField); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 18b14ac4b0506..a38d183299cdd 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -49,7 +49,6 @@ public class GetResultTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple<GetResult, GetResult> tuple = randomGetResult(xContentType); @@ -87,7 +86,6 @@ public void testToXContent() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContentEmbedded() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple<GetResult, GetResult> tuple = randomGetResult(xContentType); diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 6cdd3ac7796dc..a509645495858 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -21,10 +21,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo.Failure; import org.elasticsearch.cluster.block.ClusterBlockException; +import
org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContent; @@ -127,14 +129,14 @@ public static Tuple, List> randomStoredFieldValues(Random r break; case 8: byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8); + BytesArray randomBytesArray = new BytesArray(randomBytes); + originalValues.add(randomBytesArray); if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { //JSON and YAML write the base64 format expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes)); - originalValues.add(Base64.getEncoder().encodeToString(randomBytes)); } else { //SMILE and CBOR write the original bytes as they support binary format - expectedParsedValues.add(randomBytes); - originalValues.add(randomBytes); + expectedParsedValues.add(randomBytesArray); } break; default: From 404e776a4545793cba9a54b94bc8e58c73a0438e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Mar 2018 16:43:56 +0100 Subject: [PATCH 38/89] Validate regular expressions in dynamic templates. (#29013) Today you would only get these errors at index time. Relates #24749 --- .../index/mapper/DynamicTemplate.java | 19 ++++++++++++++++++- .../index/mapper/DynamicTemplateTests.java | 13 +++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java index 8da1915b8ca56..71a2cdb32f9df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java @@ -216,7 +216,24 @@ public static DynamicTemplate parse(String name, Map conf, } } } - return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, xcontentFieldType, MatchType.fromString(matchPattern), mapping); + + final MatchType matchType = MatchType.fromString(matchPattern); + + if (indexVersionCreated.onOrAfter(Version.V_6_3_0)) { + // Validate that the patterns are valid regular expressions + for (String regex : new String[] { pathMatch, match, pathUnmatch, unmatch }) { + if (regex == null) { + continue; + } + try { + matchType.matches(regex, ""); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Pattern [" + regex + "] of type [" + matchType + "] is invalid.
Cannot create dynamic template [" + name + "].", e); + } + } + } + + return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, xcontentFieldType, matchType, mapping); } private final String name; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 562d54a92babd..f48603d30515f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -62,6 +62,19 @@ public void testParseUnknownMatchType() { e.getMessage()); } + public void testParseInvalidRegex() { + for (String param : new String[] { "path_match", "match", "path_unmatch", "unmatch" }) { + Map templateDef = new HashMap<>(); + templateDef.put("match", "foo"); + templateDef.put(param, "*a"); + templateDef.put("match_pattern", "regex"); + templateDef.put("mapping", Collections.singletonMap("store", true)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_3_0)); + assertEquals("Pattern [*a] of type [regex] is invalid. Cannot create dynamic template [my_template].", e.getMessage()); + } + } + public void testMatchAllTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "*"); From cf60e93a21ec5cef81b06c9d80e37535192f1f29 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 15 Mar 2018 12:10:30 -0400 Subject: [PATCH 39/89] Docs: HighLevelRestClient#exists (#29073) Docs: HighLevelRestClient#exists Add documentation for `HighLevelRestClient#exists`. Relates to #28389 --- .../documentation/CRUDDocumentationIT.java | 43 +++++++++++++ .../high-level/document/exists.asciidoc | 60 +++++++++++++++++++ .../high-level/document/get.asciidoc | 1 + .../high-level/supported-apis.asciidoc | 1 + 4 files changed, 105 insertions(+) create mode 100644 docs/java-rest/high-level/document/exists.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 95e5364756424..a12bd48f22242 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -932,6 +932,49 @@ public void onFailure(Exception e) { } } + public void testExists() throws Exception { + RestHighLevelClient client = highLevelClient(); + // tag::exists-request + GetRequest getRequest = new GetRequest( + "posts", // <1> + "doc", // <2> + "1"); // <3> + getRequest.fetchSourceContext(new FetchSourceContext(false)); // <4> + getRequest.storedFields("_none_"); // <5> + // end::exists-request + { + // tag::exists-execute + boolean exists = client.exists(getRequest); + // end::exists-execute + assertFalse(exists); + } + { + // tag::exists-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(Boolean exists) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::exists-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::exists-execute-async + 
client.existsAsync(getRequest, listener); // <1> + // end::exists-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testBulkProcessor() throws InterruptedException { RestHighLevelClient client = highLevelClient(); { diff --git a/docs/java-rest/high-level/document/exists.asciidoc b/docs/java-rest/high-level/document/exists.asciidoc new file mode 100644 index 0000000000000..d14c9fdd66a05 --- /dev/null +++ b/docs/java-rest/high-level/document/exists.asciidoc @@ -0,0 +1,60 @@ +[[java-rest-high-document-exists]] +=== Exists API + +The exists API returns `true` if a document exists, and `false` otherwise. + +[[java-rest-high-document-exists-request]] +==== Exists Request + +It uses `GetRequest` just like the <>. +All of its <> +are supported. Since `exists()` only returns `true` or `false`, we recommend +turning off fetching `_source` and any stored fields so the request is +slightly lighter: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request] +-------------------------------------------------- +<1> Index +<2> Type +<3> Document id +<4> Disable fetching `_source`. +<5> Disable fetching stored fields. + +[[java-rest-high-document-exists-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute] +-------------------------------------------------- + +[[java-rest-high-document-exists-async]] +==== Asynchronous Execution + +The asynchronous execution of exists request requires both the `GetRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-async] +-------------------------------------------------- +<1> The `GetRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. 
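Pulling the tagged snippets above together, the minimal synchronous existence check described by the new page amounts to the following sketch (the index, type, and id values mirror the test above, and `client` is assumed to be a `RestHighLevelClient`):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

GetRequest getRequest = new GetRequest("posts", "doc", "1");
getRequest.fetchSourceContext(new FetchSourceContext(false)); // _source is not needed for an existence check
getRequest.storedFields("_none_");                            // nor are stored fields
boolean exists = client.exists(getRequest);                   // true only if the document was found
--------------------------------------------------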
diff --git a/docs/java-rest/high-level/document/get.asciidoc b/docs/java-rest/high-level/document/get.asciidoc index 07a0b7c1a6721..9d04e138eea1e 100644 --- a/docs/java-rest/high-level/document/get.asciidoc +++ b/docs/java-rest/high-level/document/get.asciidoc @@ -14,6 +14,7 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request] <2> Type <3> Document id +[[java-rest-high-document-get-request-optional-arguments]] ==== Optional arguments The following arguments can optionally be provided: diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index fa2f57069ba93..79f17db577421 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -17,6 +17,7 @@ Multi-document APIs:: include::document/index.asciidoc[] include::document/get.asciidoc[] +include::document/exists.asciidoc[] include::document/delete.asciidoc[] include::document/update.asciidoc[] include::document/bulk.asciidoc[] From 8cb3d18eace31ad1f554c040acfdf38a690abcca Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 15 Mar 2018 10:47:50 -0700 Subject: [PATCH 40/89] Revert "Improve error message for installing plugin (#28298)" This reverts commit 0cc1ffdf206eb88df51c3a0521972f24f67337fc. The reason is that the Windows tests are failing because of an incorrect path for the plugin. --- .../plugins/InstallPluginCommand.java | 26 ++++++------------- .../plugins/InstallPluginCommandTests.java | 2 +- .../elasticsearch/plugins/PluginsService.java | 2 +- 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 44043f1c68545..b7f201b70aa46 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -53,7 +53,6 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; @@ -219,17 +218,17 @@ void execute(Terminal terminal, String pluginId, boolean isBatch, Environment en throw new UserException(ExitCodes.USAGE, "plugin id is required"); } - Path pluginZip = download(terminal, pluginId, env.tmpFile(), env.pluginsFile()); + Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to.
*/ - private Path download(Terminal terminal, String pluginId, Path tmpDir, Path pluginsDir) throws Exception { + private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Exception { if (OFFICIAL_PLUGINS.contains(pluginId)) { final String url = getElasticUrl(terminal, getStagingHash(), Version.CURRENT, pluginId, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from elastic"); - return downloadZipAndChecksum(terminal, url, tmpDir, pluginsDir, false); + return downloadZipAndChecksum(terminal, url, tmpDir, false); } // now try as maven coordinates, a valid URL would only have a colon and slash @@ -237,7 +236,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir, Path plug if (coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false) { String mavenUrl = getMavenUrl(terminal, coordinates, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from maven central"); - return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, pluginsDir, true); + return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, true); } // fall back to plain old URL @@ -251,7 +250,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir, Path plug throw new UserException(ExitCodes.USAGE, msg); } terminal.println("-> Downloading " + URLDecoder.decode(pluginId, "UTF-8")); - return downloadZip(terminal, pluginId, tmpDir, pluginsDir); + return downloadZip(terminal, pluginId, tmpDir); } // pkg private so tests can override @@ -325,17 +324,9 @@ private List checkMisspelledPlugin(String pluginId) { /** Downloads a zip from the url, into a temp file under the given temp dir. */ // pkg private for tests @SuppressForbidden(reason = "We use getInputStream to download plugins") - Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { terminal.println(VERBOSE, "Retrieving zip from " + urlString); URL url = new URL(urlString); - if (url.getProtocol().equals("file")) { - Path pluginsFile = Paths.get(url.getFile()); - if (pluginsFile.startsWith(pluginsDir)) { - throw new IllegalStateException("Installation failed! " + - "Make sure the plugins directory [" + pluginsDir + "] can not contain the plugin distribution [" + - pluginsFile + "]; move the distribution to an alternate location!"); - } - } Path zip = Files.createTempFile(tmpDir, null, ".zip"); URLConnection urlConnection = url.openConnection(); urlConnection.addRequestProperty("User-Agent", "elasticsearch-plugin-installer"); @@ -384,9 +375,8 @@ public void onProgress(int percent) { /** Downloads a zip from the url, as well as a SHA512 (or SHA1) checksum, and checks the checksum. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use openStream to download plugins") - private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir, boolean allowSha1) - throws Exception { - Path zip = downloadZip(terminal, urlString, tmpDir, pluginsDir); + private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, boolean allowSha1) throws Exception { + Path zip = downloadZip(terminal, urlString, tmpDir); pathsToDeleteOnShutdown.add(zip); String checksumUrlString = urlString + ".sha512"; URL checksumUrl = openUrl(checksumUrlString); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 0735c579a255f..d799cb0407f58 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -981,7 +981,7 @@ void assertInstallPluginFromUrl(String pluginId, String name, String url, String Path pluginZip = createPlugin(name, pluginDir); InstallPluginCommand command = new InstallPluginCommand() { @Override - Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { assertEquals(url, urlString); Path downloadedPath = tmpDir.resolve("downloaded.zip"); Files.copy(pluginZip, downloadedPath); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index cca85d28aa137..4514691e4bec4 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -328,7 +328,7 @@ public String name() { public Collection bundles() { return bundles; } - + } /** From f82376c4f8f48ae212ee659ed8c0be92e67547bf Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 15 Mar 2018 11:40:20 -0700 Subject: [PATCH 41/89] [DOCS] Add X-Pack upgrade details (#29038) --- docs/reference/upgrade/cluster_restart.asciidoc | 5 +++++ docs/reference/upgrade/rolling_upgrade.asciidoc | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc index a8f2f51dad2aa..bdd8a8207ff83 100644 --- a/docs/reference/upgrade/cluster_restart.asciidoc +++ b/docs/reference/upgrade/cluster_restart.asciidoc @@ -26,6 +26,9 @@ recovery. include::synced-flush.asciidoc[] -- +. *Stop any machine learning jobs that are running.* See +{xpack-ref}/stopping-ml.html[Stopping Machine Learning]. + . *Shutdown all nodes.* + -- @@ -124,3 +127,5 @@ GET _cat/recovery -------------------------------------------------- // CONSOLE -- + +. *Restart machine learning jobs.* diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 2b46b65f2617f..5af521303175c 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -32,6 +32,9 @@ include::synced-flush.asciidoc[] -- +. *Stop any machine learning jobs that are running.* See +{xpack-ref}/stopping-ml.html[Stopping Machine Learning]. + . [[upgrade-node]] *Shut down a single node*. 
+ -- @@ -147,6 +150,8 @@ for each node that needs to be updated. -- +. *Restart machine learning jobs.* + [IMPORTANT] ==================================================== From c75790e7c063436a8f0f3ba356ec8ff482e7b855 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 15 Mar 2018 14:42:15 -0400 Subject: [PATCH 42/89] TEST: write ops should execute under shard permit (#28966) Currently ESIndexLevelReplicationTestCase executes write operations without acquiring an index shard permit. This may prevent the primary term on a replica from being updated or cause a race between resync and indexing on the primary. This commit ensures that write operations are always executed under a shard permit, just like the production code. --- .../ESIndexLevelReplicationTestCase.java | 29 ++++++++++++++----- .../IndexLevelReplicationTests.java | 4 +-- .../RecoveryDuringReplicationTests.java | 6 ++-- 3 files changed, 26 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index f74ffdc4b4dc4..ad046dddc0c27 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -456,6 +456,10 @@ public void onFailure(Exception e) { } } + IndexShard getPrimaryShard() { + return replicationGroup.primary; + } + protected abstract PrimaryResult performOnPrimary(IndexShard primary, Request request) throws Exception; protected abstract void performOnReplica(ReplicaRequest request, IndexShard replica) throws Exception; @@ -592,7 +596,7 @@ protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardRequest re @Override protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(replica, request); + executeShardBulkOnReplica(request, replica, getPrimaryShard().getPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); } } @@ -602,15 +606,24 @@ private TransportWriteAction.WritePrimaryResult result = - TransportShardBulkAction.performOnPrimary(request, primary, null, - System::currentTimeMillis, new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); + final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); + primary.acquirePrimaryOperationPermit(permitAcquiredFuture, ThreadPool.Names.SAME, request); + final TransportWriteAction.WritePrimaryResult result; + try (Releasable ignored = permitAcquiredFuture.actionGet()) { + result = TransportShardBulkAction.performOnPrimary(request, primary, null, System::currentTimeMillis, + new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); + } TransportWriteActionTestHelper.performPostWriteActions(primary, request, result.location, logger); return result; } - private void executeShardBulkOnReplica(IndexShard replica, BulkShardRequest request) throws Exception { - final Translog.Location location = TransportShardBulkAction.performOnReplica(request, replica); + private void executeShardBulkOnReplica(BulkShardRequest request, IndexShard replica, long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception { + final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); + replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, permitAcquiredFuture, ThreadPool.Names.SAME, request); + final Translog.Location location; + try
(Releasable ignored = permitAcquiredFuture.actionGet()) { + location = TransportShardBulkAction.performOnReplica(request, replica); + } TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger); } @@ -630,8 +643,8 @@ BulkShardRequest indexOnPrimary(IndexRequest request, IndexShard primary) throws /** * indexes the given requests on the supplied replica shard */ - void indexOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(replica, request); + void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { + executeShardBulkOnReplica(request, replica, group.primary.getPrimaryTerm(), group.primary.getGlobalCheckpoint()); } class GlobalCheckpointSync extends ReplicationAction< diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 8c15a2a84ddb8..86436d8d88ac9 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -209,7 +209,7 @@ public void testConflictingOpsOnReplica() throws Exception { logger.info("--> isolated replica " + replica1.routingEntry()); BulkShardRequest replicationRequest = indexOnPrimary(indexRequest, shards.getPrimary()); for (int i = 1; i < replicas.size(); i++) { - indexOnReplica(replicationRequest, replicas.get(i)); + indexOnReplica(replicationRequest, shards, replicas.get(i)); } logger.info("--> promoting replica to primary " + replica1.routingEntry()); @@ -318,7 +318,7 @@ public void testSeqNoCollision() throws Exception { logger.info("--> Isolate replica1"); IndexRequest indexDoc1 = new IndexRequest(index.getName(), "type", "d1").source("{}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(indexDoc1, shards.getPrimary()); - indexOnReplica(replicationRequest, replica2); + indexOnReplica(replicationRequest, shards, replica2); final Translog.Operation op1; final List initOperations = new ArrayList<>(initDocs); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index dcfa2cb34a2db..66e2a09750a2d 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -236,7 +236,7 @@ public void testRecoveryAfterPrimaryPromotion() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "rollback_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, replica); + indexOnReplica(bulkShardRequest, shards, replica); } if (randomBoolean()) { oldPrimary.flush(new FlushRequest(index.getName())); @@ -326,7 +326,7 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "stale_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, replica); + indexOnReplica(bulkShardRequest, shards, replica); } shards.flush(); 
shards.promoteReplicaToPrimary(newPrimary).get(); @@ -374,7 +374,7 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, newPrimary); + indexOnReplica(bulkShardRequest, shards, newPrimary); } logger.info("--> resyncing replicas"); PrimaryReplicaSyncer.ResyncTask task = shards.promoteReplicaToPrimary(newPrimary).get(); From a685784ceaffe34e5277d03041cc402a5aba697c Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 16 Mar 2018 09:59:23 +1100 Subject: [PATCH 43/89] CLI: Close subcommands in MultiCommand (#28954) * CLI Command: MultiCommand must close subcommands to release resources properly - Override the close method and call close on subcommands using IOUtils#close - Add a unit test Closes #28953 --- server/cli/build.gradle | 1 + .../org/elasticsearch/cli/MultiCommand.java | 10 +++ .../elasticsearch/cli/MultiCommandTests.java | 75 ++++++++++++++++++- 3 files changed, 84 insertions(+), 2 deletions(-) diff --git a/server/cli/build.gradle b/server/cli/build.gradle index c41c4d975b082..91fbca19eca99 100644 --- a/server/cli/build.gradle +++ b/server/cli/build.gradle @@ -36,6 +36,7 @@ archivesBaseName = 'elasticsearch-cli' dependencies { compile 'net.sf.jopt-simple:jopt-simple:5.0.2' + compile "org.elasticsearch:elasticsearch-core:${version}" } test.enabled = false diff --git a/server/cli/src/main/java/org/elasticsearch/cli/MultiCommand.java b/server/cli/src/main/java/org/elasticsearch/cli/MultiCommand.java index ba6b447792aa1..054a29e78a6cc 100644 --- a/server/cli/src/main/java/org/elasticsearch/cli/MultiCommand.java +++ b/server/cli/src/main/java/org/elasticsearch/cli/MultiCommand.java @@ -19,6 +19,8 @@ package org.elasticsearch.cli; +import java.io.Closeable; +import java.io.IOException; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; @@ -26,6 +28,8 @@ import joptsimple.NonOptionArgumentSpec; import joptsimple.OptionSet; +import org.elasticsearch.core.internal.io.IOUtils; + /** * A cli tool which is made up of multiple subcommands.
*/ @@ -74,4 +78,10 @@ protected void execute(Terminal terminal, OptionSet options) throws Exception { } subcommand.mainWithoutErrorHandling(Arrays.copyOfRange(args, 1, args.length), terminal); } + + @Override + public void close() throws IOException { + IOUtils.close(subcommands.values()); + } + } diff --git a/server/src/test/java/org/elasticsearch/cli/MultiCommandTests.java b/server/src/test/java/org/elasticsearch/cli/MultiCommandTests.java index f4448bbedfef5..41fe851ed2561 100644 --- a/server/src/test/java/org/elasticsearch/cli/MultiCommandTests.java +++ b/server/src/test/java/org/elasticsearch/cli/MultiCommandTests.java @@ -22,22 +22,57 @@ import joptsimple.OptionSet; import org.junit.Before; +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + public class MultiCommandTests extends CommandTestCase { static class DummyMultiCommand extends MultiCommand { + + final AtomicBoolean closed = new AtomicBoolean(); + DummyMultiCommand() { - super("A dummy multi command", () -> {}); + super("A dummy multi command", () -> { + }); + } + + @Override + public void close() throws IOException { + super.close(); + if (this.closed.compareAndSet(false, true) == false) { + throw new IllegalStateException("DummyMultiCommand already closed"); + } } } static class DummySubCommand extends Command { + final boolean throwsExceptionOnClose; + final AtomicBoolean closeCalled = new AtomicBoolean(); + DummySubCommand() { - super("A dummy subcommand", () -> {}); + this(false); } + + DummySubCommand(final boolean throwsExceptionOnClose) { + super("A dummy subcommand", () -> { + }); + this.throwsExceptionOnClose = throwsExceptionOnClose; + } + @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { terminal.println("Arguments: " + options.nonOptionArguments().toString()); } + + @Override + public void close() throws IOException { + if (this.closeCalled.compareAndSet(false, true) == false) { + throw new IllegalStateException("DummySubCommand already closed"); + } + if (throwsExceptionOnClose) { + throw new IOException("Error occurred while closing DummySubCommand"); + } + } } DummyMultiCommand multiCommand; @@ -102,4 +137,40 @@ public void testSubcommandArguments() throws Exception { assertFalse(output, output.contains("command1")); assertTrue(output, output.contains("Arguments: [foo, bar]")); } + + public void testClose() throws Exception { + DummySubCommand subCommand1 = new DummySubCommand(); + DummySubCommand subCommand2 = new DummySubCommand(); + multiCommand.subcommands.put("command1", subCommand1); + multiCommand.subcommands.put("command2", subCommand2); + multiCommand.close(); + assertTrue("MultiCommand was not closed when close method is invoked", multiCommand.closed.get()); + assertTrue("SubCommand1 was not closed when close method is invoked", subCommand1.closeCalled.get()); + assertTrue("SubCommand2 was not closed when close method is invoked", subCommand2.closeCalled.get()); + } + + public void testCloseWhenSubCommandCloseThrowsException() throws Exception { + final boolean command1Throws = randomBoolean(); + final boolean command2Throws = randomBoolean(); + final DummySubCommand subCommand1 = new DummySubCommand(command1Throws); + final DummySubCommand subCommand2 = new DummySubCommand(command2Throws); + multiCommand.subcommands.put("command1", subCommand1); + multiCommand.subcommands.put("command2", subCommand2); + if (command1Throws || command2Throws) { + // verify exception is thrown, as well as other non failed sub-commands closed + // 
properly. + IOException ioe = expectThrows(IOException.class, multiCommand::close); + assertEquals("Error occurred while closing DummySubCommand", ioe.getMessage()); + if (command1Throws && command2Throws) { + assertEquals(1, ioe.getSuppressed().length); + assertTrue("Missing suppressed exceptions", ioe.getSuppressed()[0] instanceof IOException); + assertEquals("Error occurred while closing DummySubCommand", ioe.getSuppressed()[0].getMessage()); + } + } else { + multiCommand.close(); + } + assertTrue("SubCommand1 was not closed when close method is invoked", subCommand1.closeCalled.get()); + assertTrue("SubCommand2 was not closed when close method is invoked", subCommand2.closeCalled.get()); + } + } From 55683d89c42c27e10872a71afcd82e9a0cd83a67 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 15 Mar 2018 20:58:37 -0400 Subject: [PATCH 44/89] Clarify how to set compiler and runtime JDKs (#29101) This commit enhances the error messages reported when JAVA_HOME and RUNTIME_JAVA_HOME are not correctly set to point towards the minimum compiler and minimum runtime JDKs that are expected by the builds. The previous error message would say: Java 1.9 or above is required to build Elasticsearch which is confusing if the user does have a JDK 9 installation and is even the version that they have on their path yet they have JAVA_HOME pointing to another JDK installation. The error message reported after this change is: the environment variable JAVA_HOME must be set to a JDK installation directory for Java 1.9 but is [/usr/java/jdk-8] corresponding to [1.8] --- .../org/elasticsearch/gradle/BuildPlugin.groovy | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index b72d5696af720..6043ce210906a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -140,16 +140,22 @@ class BuildPlugin implements Plugin { final GradleVersion minGradle = GradleVersion.version('4.3') if (currentGradleVersion < minGradle) { - throw new GradleException("${minGradle} or above is required to build elasticsearch") + throw new GradleException("${minGradle} or above is required to build Elasticsearch") } // enforce Java version if (compilerJavaVersionEnum < minimumCompilerVersion) { - throw new GradleException("Java ${minimumCompilerVersion} or above is required to build Elasticsearch") + final String message = + "the environment variable JAVA_HOME must be set to a JDK installation directory for Java ${minimumCompilerVersion}" + + " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]" + throw new GradleException(message) } if (runtimeJavaVersionEnum < minimumRuntimeVersion) { - throw new GradleException("Java ${minimumRuntimeVersion} or above is required to run Elasticsearch") + final String message = + "the environment variable RUNTIME_JAVA_HOME must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" + + " but is [${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]" + throw new GradleException(message) } project.rootProject.ext.compilerJavaHome = compilerJavaHome From 4897e0034befd61f16a81185388ae91b988db2ea Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 15 Mar 2018 21:35:40 -0400 Subject: [PATCH 45/89] Allow overriding JVM options in Windows service (#29044) Today we allow any other method of 
starting Elasticsearch to override jvm.options via ES_JAVA_OPTS. Yet, for some settings in the Windows service, we do not allow this. This commit removes this in favor of being consistent with other packaging choices. --- .../src/bin/elasticsearch-service.bat | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/distribution/src/bin/elasticsearch-service.bat b/distribution/src/bin/elasticsearch-service.bat index 065725f8bdb72..e4f3e92b084c4 100644 --- a/distribution/src/bin/elasticsearch-service.bat +++ b/distribution/src/bin/elasticsearch-service.bat @@ -120,50 +120,26 @@ echo %ES_JAVA_OPTS% for %%a in ("%ES_JAVA_OPTS:;=","%") do ( set var=%%a if "!var:~1,4!" == "-Xms" ( - if not "!JVM_MS!" == "" ( - echo duplicate min heap size settings found - goto:eof - ) set XMS=!var:~5,-1! call:convertxm !XMS! JVM_MS ) if "!var:~1,16!" == "-XX:MinHeapSize=" ( - if not "!JVM_MS!" == "" ( - echo duplicate min heap size settings found - goto:eof - ) set XMS=!var:~17,-1! call:convertxm !XMS! JVM_MS ) if "!var:~1,4!" == "-Xmx" ( - if not "!JVM_MX!" == "" ( - echo duplicate max heap size settings found - goto:eof - ) set XMX=!var:~5,-1! call:convertxm !XMX! JVM_MX ) if "!var:~1,16!" == "-XX:MaxHeapSize=" ( - if not "!JVM_MX!" == "" ( - echo duplicate max heap size settings found - goto:eof - ) set XMX=!var:~17,-1! call:convertxm !XMX! JVM_MX ) if "!var:~1,4!" == "-Xss" ( - if not "!JVM_SS!" == "" ( - echo duplicate thread stack size settings found - exit 1 - ) set XSS=!var:~5,-1! call:convertxk !XSS! JVM_SS ) if "!var:~1,20!" == "-XX:ThreadStackSize=" ( - if not "!JVM_SS!" == "" ( - echo duplicate thread stack size settings found - goto:eof - ) set XSS=!var:~21,-1! call:convertxk !XSS! JVM_SS ) From 069a87654246cad9ecba25e040485adf3b00056c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 15 Mar 2018 20:36:00 +0100 Subject: [PATCH 46/89] Added minimal docs for reindex api in java-api docs Additionally: * Included the existing update-by-query Java API docs in the java-api docs (for some reason they were never included; they needed some tweaking and then were good to go). * Moved the delete-by-query / update-by-query code samples to a Java file so that we can verify that these samples at least compile. Closes #24203 --- docs/java-api/docs.asciidoc | 8 +- docs/java-api/docs/delete.asciidoc | 26 +-- docs/java-api/docs/reindex.asciidoc | 11 + docs/java-api/docs/update-by-query.asciidoc | 131 ++++-------- docs/java-api/index.asciidoc | 2 + .../documentation/ReindexDocumentationIT.java | 194 ++++++++++++++++++ 6 files changed, 255 insertions(+), 117 deletions(-) create mode 100644 modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java diff --git a/docs/java-api/docs.asciidoc b/docs/java-api/docs.asciidoc index c355714bdd636..181c5d8e0bd99 100644 --- a/docs/java-api/docs.asciidoc +++ b/docs/java-api/docs.asciidoc @@ -7,12 +7,14 @@ This section describes the following CRUD APIs: * <> * <> * <> -* <> * <> .Multi-document APIs * <> * <> +* <> +* <> +* <> NOTE: All CRUD APIs are single-index APIs. The `index` parameter accepts a single index name, or an `alias` which points to a single index.
@@ -28,3 +30,7 @@ include::docs/update.asciidoc[] include::docs/multi-get.asciidoc[] include::docs/bulk.asciidoc[] + +include::docs/update-by-query.asciidoc[] + +include::docs/reindex.asciidoc[] \ No newline at end of file diff --git a/docs/java-api/docs/delete.asciidoc b/docs/java-api/docs/delete.asciidoc index 218ea14553b4c..9572c32c3a5d5 100644 --- a/docs/java-api/docs/delete.asciidoc +++ b/docs/java-api/docs/delete.asciidoc @@ -20,15 +20,9 @@ For more information on the delete operation, check out the The delete by query API allows one to delete a given set of documents based on the result of a query: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -BulkByScrollResponse response = - DeleteByQueryAction.INSTANCE.newRequestBuilder(client) - .filter(QueryBuilders.matchQuery("gender", "male")) <1> - .source("persons") <2> - .get(); <3> - -long deleted = response.getDeleted(); <4> +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[delete-by-query-sync] -------------------------------------------------- <1> query <2> index <3> execute <4> number of deleted documents As it can be a long running operation, if you wish to do it asynchronously, you can call `execute` instead of `get` and provide a listener like: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -DeleteByQueryAction.INSTANCE.newRequestBuilder(client) - .filter(QueryBuilders.matchQuery("gender", "male")) <1> - .source("persons") <2> - .execute(new ActionListener() { <3> - @Override - public void onResponse(BulkByScrollResponse response) { - long deleted = response.getDeleted(); <4> - } - @Override - public void onFailure(Exception e) { - // Handle the exception - } - }); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[delete-by-query-async] -------------------------------------------------- <1> query <2> index diff --git a/docs/java-api/docs/reindex.asciidoc b/docs/java-api/docs/reindex.asciidoc new file mode 100644 index 0000000000000..842e763f74d71 --- /dev/null +++ b/docs/java-api/docs/reindex.asciidoc @@ -0,0 +1,11 @@ +[[java-docs-reindex]] +=== Reindex API + +See {ref}/docs-reindex.html[reindex API]. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[reindex1] +-------------------------------------------------- +<1> Optionally a query can be provided to filter what documents should be + re-indexed from the source to the target index. diff --git a/docs/java-api/docs/update-by-query.asciidoc b/docs/java-api/docs/update-by-query.asciidoc index 8b3d2d71c400a..ae4f8d72ee1d9 100644 --- a/docs/java-api/docs/update-by-query.asciidoc +++ b/docs/java-api/docs/update-by-query.asciidoc @@ -1,18 +1,13 @@ -[[docs-update-by-query]] -== Update By Query API +[[java-docs-update-by-query]] +=== Update By Query API The simplest usage of `updateByQuery` updates each document in an index without changing the source. This usage enables -<> or another online -mapping change. +picking up a new property or another online mapping change.
-[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index").abortOnVersionConflict(false); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query] -------------------------------------------------- Calls to the `updateByQuery` API start by getting a snapshot of the index, indexing @@ -41,78 +36,50 @@ The `UpdateByQueryRequestBuilder` API supports filtering the updated documents, limiting the total number of documents to update, and updating documents with a script: -[source,java] --------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); -updateByQuery.source("source_index") - .filter(termQuery("level", "awesome")) - .size(1000) - .script(new Script("ctx._source.awesome = 'absolutely'", ScriptType.INLINE, "painless", emptyMap())); - -BulkByScrollResponse response = updateByQuery.get(); +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-filter] -------------------------------------------------- `UpdateByQueryRequestBuilder` also enables direct access to the query used to select the documents. You can use this access to change the default scroll size or otherwise modify the request for matching documents. -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index") - .source().setSize(500); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-size] -------------------------------------------------- You can also combine `size` with sorting to limit the documents updated: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index").size(100) - .source().addSort("cat", SortOrder.DESC); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-sort] -------------------------------------------------- In addition to changing the `_source` field for the document, you can use a script to change the action, similar to the Update API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index") - .script(new Script( - "if (ctx._source.awesome == 'absolutely) {" - + " ctx.op='noop' - + "} else if (ctx._source.awesome == 'lame') {" - + " ctx.op='delete'" - + "} else {" - + "ctx._source.awesome = 'absolutely'}", ScriptType.INLINE, "painless", emptyMap())); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-script] 
-------------------------------------------------- -As in the <>, you can set the value of `ctx.op` to change the +As in the <>, you can set the value of `ctx.op` to change the operation that executes: `noop`:: Set `ctx.op = "noop"` if your script doesn't make any changes. The `updateByQuery` operaton then omits that document from the updates. -This behavior increments the `noop` counter in the -<>. +This behavior increments the `noop` counter in the response body. `delete`:: Set `ctx.op = "delete"` if your script decides that the document must be deleted. The deletion will be reported in the `deleted` counter in the -<>. +response body. Setting `ctx.op` to any other value generates an error. Setting any other field in `ctx` generates an error. @@ -123,79 +90,55 @@ from its original location. You can also perform these operations on multiple indices and types at once, similar to the search API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("foo", "bar").source().setTypes("a", "b"); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-multi-index] -------------------------------------------------- If you provide a `routing` value then the process copies the routing value to the scroll query, limiting the process to the shards that match that routing value: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source().setRouting("cat"); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-routing] -------------------------------------------------- -`updateByQuery` can also use the <> feature by +`updateByQuery` can also use the ingest node by specifying a `pipeline` like this: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.setPipeline("hurray"); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-pipeline] -------------------------------------------------- [float] -[[docs-update-by-query-task-api]] +[[java-docs-update-by-query-task-api]] === Works with the Task API -You can fetch the status of all running update-by-query requests with the -<>: +You can fetch the status of all running update-by-query requests with the Task API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -ListTasksResponse tasksList = client.admin().cluster().prepareListTasks() - .setActions(UpdateByQueryAction.NAME).setDetailed(true).get(); - -for (TaskInfo info: tasksList.getTasks()) { - TaskId taskId = info.getTaskId(); - BulkByScrollTask.Status status = (BulkByScrollTask.Status) info.getStatus(); - // do stuff -} - +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-list-tasks] -------------------------------------------------- With the `TaskId` shown above you can look up the task directly: // 
provide API Example -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -GetTaskResponse get = client.admin().cluster().prepareGetTask(taskId).get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-get-task] -------------------------------------------------- [float] -[[docs-update-by-query-cancel-task-api]] +[[java-docs-update-by-query-cancel-task-api]] === Works with the Cancel Task API -Any Update By Query can be canceled using the <>: +Any Update By Query can be canceled using the Task Cancel API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -// Cancel all update-by-query requests -client.admin().cluster().prepareCancelTasks().setActions(UpdateByQueryAction.NAME).get().getTasks() -// Cancel a specific update-by-query request -client.admin().cluster().prepareCancelTasks().setTaskId(taskId).get().getTasks() +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-cancel-task] -------------------------------------------------- Use the `list tasks` API to find the value of `taskId`. @@ -204,14 +147,14 @@ Cancelling a request is typically a very fast process but can take up to a few s The task status API continues to list the task until the cancellation is complete. [float] -[[docs-update-by-query-rethrottle]] +[[java-docs-update-by-query-rethrottle]] === Rethrottling Use the `_rethrottle` API to change the value of `requests_per_second` on a running update: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -RethrottleAction.INSTANCE.newRequestBuilder(client).setTaskId(taskId).setRequestsPerSecond(2.0f).get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-rethrottle] -------------------------------------------------- Use the `list tasks` API to find the value of `taskId`. diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 002804cf6170e..4fb7db4c4abf6 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -132,6 +132,8 @@ and add it as a dependency. As an example, we will use the `slf4j-simple` logger :client-tests: {docdir}/../../server/src/test/java/org/elasticsearch/client/documentation +:client-reindex-tests: {docdir}/../../modules/reindex/src/test/java/org/elasticsearch/client/documentation + include::client.asciidoc[] include::docs.asciidoc[] diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java new file mode 100644 index 0000000000000..1f99f062d25af --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.BulkByScrollTask; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.ReindexRequestBuilder; +import org.elasticsearch.index.reindex.RethrottleAction; +import org.elasticsearch.index.reindex.UpdateByQueryAction; +import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Collections; + +public class ReindexDocumentationIT extends ESIntegTestCase { + + public void reindex() { + Client client = client(); + // tag::reindex1 + BulkByScrollResponse response = ReindexAction.INSTANCE.newRequestBuilder(client) + .destination("target_index") + .filter(QueryBuilders.matchQuery("category", "xzy")) // <1> + .get(); + // end::reindex1 + } + + public void updateByQuery() { + Client client = client(); + { + // tag::update-by-query + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index").abortOnVersionConflict(false); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query + } + { + // tag::update-by-query-filter + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index") + .filter(QueryBuilders.termQuery("level", "awesome")) + .size(1000) + .script(new Script(ScriptType.INLINE, "ctx._source.awesome = 'absolutely'", "painless", Collections.emptyMap())); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-filter + } + { + // tag::update-by-query-size + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index") + .source().setSize(500); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-size + } + { + // tag::update-by-query-sort + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index").size(100) + .source().addSort("cat", SortOrder.DESC); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-sort + } + { + // tag::update-by-query-script + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + 
updateByQuery.source("source_index") + .script(new Script( + ScriptType.INLINE, + "if (ctx._source.awesome == 'absolutely) {" + + " ctx.op='noop'" + + "} else if (ctx._source.awesome == 'lame') {" + + " ctx.op='delete'" + + "} else {" + + "ctx._source.awesome = 'absolutely'}", + "painless", + Collections.emptyMap())); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-script + } + { + // tag::update-by-query-multi-index + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("foo", "bar").source().setTypes("a", "b"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-multi-index + } + { + // tag::update-by-query-routing + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source().setRouting("cat"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-routing + } + { + // tag::update-by-query-pipeline + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.setPipeline("hurray"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-pipeline + } + { + // tag::update-by-query-list-tasks + ListTasksResponse tasksList = client.admin().cluster().prepareListTasks() + .setActions(UpdateByQueryAction.NAME).setDetailed(true).get(); + for (TaskInfo info: tasksList.getTasks()) { + TaskId taskId = info.getTaskId(); + BulkByScrollTask.Status status = (BulkByScrollTask.Status) info.getStatus(); + // do stuff + } + // end::update-by-query-list-tasks + } + { + TaskId taskId = null; + // tag::update-by-query-get-task + GetTaskResponse get = client.admin().cluster().prepareGetTask(taskId).get(); + // end::update-by-query-get-task + } + { + TaskId taskId = null; + // tag::update-by-query-cancel-task + // Cancel all update-by-query requests + client.admin().cluster().prepareCancelTasks().setActions(UpdateByQueryAction.NAME).get().getTasks(); + // Cancel a specific update-by-query request + client.admin().cluster().prepareCancelTasks().setTaskId(taskId).get().getTasks(); + // end::update-by-query-cancel-task + } + { + TaskId taskId = null; + // tag::update-by-query-rethrottle + RethrottleAction.INSTANCE.newRequestBuilder(client) + .setTaskId(taskId) + .setRequestsPerSecond(2.0f) + .get(); + // end::update-by-query-rethrottle + } + } + + public void deleteByQuery() { + Client client = client(); + // tag::delete-by-query-sync + BulkByScrollResponse response = DeleteByQueryAction.INSTANCE.newRequestBuilder(client) + .filter(QueryBuilders.matchQuery("gender", "male")) // <1> + .source("persons") // <2> + .get(); // <3> + long deleted = response.getDeleted(); // <4> + // end::delete-by-query-sync + + // tag::delete-by-query-async + DeleteByQueryAction.INSTANCE.newRequestBuilder(client) + .filter(QueryBuilders.matchQuery("gender", "male")) // <1> + .source("persons") // <2> + .execute(new ActionListener() { // <3> + @Override + public void onResponse(BulkByScrollResponse response) { + long deleted = response.getDeleted(); // <4> + } + @Override + public void onFailure(Exception e) { + // Handle the exception + } + }); + // end::delete-by-query-async + } + +} From f14146982f403cb7d80ed17f2b290cd1d99c693f Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 16 Mar 2018 10:20:56 +0100 Subject: [PATCH 47/89] Use removeTask instead of finishTask in PersistentTasksClusterService (#29055) The method 
`PersistentTasksClusterService.finishTask()` has been modified since it was
added and no longer uses any `removeOnCompletion` flag. Its behavior is now
similar to `removeTask()` and can simply be replaced by it. When a non-existing
task is removed, the cluster state update task will fail and its `source` will
still indicate `finish persistent task`/`remove persistent task`.
---
 .../PersistentTasksClusterService.java | 2 +-
 .../PersistentTasksCustomMetaData.java | 22 +++----------------
 .../PersistentTasksCustomMetaDataTests.java | 11 +---------
 3 files changed, 5 insertions(+), 30 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java
index 24d8c5f7be31a..7c395365c1b88 100644
--- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java
+++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java
@@ -117,7 +117,7 @@ public void completePersistentTask(String id, long allocationId, Exception failu
 public ClusterState execute(ClusterState currentState) throws Exception {
 PersistentTasksCustomMetaData.Builder tasksInProgress = builder(currentState);
 if (tasksInProgress.hasTask(id, allocationId)) {
- tasksInProgress.finishTask(id);
+ tasksInProgress.removeTask(id);
 return update(currentState, tasksInProgress);
 } else {
 if (tasksInProgress.hasTask(id)) {
diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java
index 25b3567ac395d..ee45eb8ffad28 100644
--- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java
+++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java
@@ -609,7 +609,7 @@ public Builder reassignTask(String taskId, Assignment assignment) {
 changed = true;
 tasks.put(taskId, new PersistentTask<>(taskInProgress, getNextAllocationId(), assignment));
 } else {
- throw new ResourceNotFoundException("cannot reassign task with id {" + taskId + "}, the task no longer exits");
+ throw new ResourceNotFoundException("cannot reassign task with id {" + taskId + "}, the task no longer exists");
 }
 return this;
 }
@@ -623,7 +623,7 @@ public Builder updateTaskStatus(String taskId, Status status) {
 changed = true;
 tasks.put(taskId, new PersistentTask<>(taskInProgress, status));
 } else {
- throw new ResourceNotFoundException("cannot update task with id {" + taskId + "}, the task no longer exits");
+ throw new ResourceNotFoundException("cannot update task with id {" + taskId + "}, the task no longer exists");
 }
 return this;
 }
@@ -635,23 +635,7 @@ public Builder removeTask(String taskId) {
 if (tasks.remove(taskId) != null) {
 changed = true;
 } else {
- throw new ResourceNotFoundException("cannot remove task with id {" + taskId + "}, the task no longer exits");
- }
- return this;
- }
-
- /**
- * Finishes the task
- * <p>
- * If the task is marked with removeOnCompletion flag, it is removed from the list, otherwise it is stopped.
- */
- public Builder finishTask(String taskId) {
- PersistentTask<?> taskInProgress = tasks.get(taskId);
- if (taskInProgress != null) {
- changed = true;
- tasks.remove(taskId);
- } else {
- throw new ResourceNotFoundException("cannot finish task with id {" + taskId + "}, the task no longer exits");
+ throw new ResourceNotFoundException("cannot remove task with id {" + taskId + "}, the task no longer exists");
 }
 return this;
 }
diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java
index 537fc21ed433f..67962b800d2cf 100644
--- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java
@@ -191,7 +191,7 @@ public void testBuilder() {
 }
 boolean changed = false;
 for (int j = 0; j < randomIntBetween(1, 10); j++) {
- switch (randomInt(4)) {
+ switch (randomInt(3)) {
 case 0:
 lastKnownTask = addRandomTask(builder);
 changed = true;
@@ -223,15 +223,6 @@ public void testBuilder() {
 expectThrows(ResourceNotFoundException.class, () -> builder.removeTask(fLastKnownTask));
 }
 break;
- case 4:
- if (builder.hasTask(lastKnownTask)) {
- changed = true;
- builder.finishTask(lastKnownTask);
- } else {
- String fLastKnownTask = lastKnownTask;
- expectThrows(ResourceNotFoundException.class, () -> builder.finishTask(fLastKnownTask));
- }
- break;
 }
 }
 assertEquals(changed, builder.isChanged());
From 42c7c752980c7ab5f842de3fcb9c16a4783274a5 Mon Sep 17 00:00:00 2001
From: Martijn Laarman
Date: Fri, 16 Mar 2018 12:28:24 +0100
Subject: [PATCH 48/89] Fix starting on Windows from another drive (#29086)

The cd command on Windows has an oddity regarding changing directories.
If the drive of the current directory is a different drive than that of
the directory that was passed to the cd command, cd acts in query mode
and does not change the current directory. Instead, a flag is needed to
put the cd command into set mode so that the directory actually changes.
This causes a problem when starting Elasticsearch from a directory
different from the one where it is installed, and this commit fixes the
issue.
---
 distribution/src/bin/elasticsearch.bat | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat
index f9f668fc61538..e0f52c54c627f 100644
--- a/distribution/src/bin/elasticsearch.bat
+++ b/distribution/src/bin/elasticsearch.bat
@@ -50,7 +50,7 @@ if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" (
 exit /b 1
 )

-cd "%ES_HOME%"
+cd /d "%ES_HOME%"

%JAVA% %ES_JAVA_OPTS% -Delasticsearch -Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams!
endlocal
From 986e518170e18ce8d540753be24c8020baf4cb08 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Fri, 16 Mar 2018 11:39:46 +0000
Subject: [PATCH 49/89] Store offsets in index prefix fields when stored in the
 parent field (#29067)

The index prefix field is normally indexed as docs-only, given that it
cannot be used in phrases. However, in the case that the parent field has
been indexed with offsets, or has term-vector offsets, we should also
store this in the index prefix field for highlighting.
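Concretely, the rule being added mirrors the parent field's offset-related
settings onto the `_index_prefix` subfield. A condensed sketch of that rule
(simplified names; the real logic lives in `TextFieldMapper.Builder#build`,
shown in the diff below):

    // Sketch only: copy offset information from the parent text field onto
    // the prefix subfield so that a future highlighter can rely on it.
    if (parent.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
        prefixFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    }
    if (parent.storeTermVectorOffsets()) {
        prefixFieldType.setStoreTermVectorOffsets(true);
    }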
Note that this commit does not implement highlighting on prefix fields, but rather ensures that future work can implement this without a backwards-break in index data. Closes #28994 --- .../index/mapper/TextFieldMapper.java | 18 ++++- .../index/mapper/TextFieldMapperTests.java | 75 +++++++++++++++++++ 2 files changed, 89 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 799698ac776d7..e2f8eb4e64f63 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; @@ -152,11 +153,20 @@ public TextFieldMapper build(BuilderContext context) { fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap)); } setupFieldType(context); - if (prefixFieldType != null && fieldType().isSearchable() == false) { - throw new IllegalArgumentException("Cannot set index_prefix on unindexed field [" + name() + "]"); + PrefixFieldMapper prefixMapper = null; + if (prefixFieldType != null) { + if (fieldType().isSearchable() == false) { + throw new IllegalArgumentException("Cannot set index_prefix on unindexed field [" + name() + "]"); + } + if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { + prefixFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + } + if (fieldType.storeTermVectorOffsets()) { + prefixFieldType.setStoreTermVectorOffsets(true); + } + prefixFieldType.setAnalyzer(fieldType.indexAnalyzer()); + prefixMapper = new PrefixFieldMapper(prefixFieldType, context.indexSettings()); } - PrefixFieldMapper prefixMapper = prefixFieldType == null ? 
null - : new PrefixFieldMapper(prefixFieldType.setAnalyzer(fieldType.indexAnalyzer()), context.indexSettings()); return new TextFieldMapper( name, fieldType, defaultFieldType, positionIncrementGap, prefixMapper, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 37c82ccc94616..e63ff69f5096d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -595,6 +596,80 @@ public void testEmptyName() throws IOException { assertThat(e.getMessage(), containsString("name cannot be empty string")); } + public void testIndexPrefixIndexTypes() throws IOException { + QueryShardContext queryShardContext = indexService.newQueryShardContext( + randomInt(20), null, () -> { + throw new UnsupportedOperationException(); + }, null); + + { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "english") + .startObject("index_prefix").endObject() + .field("index_options", "offsets") + .endObject().endObject().endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldType ft = prefix.fieldType; + assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, ft.indexOptions()); + } + + { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "english") + .startObject("index_prefix").endObject() + .field("index_options", "positions") + .endObject().endObject().endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldType ft = prefix.fieldType; + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + assertFalse(ft.storeTermVectors()); + } + + { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "english") + .startObject("index_prefix").endObject() + .field("term_vector", "with_positions_offsets") + .endObject().endObject().endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldType ft = prefix.fieldType; + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + assertTrue(ft.storeTermVectorOffsets()); + } + + { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "english") + .startObject("index_prefix").endObject() + .field("term_vector", "with_positions") + .endObject().endObject().endObject().endObject().string(); + + DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); + + FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldType ft = prefix.fieldType; + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + assertFalse(ft.storeTermVectorOffsets()); + } + } + public void testIndexPrefixMapping() throws IOException { QueryShardContext queryShardContext = indexService.newQueryShardContext( From c713d62f88edf72a2357687d10eca8881b87de7d Mon Sep 17 00:00:00 2001 From: Jiri Tyr Date: Fri, 16 Mar 2018 13:13:17 +0000 Subject: [PATCH 50/89] [Docs] Fix link to Grok patterns (#29088) --- docs/reference/ingest/ingest-node.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 3c30648c701dc..c53fe122c50d3 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1216,8 +1216,8 @@ expression that supports aliased expressions that can be reused. This tool is perfect for syslog logs, apache and other webserver logs, mysql logs, and in general, any log format that is generally written for humans and not computer consumption. -This processor comes packaged with over -https://github.com/elastic/elasticsearch/tree/master/modules/ingest-common/src/main/resources/patterns[120 reusable patterns]. +This processor comes packaged with many +https://github.com/elastic/elasticsearch/blob/{branch}/libs/grok/src/main/resources/patterns[reusable patterns]. If you need help building patterns to match your logs, you will find the {kibana-ref}/xpack-grokdebugger.html[Grok Debugger] tool quite useful! The Grok Debugger is an {xpack} feature under the Basic License and is therefore *free to use*. The Grok Constructor at is also a useful tool. 
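As background for the patterns referenced above: they are consumed by the grok
library that lives in `libs/grok`. A minimal sketch of how a pattern bank and a
grok expression fit together (the toy bank and sample input are invented for
illustration; the API shape follows this codebase's
`org.elasticsearch.grok.Grok`):

    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.grok.Grok;

    public class GrokSketch {
        public static void main(String[] args) {
            // Toy pattern bank; the bundled patterns file ships many entries like these.
            Map<String, String> bank = new HashMap<>();
            bank.put("WORD", "\\w+");
            bank.put("NUMBER", "\\d+");

            // %{NAME:field} captures the matched text under the key "field".
            Grok grok = new Grok(bank, "%{WORD:verb} %{NUMBER:count}");
            Map<String, Object> captures = grok.captures("retried 3");
            System.out.println(captures); // {verb=retried, count=3}
        }
    }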
From a2d5cf651448ea5394e402712f70b17240bcb0b6 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 16 Mar 2018 13:17:49 +0000 Subject: [PATCH 51/89] Compilation fix for #29067 --- .../index/mapper/TextFieldMapperTests.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index e63ff69f5096d..459fcb1d37731 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -603,13 +603,13 @@ public void testIndexPrefixIndexTypes() throws IOException { }, null); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .field("index_options", "offsets") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -619,13 +619,13 @@ public void testIndexPrefixIndexTypes() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .field("index_options", "positions") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -636,13 +636,13 @@ public void testIndexPrefixIndexTypes() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .field("term_vector", "with_positions_offsets") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -653,13 +653,13 @@ public void testIndexPrefixIndexTypes() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .field("term_vector", "with_positions") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); From 695ec05160efef2186796366d7cd5140dbfeb793 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 16 Mar 2018 14:34:33 +0100 Subject: [PATCH 52/89] Clarify that dates are always rendered as strings. 
(#29093) Even in the case that the date was originally supplied as a long in the JSON document. Closes #26504 --- docs/reference/mapping/types/date.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index bdce1d6c46fce..1beb64083b8c8 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -10,6 +10,13 @@ JSON doesn't have a date datatype, so dates in Elasticsearch can either be: Internally, dates are converted to UTC (if the time-zone is specified) and stored as a long number representing milliseconds-since-the-epoch. +Queries on dates are internally converted to range queries on this long +representation, and the result of aggregations and stored fields is converted +back to a string depending on the date format that is associated with the field. + +NOTE: Dates will always be rendered as strings, even if they were initially +supplied as a long in the JSON document. + Date formats can be customised, but if no `format` is specified then it uses the default: From 0755ff425f74ca6edaef6def985ef3bfa53a88b9 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 16 Mar 2018 14:39:36 +0100 Subject: [PATCH 53/89] Clarify requirements of strict date formats. (#29090) Closes #29014 --- docs/reference/mapping/params/format.asciidoc | 18 ++++++++++++------ .../time/format/StrictISODateTimeFormat.java | 2 +- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc index 85cad16cb5390..7d621f875224e 100644 --- a/docs/reference/mapping/params/format.asciidoc +++ b/docs/reference/mapping/params/format.asciidoc @@ -44,12 +44,18 @@ http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[i [[built-in-date-formats]] ==== Built In Formats -Most of the below dates have a `strict` companion dates, which means, that -year, month and day parts of the week must have prepending zeros in order -to be valid. This means, that a date like `5/11/1` would not be valid, but -you would need to specify the full date, which would be `2005/11/01` in this -example. So instead of `date_optional_time` you would need to specify -`strict_date_optional_time`. +Most of the below formats have a `strict` companion format, which means that +year, month and day parts of the week must use respectively 4, 2 and 2 digits +exactly, potentially prepending zeros. For instance a date like `5/11/1` would +be considered invalid and would need to be rewritten to `2005/11/01` to be +accepted by the date parser. + +To use them, you need to prepend `strict_` to the name of the date format, for +instance `strict_date_optional_time` instead of `date_optional_time`. + +These strict date formats are especially useful when +<> in order to make sure to +not accidentally map irrelevant strings as dates. 
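To make the strictness rule concrete, here is a small illustrative sketch using
the codebase's Joda wrapper (`org.elasticsearch.common.joda.Joda`); treat it as
an illustration of the behavior described above rather than as a test:

    import org.elasticsearch.common.joda.FormatDateTimeFormatter;
    import org.elasticsearch.common.joda.Joda;

    public class StrictDateSketch {
        public static void main(String[] args) {
            // The lenient ISO format accepts variable-width fields.
            FormatDateTimeFormatter lenient = Joda.forPattern("date_optional_time");
            lenient.parser().parseDateTime("5-11-1"); // accepted, parsed as year 5

            // The strict companion requires 4/2/2 digits for year/month/day.
            FormatDateTimeFormatter strict = Joda.forPattern("strict_date_optional_time");
            strict.parser().parseDateTime("2005-11-01");  // accepted
            // strict.parser().parseDateTime("5-11-1");   // would throw IllegalArgumentException
        }
    }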
The following table lists all the default ISO formats supported:

diff --git a/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java b/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java
index 4533b78b84add..aacb7cf330687 100644
--- a/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java
+++ b/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java
@@ -27,7 +27,7 @@
 * class is named ISODatetimeFormat
 *
 * However there has been done one huge modification in several methods, which forces the date
- * year to be at least n digits, so that a year like "5" is invalid and must be "0005"
+ * year to be exactly n digits, so that a year like "5" is invalid and must be "0005"
 *
 * All methods have been marked with an "// ES change" commentary
 *
From 8342ba91084c8c106f2e258c887959d83768dee5 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 16 Mar 2018 10:50:37 -0400
Subject: [PATCH 54/89] Add logging output when starting Wildfly

This commit (which will be reverted soon) adds logging on the output of
starting Wildfly. This is needed to debug an issue with Wildfly not
starting in CI.
---
 qa/wildfly/build.gradle | 1 +
 1 file changed, 1 insertion(+)

diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle
index b55aa68573c1c..6c6bf5d9b31b9 100644
--- a/qa/wildfly/build.gradle
+++ b/qa/wildfly/build.gradle
@@ -122,6 +122,7 @@ task startWildfly {
 String line
 int httpPort = 0
 while ((line = br.readLine()) != null) {
+ logger.info(line)
 if (line.matches('.*Undertow HTTP listener default listening on .*:\\d+$')) {
 assert httpPort == 0
 final int index = line.lastIndexOf(":")
From 2c1ef3d4c6043b6ddc02f4b35461ac766bf9bc3b Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Fri, 16 Mar 2018 11:16:30 -0400
Subject: [PATCH 55/89] Do not renew sync-id if all shards are sealed (#29103)

Today the synced-flush always issues a new sync-id even when no shard has
changed since the last seal. This causes active shards to have a different
sync-id from offline shards even though all were sealed and there have been
no writes since then.

This commit adjusts synced-flush so that it does not renew the sync-id if
all active shards are already sealed with the same sync-id.

Closes #27838
---
 .../index/engine/CommitStats.java | 7 ++
 .../indices/flush/SyncedFlushService.java | 67 +++++++++++++++----
 .../elasticsearch/indices/flush/FlushIT.java | 49 ++++++++++++++
 3 files changed, 110 insertions(+), 13 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java b/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java
index b98c5a0db5729..21025046b8c57 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/CommitStats.java
@@ -76,6 +76,13 @@ public Engine.CommitId getRawCommitId() {
 return new Engine.CommitId(Base64.getDecoder().decode(id));
 }

+ /**
+ * The synced-flush id of the commit, if present.
+ */ + public String syncId() { + return userData.get(InternalEngine.SYNC_COMMIT_ID); + } + /** * Returns the number of documents in the in this commit */ diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6a3618e668950..dedd577954d70 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -34,6 +34,8 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -65,6 +67,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; @@ -216,9 +219,16 @@ public void onResponse(InFlightOpsResponse response) { if (inflight != 0) { actionListener.onResponse(new ShardsSyncedFlushResult(shardId, totalShards, "[" + inflight + "] ongoing operations on primary")); } else { - // 3. now send the sync request to all the shards - String syncId = UUIDs.randomBase64UUID(); - sendSyncRequests(syncId, activeShards, state, presyncResponses, shardId, totalShards, actionListener); + // 3. now send the sync request to all the shards; + final String sharedSyncId = sharedExistingSyncId(presyncResponses); + if (sharedSyncId != null) { + assert presyncResponses.values().stream().allMatch(r -> r.existingSyncId.equals(sharedSyncId)) : + "Not all shards have the same existing sync id [" + sharedSyncId + "], responses [" + presyncResponses + "]"; + reportSuccessWithExistingSyncId(shardId, sharedSyncId, activeShards, totalShards, presyncResponses, actionListener); + }else { + String syncId = UUIDs.randomBase64UUID(); + sendSyncRequests(syncId, activeShards, state, presyncResponses, shardId, totalShards, actionListener); + } } } @@ -244,6 +254,33 @@ public void onFailure(Exception e) { } } + private String sharedExistingSyncId(Map preSyncedFlushResponses) { + String existingSyncId = null; + for (PreSyncedFlushResponse resp : preSyncedFlushResponses.values()) { + if (Strings.isNullOrEmpty(resp.existingSyncId)) { + return null; + } + if (existingSyncId == null) { + existingSyncId = resp.existingSyncId; + } + if (existingSyncId.equals(resp.existingSyncId) == false) { + return null; + } + } + return existingSyncId; + } + + private void reportSuccessWithExistingSyncId(ShardId shardId, String existingSyncId, List shards, int totalShards, + Map preSyncResponses, ActionListener listener) { + final Map results = new HashMap<>(); + for (final ShardRouting shard : shards) { + if (preSyncResponses.containsKey(shard.currentNodeId())) { + results.put(shard, new ShardSyncedFlushResponse()); + } + } + listener.onResponse(new ShardsSyncedFlushResult(shardId, existingSyncId, totalShards, results)); + } + final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) { final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.getIndexName()); if (indexRoutingTable == null) { @@ -438,7 +475,7 @@ private PreSyncedFlushResponse 
performPreSyncedFlush(PreShardSyncedFlushRequest final CommitStats commitStats = indexShard.commitStats(); final Engine.CommitId commitId = commitStats.getRawCommitId(); logger.trace("{} pre sync flush done. commit id {}, num docs {}", request.shardId(), commitId, commitStats.getNumDocs()); - return new PreSyncedFlushResponse(commitId, commitStats.getNumDocs()); + return new PreSyncedFlushResponse(commitId, commitStats.getNumDocs(), commitStats.syncId()); } private ShardSyncedFlushResponse performSyncedFlush(ShardSyncedFlushRequest request) { @@ -512,21 +549,15 @@ static final class PreSyncedFlushResponse extends TransportResponse { Engine.CommitId commitId; int numDocs; + @Nullable String existingSyncId = null; PreSyncedFlushResponse() { } - PreSyncedFlushResponse(Engine.CommitId commitId, int numDocs) { + PreSyncedFlushResponse(Engine.CommitId commitId, int numDocs, String existingSyncId) { this.commitId = commitId; this.numDocs = numDocs; - } - - Engine.CommitId commitId() { - return commitId; - } - - int numDocs() { - return numDocs; + this.existingSyncId = existingSyncId; } boolean includeNumDocs(Version version) { @@ -537,6 +568,10 @@ boolean includeNumDocs(Version version) { } } + boolean includeExistingSyncId(Version version) { + return version.onOrAfter(Version.V_7_0_0_alpha1); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -546,6 +581,9 @@ public void readFrom(StreamInput in) throws IOException { } else { numDocs = UNKNOWN_NUM_DOCS; } + if (includeExistingSyncId(in.getVersion())) { + existingSyncId = in.readOptionalString(); + } } @Override @@ -555,6 +593,9 @@ public void writeTo(StreamOutput out) throws IOException { if (includeNumDocs(out.getVersion())) { out.writeInt(numDocs); } + if (includeExistingSyncId(out.getVersion())) { + out.writeOptionalString(existingSyncId); + } } } diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index c56602f789e5e..934222f9e726a 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.Term; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; @@ -29,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -59,6 +61,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class FlushIT extends ESIntegTestCase { @@ -280,4 +283,50 @@ public void testSyncedFlushSkipOutOfSyncReplicas() throws Exception { assertThat(fullResult.totalShards(), equalTo(numberOfReplicas + 1)); assertThat(fullResult.successfulShards(), equalTo(numberOfReplicas + 1)); } 
+ + public void testDoNotRenewSyncedFlushWhenAllSealed() throws Exception { + internalCluster().ensureAtLeastNumDataNodes(between(2, 3)); + final int numberOfReplicas = internalCluster().numDataNodes() - 1; + assertAcked( + prepareCreate("test").setSettings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)).get() + ); + ensureGreen(); + final Index index = clusterService().state().metaData().index("test").getIndex(); + final ShardId shardId = new ShardId(index, 0); + final int numDocs = between(1, 10); + for (int i = 0; i < numDocs; i++) { + index("test", "doc", Integer.toString(i)); + } + final ShardsSyncedFlushResult firstSeal = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), shardId); + assertThat(firstSeal.successfulShards(), equalTo(numberOfReplicas + 1)); + // Do not renew synced-flush + final ShardsSyncedFlushResult secondSeal = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), shardId); + assertThat(secondSeal.successfulShards(), equalTo(numberOfReplicas + 1)); + assertThat(secondSeal.syncId(), equalTo(firstSeal.syncId())); + // Shards were updated, renew synced flush. + final int moreDocs = between(1, 10); + for (int i = 0; i < moreDocs; i++) { + index("test", "doc", Integer.toString(i)); + } + final ShardsSyncedFlushResult thirdSeal = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), shardId); + assertThat(thirdSeal.successfulShards(), equalTo(numberOfReplicas + 1)); + assertThat(thirdSeal.syncId(), not(equalTo(firstSeal.syncId()))); + // Manually remove or change sync-id, renew synced flush. + IndexShard shard = internalCluster().getInstance(IndicesService.class, randomFrom(internalCluster().nodesInclude("test"))) + .getShardOrNull(shardId); + if (randomBoolean()) { + // Change the existing sync-id of a single shard. + shard.syncFlush(UUIDs.randomBase64UUID(random()), shard.commitStats().getRawCommitId()); + assertThat(shard.commitStats().syncId(), not(equalTo(thirdSeal.syncId()))); + } else { + // Flush will create a new commit without sync-id + shard.flush(new FlushRequest(shardId.getIndexName()).force(true).waitIfOngoing(true)); + assertThat(shard.commitStats().syncId(), nullValue()); + } + final ShardsSyncedFlushResult forthSeal = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), shardId); + assertThat(forthSeal.successfulShards(), equalTo(numberOfReplicas + 1)); + assertThat(forthSeal.syncId(), not(equalTo(thirdSeal.syncId()))); + } } From 158bb23887733fd652435167b809d57f00fecc3d Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 16 Mar 2018 15:35:40 +0000 Subject: [PATCH 56/89] Remove usages of obsolete settings (#29087) The settings `indices.recovery.concurrent_streams` and `indices.recovery.concurrent_small_file_streams` were removed in f5e4cd46164630e09f308ed78c512eea8bda8a05. This commit removes their last traces from the codebase. 
--- .../indices/recovery/RecoverySourceHandlerTests.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 68d789d91c2ab..d1dbaf6bc89fe 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -332,10 +332,8 @@ public void close() throws IOException { } - public void testHandleExceptinoOnSendSendFiles() throws Throwable { - Settings settings = Settings.builder().put("indices.recovery.concurrent_streams", 1). - put("indices.recovery.concurrent_small_file_streams", 1).build(); - final RecoverySettings recoverySettings = new RecoverySettings(settings, service); + public void testHandleExceptionOnSendFiles() throws Throwable { + final RecoverySettings recoverySettings = new RecoverySettings(Settings.EMPTY, service); final StartRecoveryRequest request = getStartRecoveryRequest(); Path tempDir = createTempDir(); Store store = newStore(tempDir, false); From 708c06896b5ef59e1987a917b57820c2a234af12 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 16 Mar 2018 16:50:58 +0100 Subject: [PATCH 57/89] Explain why Elasticsearch doesn't support incremental resharding. (#29082) I have seen this question a couple times already, most recently at https://twitter.com/dimosr7/status/973872744965332993 I tried to keep the explanation as simple as I could, which is not always easy as this is a matter of trade-offs. --- docs/reference/indices/split-index.asciidoc | 34 ++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index 4a7e81e4f4de8..4a6fc16103bba 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -31,6 +31,8 @@ index may by split into an arbitrary number of shards greater than 1. The properties of the default number of routing shards will then apply to the newly split index. +[float] +=== How does splitting work? Splitting works as follows: @@ -47,6 +49,36 @@ Splitting works as follows: * Finally, it recovers the target index as though it were a closed index which had just been re-opened. +[float] +=== Why doesn't Elasticsearch support incremental resharding? + +Going from `N` shards to `N+1` shards, aka. incremental resharding, is indeed a +feature that is supported by many key-value stores. Adding a new shard and +pushing new data to this new shard only is not an option: this would likely be +an indexing bottleneck, and figuring out which shard a document belongs to +given its `_id`, which is necessary for get, delete and update requests, would +become quite complex. This means that we need to rebalance existing data using +a different hashing scheme. + +The most common way that key-value stores do this efficiently is by using +consistent hashing. Consistent hashing only requires `1/N`-th of the keys to +be relocated when growing the number of shards from `N` to `N+1`. However +Elasticsearch's unit of storage, shards, are Lucene indices. Because of their +search-oriented data structure, taking a significant portion of a Lucene index, +be it only 5% of documents, deleting them and indexing them on another shard +typically comes with a much higher cost than with a key-value store. 
This cost +is kept reasonable when growing the number of shards by a multiplicative factor +as described in the above section: this allows Elasticsearch to perform the +split locally, which in-turn allows to perform the split at the index level +rather than reindexing documents that need to move, as well as using hard links +for efficient file copying. + +In the case of append-only data, it is possible to get more flexibility by +creating a new index and pushing new data to it, while adding an alias that +covers both the old and the new index for read operations. Assuming that the +old and new indices have respectively +M+ and +N+ shards, this has no overhead +compared to searching an index that would have +M+N+ shards. + [float] === Preparing an index for splitting @@ -171,4 +203,4 @@ replicas and may decide to relocate the primary shard to another node. Because the split operation creates a new index to split the shards to, the <> setting -on index creation applies to the split index action as well. \ No newline at end of file +on index creation applies to the split index action as well. From 762226bee90006183e5eb2d9f714579a5d96c5c2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 16 Mar 2018 12:46:39 -0400 Subject: [PATCH 58/89] Docs: Support triple quotes (#28915) Adds support for triple quoted strings to the documentation test generator. Kibana's CONSOLE tool has supported them for a year but we were unable to use them in Elasticsearch's docs because the process that converts example snippets into tests couldn't handle this. This change adds code to convert them into standard JSON so we can pass them to Elasticsearch. --- .../doc/RestTestsFromSnippetsTask.groovy | 40 +++++++++++++++ .../doc/RestTestsFromSnippetsTaskTest.groovy | 50 +++++++++++++++++++ .../painless-getting-started.asciidoc | 47 ++++++++++++++--- docs/reference/ingest/ingest-node.asciidoc | 7 ++- 4 files changed, 135 insertions(+), 9 deletions(-) create mode 100644 buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index 8491c5b45920e..95ec00beca7e0 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle.doc +import groovy.transform.PackageScope import org.elasticsearch.gradle.doc.SnippetsTask.Snippet import org.gradle.api.InvalidUserDataException import org.gradle.api.tasks.Input @@ -99,6 +100,43 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { return snippet.language == 'js' || snippet.curl } + /** + * Converts Kibana's block quoted strings into standard JSON. These + * {@code """} delimited strings can be embedded in CONSOLE and can + * contain newlines and {@code "} without the normal JSON escaping. + * This has to add it. + */ + @PackageScope + static String replaceBlockQuote(String body) { + int start = body.indexOf('"""'); + if (start < 0) { + return body + } + /* + * 1.3 is a fairly wild guess of the extra space needed to hold + * the escaped string. 
+ */ + StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); + int startOfNormal = 0; + while (start >= 0) { + int end = body.indexOf('"""', start + 3); + if (end < 0) { + throw new InvalidUserDataException( + "Invalid block quote starting at $start in:\n$body") + } + result.append(body.substring(startOfNormal, start)); + result.append('"'); + result.append(body.substring(start + 3, end) + .replace('"', '\\"') + .replace("\n", "\\n")); + result.append('"'); + startOfNormal = end + 3; + start = body.indexOf('"""', startOfNormal); + } + result.append(body.substring(startOfNormal)); + return result.toString(); + } + private class TestBuilder { private static final String SYNTAX = { String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ @@ -259,6 +297,8 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { if (body != null) { // Throw out the leading newline we get from parsing the body body = body.substring(1) + // Replace """ quoted strings with valid json ones + body = replaceBlockQuote(body) current.println(" body: |") body.eachLine { current.println(" $it") } } diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy new file mode 100644 index 0000000000000..d0a7a2825e6f2 --- /dev/null +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle.doc + +import org.elasticsearch.gradle.doc.SnippetsTask.Snippet +import org.gradle.api.InvalidUserDataException + +import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote + +class RestTestFromSnippetsTaskTest extends GroovyTestCase { + void testInvalidBlockQuote() { + String input = "\"foo\": \"\"\"bar\""; + String message = shouldFail({ replaceBlockQuote(input) }); + assertEquals("Invalid block quote starting at 7 in:\n$input", message); + } + + void testSimpleBlockQuote() { + assertEquals("\"foo\": \"bort baz\"", + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); + } + + void testMultipleBlockQuotes() { + assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"", + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"")); + } + + void testEscapingInBlockQuote() { + assertEquals("\"foo\": \"bort\\\" baz\"", + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); + assertEquals("\"foo\": \"bort\\n baz\"", + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); + } +} diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc index 7898631416b6b..e82e14b043840 100644 --- a/docs/painless/painless-getting-started.asciidoc +++ b/docs/painless/painless-getting-started.asciidoc @@ -53,7 +53,13 @@ GET hockey/_search "script_score": { "script": { "lang": "painless", - "source": "int total = 0; for (int i = 0; i < doc['goals'].length; ++i) { total += doc['goals'][i]; } return total;" + "source": """ + int total = 0; + for (int i = 0; i < doc['goals'].length; ++i) { + total += doc['goals'][i]; + } + return total; + """ } } } @@ -75,7 +81,13 @@ GET hockey/_search "total_goals": { "script": { "lang": "painless", - "source": "int total = 0; for (int i = 0; i < doc['goals'].length; ++i) { total += doc['goals'][i]; } return total;" + "source": """ + int total = 0; + for (int i = 0; i < doc['goals'].length; ++i) { + total += doc['goals'][i]; + } + return total; + """ } } } @@ -157,7 +169,10 @@ POST hockey/player/1/_update { "script": { "lang": "painless", - "source": "ctx._source.last = params.last; ctx._source.nick = params.nick", + "source": """ + ctx._source.last = params.last; + ctx._source.nick = params.nick + """, "params": { "last": "gaudreau", "nick": "hockey" @@ -228,7 +243,13 @@ POST hockey/player/_update_by_query { "script": { "lang": "painless", - "source": "if (ctx._source.last =~ /b/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}" + "source": """ + if (ctx._source.last =~ /b/) { + ctx._source.last += "matched"; + } else { + ctx.op = "noop"; + } + """ } } ---------------------------------------------------------------- @@ -243,7 +264,13 @@ POST hockey/player/_update_by_query { "script": { "lang": "painless", - "source": "if (ctx._source.last ==~ /[^aeiou].*[aeiou]/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}" + "source": """ + if (ctx._source.last ==~ /[^aeiou].*[aeiou]/) { + ctx._source.last += "matched"; + } else { + ctx.op = "noop"; + } + """ } } ---------------------------------------------------------------- @@ -296,7 +323,10 @@ POST hockey/player/_update_by_query { "script": { "lang": "painless", - "source": "ctx._source.last = ctx._source.last.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))" + "source": """ + ctx._source.last = ctx._source.last.replaceAll(/[aeiou]/, m -> + m.group().toUpperCase(Locale.ROOT)) + """ } } ---------------------------------------------------------------- @@ -311,7 
+341,10 @@ POST hockey/player/_update_by_query { "script": { "lang": "painless", - "source": "ctx._source.last = ctx._source.last.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))" + "source": """ + ctx._source.last = ctx._source.last.replaceFirst(/[aeiou]/, m -> + m.group().toUpperCase(Locale.ROOT)) + """ } } ---------------------------------------------------------------- diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index c53fe122c50d3..b31fc1ef5ea1d 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -563,7 +563,7 @@ to set the index that the document will be indexed into: -------------------------------------------------- // NOTCONSOLE -Dynamic field names are also supported. This example sets the field named after the +Dynamic field names are also supported. This example sets the field named after the value of `service` to the value of the field `code`: [source,js] @@ -1829,7 +1829,10 @@ PUT _ingest/pipeline/my_index "processors": [ { "script": { - "source": " ctx._index = 'my_index'; ctx._type = '_doc' " + "source": """ + ctx._index = 'my_index'; + ctx._type = '_doc'; + """ } } ] From 9b41917266cb25496f8e25835e90f208e9c6d3bf Mon Sep 17 00:00:00 2001 From: Deb Adair Date: Fri, 16 Mar 2018 10:21:14 -0700 Subject: [PATCH 59/89] [DOCS] Removed prerelease footnote from upgrade table. --- docs/reference/upgrade.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/upgrade.asciidoc b/docs/reference/upgrade.asciidoc index 4b03364657b21..2ca2a01249629 100644 --- a/docs/reference/upgrade.asciidoc +++ b/docs/reference/upgrade.asciidoc @@ -44,7 +44,7 @@ required. |5.6 |6.x |<> footnoteref:[reindexfn, You must delete or reindex any indices created in 2.x before upgrading.] |5.0-5.5 |6.x |<> footnoteref:[reindexfn] |<5.x |6.x |<> -|6.x |6.y |<> (where `y > x`) footnote:[Upgrading from a 6.0.0 pre GA version requires a full cluster restart.] +|6.x |6.y |<> (where `y > x`) |======================================================================= [IMPORTANT] From 6bf742dd1b5635cb26c1dc7abb3254df3ea18f2d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 16 Mar 2018 14:34:36 -0400 Subject: [PATCH 60/89] Fix EsAbortPolicy to conform to API (#29075) The rejected execution handler API says that rejectedExecution(Runnable, ThreadPoolExecutor) throws a RejectedExecutionException if the task must be rejected due to capacity on the executor. We do throw something that smells like a RejectedExecutionException (it is named EsRejectedExecutionException) yet we violate the API because EsRejectedExecutionException is not a RejectedExecutionException. This has caused problems before where we try to catch RejectedExecution when invoking rejectedExecution but this causes EsRejectedExecutionException to go uncaught. This commit addresses this by modifying EsRejectedExecutionException to extend RejectedExecutionException. 
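The practical effect of the type change is that ordinary JDK-style rejection
handling now sees Elasticsearch rejections too. A minimal sketch (the executor
and task here are placeholders):

    import java.util.concurrent.Executor;
    import java.util.concurrent.RejectedExecutionException;

    public class RejectionSketch {
        static void submit(Executor executor, Runnable task) {
            try {
                executor.execute(task);
            } catch (RejectedExecutionException e) {
                // Before this change, EsRejectedExecutionException did not extend
                // RejectedExecutionException, so rejections from Elasticsearch
                // thread pools bypassed a catch block like this one; now they are
                // handled here like any other rejection.
            }
        }
    }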
--- .../reindex/AsyncBulkByScrollActionTests.java | 4 ++- .../elasticsearch/ElasticsearchException.java | 3 +-- .../org/elasticsearch/ExceptionsHelper.java | 3 +++ .../common/io/stream/StreamInput.java | 11 ++++++++ .../common/io/stream/StreamOutput.java | 24 ++++++++++++++++-- .../EsRejectedExecutionException.java | 25 ++----------------- .../ElasticsearchExceptionTests.java | 2 ++ .../ExceptionSerializationTests.java | 2 +- .../elasticsearch/ExceptionsHelperTests.java | 7 ++++++ 9 files changed, 52 insertions(+), 29 deletions(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index db259de411165..17345f5c85b65 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -115,6 +115,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -330,7 +331,8 @@ public ScheduledFuture schedule(TimeValue delay, String name, Runnable comman ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null); simulateScrollResponse(new DummyAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 10, response); ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); - assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); + assertThat(e.getCause(), instanceOf(EsRejectedExecutionException.class)); + assertThat(e.getCause(), hasToString(containsString("test"))); assertThat(client.scrollsCleared, contains(scrollId)); // When the task is rejected we don't increment the throttled timer diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index ed20f52754dd4..bfa37808402c4 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -827,8 +827,7 @@ private enum ElasticsearchExceptionHandle { org.elasticsearch.indices.IndexTemplateMissingException::new, 57, UNKNOWN_VERSION_ADDED), SEND_REQUEST_TRANSPORT_EXCEPTION(org.elasticsearch.transport.SendRequestTransportException.class, org.elasticsearch.transport.SendRequestTransportException::new, 58, UNKNOWN_VERSION_ADDED), - ES_REJECTED_EXECUTION_EXCEPTION(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, - org.elasticsearch.common.util.concurrent.EsRejectedExecutionException::new, 59, UNKNOWN_VERSION_ADDED), + // 59 used to be EsRejectedExecutionException // 60 used to be for EarlyTerminationException // 61 used to be for RoutingValidationException NOT_SERIALIZABLE_EXCEPTION_WRAPPER(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 05ac4d942b35e..0427685b8ef4f 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; import org.elasticsearch.rest.RestStatus; @@ -67,6 +68,8 @@ public static RestStatus status(Throwable t) { return ((ElasticsearchException) t).status(); } else if (t instanceof IllegalArgumentException) { return RestStatus.BAD_REQUEST; + } else if (t instanceof EsRejectedExecutionException) { + return RestStatus.TOO_MANY_REQUESTS; } } return RestStatus.INTERNAL_SERVER_ERROR; diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index a5f01f74ed70c..886a61b29c1ae 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -747,6 +748,13 @@ public T readException() throws IOException { switch (key) { case 0: final int ord = readVInt(); + // TODO: remove the if branch when master is bumped to 8.0.0 + assert Version.CURRENT.major < 8; + if (ord == 59) { + final ElasticsearchException ex = new ElasticsearchException(this); + final boolean isExecutorShutdown = readBoolean(); + return (T) new EsRejectedExecutionException(ex.getMessage(), isExecutorShutdown); + } return (T) ElasticsearchException.readException(this, ord); case 1: String msg1 = readOptionalString(); @@ -831,6 +839,9 @@ public T readException() throws IOException { return (T) readStackTrace(new InterruptedException(readOptionalString()), this); case 17: return (T) readStackTrace(new IOException(readOptionalString(), readException()), this); + case 18: + final boolean isExecutorShutdown = readBoolean(); + return (T) readStackTrace(new EsRejectedExecutionException(readOptionalString(), isExecutorShutdown), this); default: throw new IOException("no such exception for id: " + key); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 98a126e75e5c3..f52869c5e8054 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.Writeable.Writer; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.joda.time.DateTimeZone; import org.joda.time.ReadableInstant; @@ -852,8 +853,28 @@ public void writeException(Throwable throwable) throws IOException { writeCause = false; } else if (throwable instanceof IOException) { writeVInt(17); + } else if (throwable instanceof EsRejectedExecutionException) { + // TODO: remove the if branch when master is bumped to 8.0.0 + assert Version.CURRENT.major < 8; + if (version.before(Version.V_7_0_0_alpha1)) { + /* + * This is a backwards compatibility layer when speaking to nodes that still treated EsRejectedExceutionException as an + * 
instance of ElasticsearchException. As such, we serialize this in a way that the receiving node would read this as an + * EsRejectedExecutionException. + */ + final ElasticsearchException ex = new ElasticsearchException(throwable.getMessage()); + writeVInt(0); + writeVInt(59); + ex.writeTo(this); + writeBoolean(((EsRejectedExecutionException) throwable).isExecutorShutdown()); + return; + } else { + writeVInt(18); + writeBoolean(((EsRejectedExecutionException) throwable).isExecutorShutdown()); + writeCause = false; + } } else { - ElasticsearchException ex; + final ElasticsearchException ex; if (throwable instanceof ElasticsearchException && ElasticsearchException.isRegistered(throwable.getClass(), version)) { ex = (ElasticsearchException) throwable; } else { @@ -863,7 +884,6 @@ public void writeException(Throwable throwable) throws IOException { writeVInt(ElasticsearchException.getId(ex.getClass())); ex.writeTo(this); return; - } if (writeMessage) { writeOptionalString(throwable.getMessage()); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java index a38bbf452b7ed..7174058ab7821 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java @@ -19,14 +19,9 @@ package org.elasticsearch.common.util.concurrent; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.rest.RestStatus; +import java.util.concurrent.RejectedExecutionException; -import java.io.IOException; - -public class EsRejectedExecutionException extends ElasticsearchException { +public class EsRejectedExecutionException extends RejectedExecutionException { private final boolean isExecutorShutdown; @@ -43,22 +38,6 @@ public EsRejectedExecutionException() { this(null, false); } - @Override - public RestStatus status() { - return RestStatus.TOO_MANY_REQUESTS; - } - - public EsRejectedExecutionException(StreamInput in) throws IOException{ - super(in); - isExecutorShutdown = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(isExecutorShutdown); - } - /** * Checks if the thread pool that rejected the execution was terminated * shortly after the rejection. 
Its possible that this returns false and the diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 9bce92cb4811a..4c095efbbf8aa 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -76,6 +77,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.startsWith; diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index b794ded7f8d03..f9d90ffd7fb59 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -728,7 +728,7 @@ public void testIds() { ids.put(56, org.elasticsearch.common.settings.SettingsException.class); ids.put(57, org.elasticsearch.indices.IndexTemplateMissingException.class); ids.put(58, org.elasticsearch.transport.SendRequestTransportException.class); - ids.put(59, org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class); + ids.put(59, null); // weas EsRejectedExecutionException, which is no longer an instance of ElasticsearchException ids.put(60, null); // EarlyTerminationException was removed in 6.0 ids.put(61, null); // RoutingValidationException was removed in 5.0 ids.put(62, org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class); diff --git a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java index 011f5b380ecbd..5a36b3b5e8583 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java @@ -20,6 +20,8 @@ package org.elasticsearch; import org.apache.commons.codec.DecoderException; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import java.util.Optional; @@ -84,4 +86,9 @@ private void assertError(final Throwable cause, final Error error) { assertThat(maybeError.get(), equalTo(error)); } + public void testStatus() { + assertThat(ExceptionsHelper.status(new IllegalArgumentException("illegal")), equalTo(RestStatus.BAD_REQUEST)); + assertThat(ExceptionsHelper.status(new EsRejectedExecutionException("rejected")), equalTo(RestStatus.TOO_MANY_REQUESTS)); + } + } From 1f1a4d17b4b06fcf3548e5d49e5262725ea1307d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 16 Mar 2018 14:40:17 -0400 Subject: [PATCH 61/89] Remove BWC layer for rejected execution exception The serialization changes for rejected execution 
exceptions have been backported to 6.x with the intention to appear in
all versions since 6.3.0. Therefore, this BWC layer is no longer needed
in master since master would never speak to a node that does not speak
the same serialization.
---
 .../common/io/stream/StreamInput.java  |  7 ------
 .../common/io/stream/StreamOutput.java | 22 +++----------------
 2 files changed, 3 insertions(+), 26 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index 886a61b29c1ae..6706006a0a008 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -748,13 +748,6 @@ public T readException() throws IOException {
             switch (key) {
                 case 0:
                     final int ord = readVInt();
-                    // TODO: remove the if branch when master is bumped to 8.0.0
-                    assert Version.CURRENT.major < 8;
-                    if (ord == 59) {
-                        final ElasticsearchException ex = new ElasticsearchException(this);
-                        final boolean isExecutorShutdown = readBoolean();
-                        return (T) new EsRejectedExecutionException(ex.getMessage(), isExecutorShutdown);
-                    }
                     return (T) ElasticsearchException.readException(this, ord);
                 case 1:
                     String msg1 = readOptionalString();
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index f52869c5e8054..5f27f74956f2d 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -854,25 +854,9 @@ public void writeException(Throwable throwable) throws IOException {
         } else if (throwable instanceof IOException) {
             writeVInt(17);
         } else if (throwable instanceof EsRejectedExecutionException) {
-            // TODO: remove the if branch when master is bumped to 8.0.0
-            assert Version.CURRENT.major < 8;
-            if (version.before(Version.V_7_0_0_alpha1)) {
-                /*
-                 * This is a backwards compatibility layer when speaking to nodes that still treated EsRejectedExceutionException as an
-                 * instance of ElasticsearchException. As such, we serialize this in a way that the receiving node would read this as an
-                 * EsRejectedExecutionException.
-                 */
-                final ElasticsearchException ex = new ElasticsearchException(throwable.getMessage());
-                writeVInt(0);
-                writeVInt(59);
-                ex.writeTo(this);
-                writeBoolean(((EsRejectedExecutionException) throwable).isExecutorShutdown());
-                return;
-            } else {
-                writeVInt(18);
-                writeBoolean(((EsRejectedExecutionException) throwable).isExecutorShutdown());
-                writeCause = false;
-            }
+            writeVInt(18);
+            writeBoolean(((EsRejectedExecutionException) throwable).isExecutorShutdown());
+            writeCause = false;
         } else {
             final ElasticsearchException ex;
             if (throwable instanceof ElasticsearchException && ElasticsearchException.isRegistered(throwable.getClass(), version)) {

From 4d62640bf116af7e825d89c7319a39c3f2f325b4 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 16 Mar 2018 15:52:39 -0400
Subject: [PATCH 62/89] Fix typo in ExceptionSerializationTests

This commit fixes a little typo in ExceptionSerializationTests.java
replacing "weas" by "was".
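That comment refers to wire id 59, which the two patches above retire.
As a rough sketch (hypothetical code, heavily simplified from the
StreamInput/StreamOutput diffs above), the exception dispatch that
remains looks like this:

    import java.io.DataInputStream;
    import java.io.IOException;
    import java.util.concurrent.RejectedExecutionException;

    // Hypothetical, heavily simplified stand-in for StreamInput#readException:
    // key 0 means "a registered ElasticsearchException follows, identified by
    // a numeric id" (id 59 is retired), while hand-serialized JDK-side types
    // use dedicated keys, e.g. 18 for EsRejectedExecutionException together
    // with its isExecutorShutdown flag.
    public class ReadExceptionSketch {
        static Throwable read(DataInputStream in) throws IOException {
            int key = in.readInt();
            switch (key) {
                case 0:
                    int id = in.readInt(); // per-class id of a registered exception
                    return new RuntimeException("registered exception with id " + id);
                case 17:
                    return new IOException(in.readUTF());
                case 18:
                    boolean isExecutorShutdown = in.readBoolean();
                    return new RejectedExecutionException(in.readUTF() + ", shutdown=" + isExecutorShutdown);
                default:
                    throw new IOException("no such exception for id: " + key);
            }
        }
    }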
---
 .../java/org/elasticsearch/ExceptionSerializationTests.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index f9d90ffd7fb59..0b99b311add8a 100644
--- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -728,7 +728,7 @@ public void testIds() {
         ids.put(56, org.elasticsearch.common.settings.SettingsException.class);
         ids.put(57, org.elasticsearch.indices.IndexTemplateMissingException.class);
         ids.put(58, org.elasticsearch.transport.SendRequestTransportException.class);
-        ids.put(59, null); // weas EsRejectedExecutionException, which is no longer an instance of ElasticsearchException
+        ids.put(59, null); // was EsRejectedExecutionException, which is no longer an instance of ElasticsearchException
         ids.put(60, null); // EarlyTerminationException was removed in 6.0
         ids.put(61, null); // RoutingValidationException was removed in 5.0
         ids.put(62, org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class);

From 47211c00e978f5f19d143e22998e88e48b47a622 Mon Sep 17 00:00:00 2001
From: olcbean <26058559+olcbean@users.noreply.github.com>
Date: Fri, 16 Mar 2018 21:50:34 +0100
Subject: [PATCH 63/89] REST: Clear Indices Cache API simplify param parsing (#29111)

Simplify the parsing of the params in Clear Indices Cache API, as a
follow-up to the removal of the deprecated parameter names.
---
 .../RestClusterUpdateSettingsAction.java     |  1 -
 .../indices/RestClearIndicesCacheAction.java | 28 ++++---------------
 2 files changed, 5 insertions(+), 24 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
index 2901cdd2d9ba8..4eb5bbe2a8443 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java
index d9b493ba1f50d..f72ee8f2cb28b 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java
@@ -23,10 +23,8 @@
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.BytesRestResponse;
@@ -36,7 +34,6 @@
 import org.elasticsearch.rest.action.RestBuilderListener;

 import java.io.IOException;
-import java.util.Map;

 import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
@@ -44,6 +41,7 @@ import static org.elasticsearch.rest.action.RestActions.buildBroadcastShardsHeader;

 public class RestClearIndicesCacheAction extends BaseRestHandler {
+
     public RestClearIndicesCacheAction(Settings settings, RestController controller) {
         super(settings);
         controller.registerHandler(POST, "/_cache/clear", this);
@@ -82,27 +80,11 @@ public boolean canTripCircuitBreaker() {
     }

     public static ClearIndicesCacheRequest fromRequest(final RestRequest request, ClearIndicesCacheRequest clearIndicesCacheRequest) {
-
-        for (Map.Entry entry : request.params().entrySet()) {
-            if (Fields.QUERY.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) {
-                clearIndicesCacheRequest.queryCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.queryCache()));
-            } else if (Fields.REQUEST.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) {
-                clearIndicesCacheRequest.requestCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.requestCache()));
-            } else if (Fields.FIELDDATA.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) {
-                clearIndicesCacheRequest.fieldDataCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.fieldDataCache()));
-            } else if (Fields.FIELDS.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) {
-                clearIndicesCacheRequest.fields(request.paramAsStringArray(entry.getKey(), clearIndicesCacheRequest.fields()));
-            }
-        }
-
+        clearIndicesCacheRequest.queryCache(request.paramAsBoolean("query", clearIndicesCacheRequest.queryCache()));
+        clearIndicesCacheRequest.requestCache(request.paramAsBoolean("request", clearIndicesCacheRequest.requestCache()));
+        clearIndicesCacheRequest.fieldDataCache(request.paramAsBoolean("fielddata", clearIndicesCacheRequest.fieldDataCache()));
+        clearIndicesCacheRequest.fields(request.paramAsStringArray("fields", clearIndicesCacheRequest.fields()));
         return clearIndicesCacheRequest;
     }

-    public static class Fields {
-        public static final ParseField QUERY = new ParseField("query");
-        public static final ParseField REQUEST = new ParseField("request");
-        public static final ParseField FIELDDATA = new ParseField("fielddata");
-        public static final ParseField FIELDS = new ParseField("fields");
-    }
-
 }

From 60cb476cc90901956914e60c089af884d421a258 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 16 Mar 2018 16:55:37 -0400
Subject: [PATCH 64/89] Client: Wrap synchronous exceptions (#28919)

In the past the Low Level REST Client was super careful not to wrap
any exceptions that it throws from synchronous calls so that callers
can catch the exceptions and work with them. The trouble with that is
that the exceptions are originally thrown on the async thread pool and
then transferred back into the calling thread. That means that the
stack trace of the exception doesn't have the calling method, which is
*super* *ultra* confusing. This change always wraps exceptions
transferred from the async thread pool so that the stack trace of the
thrown exception contains the caller's stack. It tries to preserve the
type of the thrown exception but this is quite a fiddly thing to get
right.
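The wrapping move itself is small. As a rough sketch (a hypothetical
helper class, not the client's actual code), preserving the type for an
IOException looks like this:

    import java.io.IOException;

    // Hypothetical sketch of the wrap-and-rethrow pattern: the wrapping
    // exception is created on the calling thread, so the caller's frame is
    // in its stack trace, while the async-thread failure is kept as the cause.
    public class SyncWrapSketch {
        static IOException wrap(IOException asyncFailure) {
            return new IOException(asyncFailure.getMessage(), asyncFailure);
        }

        public static void main(String[] args) {
            IOException fromAsyncPool = new IOException("connection reset");
            IOException thrownToCaller = wrap(fromAsyncPool);
            // top frames: main/wrap on the calling thread; getCause() is the
            // original failure from the async pool
            thrownToCaller.printStackTrace();
        }
    }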
We have to catch every type of exception that we want to preserve, wrap with the same type and rethrow. I've preserved the types of all exceptions that we had tests mentioning but no other exceptions. The other exceptions are either wrapped in `IOException` or `RuntimeException`. Closes #28399 --- .../client/ResponseException.java | 10 ++ .../org/elasticsearch/client/RestClient.java | 116 ++++++++++----- .../client/RestClientMultipleHostsTests.java | 80 +++++++--- .../client/RestClientSingleHostTests.java | 28 +++- .../elasticsearch/client/RestClientTests.java | 90 ++++++++---- .../client/SyncResponseListenerTests.java | 137 ++++++++++++++---- 6 files changed, 345 insertions(+), 116 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java index 072e45ffb0e97..5e646d975c89c 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java @@ -39,6 +39,16 @@ public ResponseException(Response response) throws IOException { this.response = response; } + /** + * Wrap a {@linkplain ResponseException} with another one with the current + * stack trace. This is used during synchronous calls so that the caller + * ends up in the stack trace of the exception thrown. + */ + ResponseException(ResponseException e) throws IOException { + super(e.getMessage(), e); + this.response = e.getResponse(); + } + private static String buildMessage(Response response) throws IOException { String message = String.format(Locale.ROOT, "method [%s], host [%s], URI [%s], status line [%s]", diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index e221ed081a597..29e23f948bddb 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -20,6 +20,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.http.ConnectionClosedException; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; @@ -38,6 +39,7 @@ import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.client.utils.URIBuilder; import org.apache.http.concurrent.FutureCallback; +import org.apache.http.conn.ConnectTimeoutException; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.impl.client.BasicAuthCache; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; @@ -47,6 +49,7 @@ import java.io.Closeable; import java.io.IOException; +import java.net.SocketTimeoutException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; @@ -201,6 +204,14 @@ public Response performRequest(String method, String endpoint, Map requestParams = new HashMap<>(params); - //ignore is a special parameter supported by the clients, shouldn't be sent to es - String ignoreString = requestParams.remove("ignore"); - Set ignoreErrorCodes; - if (ignoreString == null) { - if (HttpHead.METHOD_NAME.equals(method)) { - //404 never causes error if returned for a HEAD request - ignoreErrorCodes = Collections.singleton(404); - } else { - ignoreErrorCodes = Collections.emptySet(); - } + performRequestAsyncNoCatch(method, endpoint, params, entity, httpAsyncResponseConsumerFactory, + responseListener, headers); + } catch 
(Exception e) { + responseListener.onFailure(e); + } + } + + void performRequestAsyncNoCatch(String method, String endpoint, Map params, + HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory, + ResponseListener responseListener, Header... headers) { + Objects.requireNonNull(params, "params must not be null"); + Map requestParams = new HashMap<>(params); + //ignore is a special parameter supported by the clients, shouldn't be sent to es + String ignoreString = requestParams.remove("ignore"); + Set ignoreErrorCodes; + if (ignoreString == null) { + if (HttpHead.METHOD_NAME.equals(method)) { + //404 never causes error if returned for a HEAD request + ignoreErrorCodes = Collections.singleton(404); } else { - String[] ignoresArray = ignoreString.split(","); - ignoreErrorCodes = new HashSet<>(); - if (HttpHead.METHOD_NAME.equals(method)) { - //404 never causes error if returned for a HEAD request - ignoreErrorCodes.add(404); - } - for (String ignoreCode : ignoresArray) { - try { - ignoreErrorCodes.add(Integer.valueOf(ignoreCode)); - } catch (NumberFormatException e) { - throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead", e); - } + ignoreErrorCodes = Collections.emptySet(); + } + } else { + String[] ignoresArray = ignoreString.split(","); + ignoreErrorCodes = new HashSet<>(); + if (HttpHead.METHOD_NAME.equals(method)) { + //404 never causes error if returned for a HEAD request + ignoreErrorCodes.add(404); + } + for (String ignoreCode : ignoresArray) { + try { + ignoreErrorCodes.add(Integer.valueOf(ignoreCode)); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead", e); } } - URI uri = buildUri(pathPrefix, endpoint, requestParams); - HttpRequestBase request = createHttpRequest(method, uri, entity); - setHeaders(request, headers); - FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener); - long startTime = System.nanoTime(); - performRequestAsync(startTime, nextHost(), request, ignoreErrorCodes, httpAsyncResponseConsumerFactory, - failureTrackingResponseListener); - } catch (Exception e) { - responseListener.onFailure(e); } + URI uri = buildUri(pathPrefix, endpoint, requestParams); + HttpRequestBase request = createHttpRequest(method, uri, entity); + setHeaders(request, headers); + FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener); + long startTime = System.nanoTime(); + performRequestAsync(startTime, nextHost(), request, ignoreErrorCodes, httpAsyncResponseConsumerFactory, + failureTrackingResponseListener); } private void performRequestAsync(final long startTime, final HostTuple> hostTuple, final HttpRequestBase request, @@ -674,12 +693,35 @@ Response get() throws IOException { e.addSuppressed(exception); throw e; } - //try and leave the exception untouched as much as possible but we don't want to just add throws Exception clause everywhere + /* + * Wrap and rethrow whatever exception we received, copying the type + * where possible so the synchronous API looks as much as possible + * like the asynchronous API. We wrap the exception so that the caller's + * signature shows up in any exception we throw. 
+ */ + if (exception instanceof ResponseException) { + throw new ResponseException((ResponseException) exception); + } + if (exception instanceof ConnectTimeoutException) { + ConnectTimeoutException e = new ConnectTimeoutException(exception.getMessage()); + e.initCause(exception); + throw e; + } + if (exception instanceof SocketTimeoutException) { + SocketTimeoutException e = new SocketTimeoutException(exception.getMessage()); + e.initCause(exception); + throw e; + } + if (exception instanceof ConnectionClosedException) { + ConnectionClosedException e = new ConnectionClosedException(exception.getMessage()); + e.initCause(exception); + throw e; + } if (exception instanceof IOException) { - throw (IOException) exception; + throw new IOException(exception.getMessage(), exception); } if (exception instanceof RuntimeException){ - throw (RuntimeException) exception; + throw new RuntimeException(exception.getMessage(), exception); } throw new RuntimeException("error while performing request", exception); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index 6f87a244ff59f..a3a834ff3204b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -35,6 +35,7 @@ import org.apache.http.message.BasicStatusLine; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; +import org.junit.After; import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -44,6 +45,8 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.concurrent.Future; import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; @@ -66,6 +69,7 @@ */ public class RestClientMultipleHostsTests extends RestClientTestCase { + private ExecutorService exec = Executors.newFixedThreadPool(1); private RestClient restClient; private HttpHost[] httpHosts; private HostsTrackingFailureListener failureListener; @@ -79,23 +83,28 @@ public void createRestClient() throws IOException { @Override public Future answer(InvocationOnMock invocationOnMock) throws Throwable { HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; - HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); - HttpHost httpHost = requestProducer.getTarget(); + final HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); + final HttpHost httpHost = requestProducer.getTarget(); HttpClientContext context = (HttpClientContext) invocationOnMock.getArguments()[2]; assertThat(context.getAuthCache().get(httpHost), instanceOf(BasicScheme.class)); - FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; + final FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; //return the desired status code or exception depending on the path - if (request.getURI().getPath().equals("/soe")) { - futureCallback.failed(new SocketTimeoutException(httpHost.toString())); - } else if (request.getURI().getPath().equals("/coe")) { - futureCallback.failed(new ConnectTimeoutException(httpHost.toString())); - } else if 
(request.getURI().getPath().equals("/ioe")) { - futureCallback.failed(new IOException(httpHost.toString())); - } else { - int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); - StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); - futureCallback.completed(new BasicHttpResponse(statusLine)); - } + exec.execute(new Runnable() { + @Override + public void run() { + if (request.getURI().getPath().equals("/soe")) { + futureCallback.failed(new SocketTimeoutException(httpHost.toString())); + } else if (request.getURI().getPath().equals("/coe")) { + futureCallback.failed(new ConnectTimeoutException(httpHost.toString())); + } else if (request.getURI().getPath().equals("/ioe")) { + futureCallback.failed(new IOException(httpHost.toString())); + } else { + int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); + StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); + futureCallback.completed(new BasicHttpResponse(statusLine)); + } + } + }); return null; } }); @@ -108,6 +117,14 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr restClient = new RestClient(httpClient, 10000, new Header[0], httpHosts, null, failureListener); } + /** + * Shutdown the executor so we don't leak threads into other test runs. + */ + @After + public void shutdownExec() { + exec.shutdown(); + } + public void testRoundRobinOkStatusCodes() throws IOException { int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { @@ -142,7 +159,7 @@ public void testRoundRobinNoRetryErrors() throws IOException { } else { fail("request should have failed"); } - } catch(ResponseException e) { + } catch (ResponseException e) { if (method.equals("HEAD") && statusCode == 404) { throw e; } @@ -162,7 +179,12 @@ public void testRoundRobinRetryErrors() throws IOException { try { restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); fail("request should have failed"); - } catch(ResponseException e) { + } catch (ResponseException e) { + /* + * Unwrap the top level failure that was added so the stack trace contains + * the caller. It wraps the exception that contains the failed hosts. + */ + e = (ResponseException) e.getCause(); Set hostsSet = new HashSet<>(); Collections.addAll(hostsSet, httpHosts); //first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each @@ -182,7 +204,12 @@ public void testRoundRobinRetryErrors() throws IOException { } } while(e != null); assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size()); - } catch(IOException e) { + } catch (IOException e) { + /* + * Unwrap the top level failure that was added so the stack trace contains + * the caller. It wraps the exception that contains the failed hosts. 
+ */ + e = (IOException) e.getCause(); Set hostsSet = new HashSet<>(); Collections.addAll(hostsSet, httpHosts); //first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each @@ -212,7 +239,7 @@ public void testRoundRobinRetryErrors() throws IOException { try { restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); fail("request should have failed"); - } catch(ResponseException e) { + } catch (ResponseException e) { Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(Integer.parseInt(retryEndpoint.substring(1)))); assertTrue("host [" + response.getHost() + "] not found, most likely used multiple times", @@ -220,7 +247,12 @@ public void testRoundRobinRetryErrors() throws IOException { //after the first request, all hosts are blacklisted, a single one gets resurrected each time failureListener.assertCalled(response.getHost()); assertEquals(0, e.getSuppressed().length); - } catch(IOException e) { + } catch (IOException e) { + /* + * Unwrap the top level failure that was added so the stack trace contains + * the caller. It wraps the exception that contains the failed hosts. + */ + e = (IOException) e.getCause(); HttpHost httpHost = HttpHost.create(e.getMessage()); assertTrue("host [" + httpHost + "] not found, most likely used multiple times", hostsSet.remove(httpHost)); //after the first request, all hosts are blacklisted, a single one gets resurrected each time @@ -238,8 +270,7 @@ public void testRoundRobinRetryErrors() throws IOException { Response response; try { response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); - } - catch(ResponseException e) { + } catch (ResponseException e) { response = e.getResponse(); } assertThat(response.getStatusLine().getStatusCode(), equalTo(statusCode)); @@ -257,12 +288,17 @@ public void testRoundRobinRetryErrors() throws IOException { try { restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); fail("request should have failed"); - } catch(ResponseException e) { + } catch (ResponseException e) { Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(Integer.parseInt(retryEndpoint.substring(1)))); assertThat(response.getHost(), equalTo(selectedHost)); failureListener.assertCalled(selectedHost); } catch(IOException e) { + /* + * Unwrap the top level failure that was added so the stack trace contains + * the caller. It wraps the exception that contains the failed hosts. 
+ */ + e = (IOException) e.getCause(); HttpHost httpHost = HttpHost.create(e.getMessage()); assertThat(httpHost, equalTo(selectedHost)); failureListener.assertCalled(selectedHost); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 541193c733d56..caf9ce6be2e07 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -47,6 +47,7 @@ import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.apache.http.util.EntityUtils; +import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; @@ -61,6 +62,8 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.concurrent.Future; import static org.elasticsearch.client.RestClientTestUtil.getAllErrorStatusCodes; @@ -68,6 +71,7 @@ import static org.elasticsearch.client.RestClientTestUtil.getOkStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.randomHttpMethod; import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode; +import static org.elasticsearch.client.SyncResponseListenerTests.assertExceptionStackContainsCallingMethod; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertArrayEquals; @@ -88,6 +92,7 @@ */ public class RestClientSingleHostTests extends RestClientTestCase { + private ExecutorService exec = Executors.newFixedThreadPool(1); private RestClient restClient; private Header[] defaultHeaders; private HttpHost httpHost; @@ -105,7 +110,8 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; HttpClientContext context = (HttpClientContext) invocationOnMock.getArguments()[2]; assertThat(context.getAuthCache().get(httpHost), instanceOf(BasicScheme.class)); - FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; + final FutureCallback futureCallback = + (FutureCallback) invocationOnMock.getArguments()[3]; HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); //return the desired status code or exception depending on the path if (request.getURI().getPath().equals("/soe")) { @@ -116,7 +122,7 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); - HttpResponse httpResponse = new BasicHttpResponse(statusLine); + final HttpResponse httpResponse = new BasicHttpResponse(statusLine); //return the same body that was sent if (request instanceof HttpEntityEnclosingRequest) { HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity(); @@ -128,7 +134,13 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr } //return the same headers that were sent httpResponse.setHeaders(request.getAllHeaders()); - futureCallback.completed(httpResponse); + // Call the callback asynchronous to better simulate how async http client works + exec.execute(new 
Runnable() { + @Override + public void run() { + futureCallback.completed(httpResponse); + } + }); } return null; } @@ -140,6 +152,14 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, null, failureListener); } + /** + * Shutdown the executor so we don't leak threads into other test runs. + */ + @After + public void shutdownExec() { + exec.shutdown(); + } + public void testNullPath() throws IOException { for (String method : getHttpMethods()) { try { @@ -258,6 +278,7 @@ public void testErrorStatusCodes() throws IOException { throw e; } assertEquals(errorStatusCode, e.getResponse().getStatusLine().getStatusCode()); + assertExceptionStackContainsCallingMethod(e); } if (errorStatusCode <= 500 || expectedIgnores.contains(errorStatusCode)) { failureListener.assertNotCalled(); @@ -309,6 +330,7 @@ public void testBody() throws IOException { Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(errorStatusCode)); assertThat(EntityUtils.toString(response.getEntity()), equalTo(body)); + assertExceptionStackContainsCallingMethod(e); } } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index dd3a88f53513b..33323d39663e2 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -26,8 +26,11 @@ import java.io.IOException; import java.net.URI; import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -48,50 +51,83 @@ public void testCloseIsIdempotent() throws IOException { } public void testPerformAsyncWithUnsupportedMethod() throws Exception { - RestClient.SyncResponseListener listener = new RestClient.SyncResponseListener(10000); + final CountDownLatch latch = new CountDownLatch(1); try (RestClient restClient = createRestClient()) { - restClient.performRequestAsync("unsupported", randomAsciiLettersOfLength(5), listener); - listener.get(); - - fail("should have failed because of unsupported method"); - } catch (UnsupportedOperationException exception) { - assertEquals("http method not supported: unsupported", exception.getMessage()); + restClient.performRequestAsync("unsupported", randomAsciiLettersOfLength(5), new ResponseListener() { + @Override + public void onSuccess(Response response) { + fail("should have failed because of unsupported method"); + } + + @Override + public void onFailure(Exception exception) { + assertThat(exception, instanceOf(UnsupportedOperationException.class)); + assertEquals("http method not supported: unsupported", exception.getMessage()); + latch.countDown(); + } + }); + latch.await(); } } public void testPerformAsyncWithNullParams() throws Exception { - RestClient.SyncResponseListener listener = new RestClient.SyncResponseListener(10000); + final CountDownLatch latch = new CountDownLatch(1); try (RestClient restClient = createRestClient()) { - restClient.performRequestAsync(randomAsciiLettersOfLength(5), randomAsciiLettersOfLength(5), null, listener); - listener.get(); - - fail("should have failed because of null parameters"); - } catch 
(NullPointerException exception) { - assertEquals("params must not be null", exception.getMessage()); + restClient.performRequestAsync(randomAsciiLettersOfLength(5), randomAsciiLettersOfLength(5), null, new ResponseListener() { + @Override + public void onSuccess(Response response) { + fail("should have failed because of null parameters"); + } + + @Override + public void onFailure(Exception exception) { + assertThat(exception, instanceOf(NullPointerException.class)); + assertEquals("params must not be null", exception.getMessage()); + latch.countDown(); + } + }); + latch.await(); } } public void testPerformAsyncWithNullHeaders() throws Exception { - RestClient.SyncResponseListener listener = new RestClient.SyncResponseListener(10000); + final CountDownLatch latch = new CountDownLatch(1); try (RestClient restClient = createRestClient()) { + ResponseListener listener = new ResponseListener() { + @Override + public void onSuccess(Response response) { + fail("should have failed because of null headers"); + } + + @Override + public void onFailure(Exception exception) { + assertThat(exception, instanceOf(NullPointerException.class)); + assertEquals("request header must not be null", exception.getMessage()); + latch.countDown(); + } + }; restClient.performRequestAsync("GET", randomAsciiLettersOfLength(5), listener, (Header) null); - listener.get(); - - fail("should have failed because of null headers"); - } catch (NullPointerException exception) { - assertEquals("request header must not be null", exception.getMessage()); + latch.await(); } } public void testPerformAsyncWithWrongEndpoint() throws Exception { - RestClient.SyncResponseListener listener = new RestClient.SyncResponseListener(10000); + final CountDownLatch latch = new CountDownLatch(1); try (RestClient restClient = createRestClient()) { - restClient.performRequestAsync("GET", "::http:///", listener); - listener.get(); - - fail("should have failed because of wrong endpoint"); - } catch (IllegalArgumentException exception) { - assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage()); + restClient.performRequestAsync("GET", "::http:///", new ResponseListener() { + @Override + public void onSuccess(Response response) { + fail("should have failed because of wrong endpoint"); + } + + @Override + public void onFailure(Exception exception) { + assertThat(exception, instanceOf(IllegalArgumentException.class)); + assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage()); + latch.countDown(); + } + }); + latch.await(); } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java index 154efb4cac34b..f9406a6c4902d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java @@ -19,16 +19,21 @@ package org.elasticsearch.client; +import org.apache.http.ConnectionClosedException; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.RequestLine; import org.apache.http.StatusLine; +import org.apache.http.conn.ConnectTimeoutException; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicRequestLine; import org.apache.http.message.BasicStatusLine; import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import 
java.net.SocketTimeoutException; import java.net.URISyntaxException; import static org.junit.Assert.assertEquals; @@ -37,13 +42,37 @@ import static org.junit.Assert.fail; public class SyncResponseListenerTests extends RestClientTestCase { + /** + * Asserts that the provided {@linkplain Exception} contains the method + * that called this somewhere on its stack. This is + * normally the case for synchronous calls but {@link RestClient} performs + * synchronous calls by performing asynchronous calls and blocking the + * current thread until the call returns so it has to take special care + * to make sure that the caller shows up in the exception. We use this + * assertion to make sure that we don't break that "special care". + */ + static void assertExceptionStackContainsCallingMethod(Exception e) { + // 0 is getStackTrace + // 1 is this method + // 2 is the caller, what we want + StackTraceElement myMethod = Thread.currentThread().getStackTrace()[2]; + for (StackTraceElement se : e.getStackTrace()) { + if (se.getClassName().equals(myMethod.getClassName()) + && se.getMethodName().equals(myMethod.getMethodName())) { + return; + } + } + StringWriter stack = new StringWriter(); + e.printStackTrace(new PrintWriter(stack)); + fail("didn't find the calling method (looks like " + myMethod + ") in:\n" + stack); + } public void testOnSuccessNullResponse() { RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); try { syncResponseListener.onSuccess(null); fail("onSuccess should have failed"); - } catch(NullPointerException e) { + } catch (NullPointerException e) { assertEquals("response must not be null", e.getMessage()); } } @@ -53,7 +82,7 @@ public void testOnFailureNullException() { try { syncResponseListener.onFailure(null); fail("onFailure should have failed"); - } catch(NullPointerException e) { + } catch (NullPointerException e) { assertEquals("exception must not be null", e.getMessage()); } } @@ -68,23 +97,11 @@ public void testOnSuccess() throws Exception { try { syncResponseListener.onSuccess(mockResponse); fail("get should have failed"); - } catch(IllegalStateException e) { + } catch (IllegalStateException e) { assertEquals(e.getMessage(), "response is already set"); } response = syncResponseListener.get(); assertSame(response, mockResponse); - - RuntimeException runtimeException = new RuntimeException("test"); - syncResponseListener.onFailure(runtimeException); - try { - syncResponseListener.get(); - fail("get should have failed"); - } catch(IllegalStateException e) { - assertEquals("response and exception are unexpectedly set at the same time", e.getMessage()); - assertNotNull(e.getSuppressed()); - assertEquals(1, e.getSuppressed().length); - assertSame(runtimeException, e.getSuppressed()[0]); - } } public void testOnFailure() throws Exception { @@ -94,8 +111,9 @@ public void testOnFailure() throws Exception { try { syncResponseListener.get(); fail("get should have failed"); - } catch(RuntimeException e) { - assertSame(firstException, e); + } catch (RuntimeException e) { + assertEquals(firstException.getMessage(), e.getMessage()); + assertSame(firstException, e.getCause()); } RuntimeException secondException = new RuntimeException("second-test"); @@ -107,8 +125,9 @@ public void testOnFailure() throws Exception { try { syncResponseListener.get(); fail("get should have failed"); - } catch(RuntimeException e) { - assertSame(firstException, e); + } catch (RuntimeException e) { + assertEquals(firstException.getMessage(), e.getMessage()); + 
assertSame(firstException, e.getCause()); } Response response = mockResponse(); @@ -116,7 +135,7 @@ public void testOnFailure() throws Exception { try { syncResponseListener.get(); fail("get should have failed"); - } catch(IllegalStateException e) { + } catch (IllegalStateException e) { assertEquals("response and exception are unexpectedly set at the same time", e.getMessage()); assertNotNull(e.getSuppressed()); assertEquals(1, e.getSuppressed().length); @@ -124,27 +143,88 @@ public void testOnFailure() throws Exception { } } - public void testRuntimeExceptionIsNotWrapped() throws Exception { + public void testRuntimeIsBuiltCorrectly() throws Exception { RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); RuntimeException runtimeException = new RuntimeException(); syncResponseListener.onFailure(runtimeException); try { syncResponseListener.get(); fail("get should have failed"); - } catch(RuntimeException e) { - assertSame(runtimeException, e); + } catch (RuntimeException e) { + // We preserve the original exception in the cause + assertSame(runtimeException, e.getCause()); + // We copy the message + assertEquals(runtimeException.getMessage(), e.getMessage()); + // And we do all that so the thrown exception has our method in the stacktrace + assertExceptionStackContainsCallingMethod(e); + } + } + + public void testConnectTimeoutExceptionIsBuiltCorrectly() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + ConnectTimeoutException timeoutException = new ConnectTimeoutException(); + syncResponseListener.onFailure(timeoutException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch (IOException e) { + // We preserve the original exception in the cause + assertSame(timeoutException, e.getCause()); + // We copy the message + assertEquals(timeoutException.getMessage(), e.getMessage()); + // And we do all that so the thrown exception has our method in the stacktrace + assertExceptionStackContainsCallingMethod(e); + } + } + + public void testSocketTimeoutExceptionIsBuiltCorrectly() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + SocketTimeoutException timeoutException = new SocketTimeoutException(); + syncResponseListener.onFailure(timeoutException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch (IOException e) { + // We preserve the original exception in the cause + assertSame(timeoutException, e.getCause()); + // We copy the message + assertEquals(timeoutException.getMessage(), e.getMessage()); + // And we do all that so the thrown exception has our method in the stacktrace + assertExceptionStackContainsCallingMethod(e); + } + } + + public void testConnectionClosedExceptionIsWrapped() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + ConnectionClosedException closedException = new ConnectionClosedException(randomAsciiAlphanumOfLength(5)); + syncResponseListener.onFailure(closedException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch (ConnectionClosedException e) { + // We preserve the original exception in the cause + assertSame(closedException, e.getCause()); + // We copy the message + assertEquals(closedException.getMessage(), e.getMessage()); + // And we do all that so the thrown exception has our method in the stacktrace + 
assertExceptionStackContainsCallingMethod(e); } } - public void testIOExceptionIsNotWrapped() throws Exception { + public void testIOExceptionIsBuiltCorrectly() throws Exception { RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); IOException ioException = new IOException(); syncResponseListener.onFailure(ioException); try { syncResponseListener.get(); fail("get should have failed"); - } catch(IOException e) { - assertSame(ioException, e); + } catch (IOException e) { + // We preserve the original exception in the cause + assertSame(ioException, e.getCause()); + // We copy the message + assertEquals(ioException.getMessage(), e.getMessage()); + // And we do all that so the thrown exception has our method in the stacktrace + assertExceptionStackContainsCallingMethod(e); } } @@ -156,9 +236,12 @@ public void testExceptionIsWrapped() throws Exception { try { syncResponseListener.get(); fail("get should have failed"); - } catch(RuntimeException e) { + } catch (RuntimeException e) { assertEquals("error while performing request", e.getMessage()); + // We preserve the original exception in the cause assertSame(exception, e.getCause()); + // And we do all that so the thrown exception has our method in the stacktrace + assertExceptionStackContainsCallingMethod(e); } } From 2f21dc7129ae4fcacad274b1fd078a7e804d2a78 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 16 Mar 2018 18:52:34 -0400 Subject: [PATCH 65/89] Docs: HighLevelRestClient#multiGet (#29095) Add documentation for HighLevelRestClient#multiGet. Relates to #28389. --- .../documentation/CRUDDocumentationIT.java | 208 +++++++++++++++++- .../high-level/document/multi-get.asciidoc | 168 ++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + 3 files changed, 377 insertions(+), 1 deletion(-) create mode 100644 docs/java-rest/high-level/document/multi-get.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index a12bd48f22242..5adc7bee273a0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -37,10 +37,14 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetItemResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; @@ -68,6 +72,11 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; import static 
java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -809,7 +818,7 @@ public void testGet() throws Exception { { GetRequest request = new GetRequest("posts", "doc", "1"); //tag::get-request-no-source - request.fetchSourceContext(new FetchSourceContext(false)); // <1> + request.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE); // <1> //end::get-request-no-source GetResponse getResponse = client.get(request); assertNull(getResponse.getSourceInternal()); @@ -1066,4 +1075,201 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) // end::bulk-processor-options } } + + public void testMultiGet() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + String mappings = "{\n" + + " \"mappings\" : {\n" + + " \"type\" : {\n" + + " \"properties\" : {\n" + + " \"foo\" : {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON); + Response response = client().performRequest("PUT", "/index", Collections.emptyMap(), entity); + assertEquals(200, response.getStatusLine().getStatusCode()); + } + + Map source = new HashMap<>(); + source.put("foo", "val1"); + source.put("bar", "val2"); + source.put("baz", "val3"); + client.index(new IndexRequest("index", "type", "example_id") + .source(source) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE)); + + { + // tag::multi-get-request + MultiGetRequest request = new MultiGetRequest(); + request.add(new MultiGetRequest.Item( + "index", // <1> + "type", // <2> + "example_id")); // <3> + request.add(new MultiGetRequest.Item("index", "type", "another_id")); // <4> + // end::multi-get-request + + // Add a missing index so we can test it. 
+ request.add(new MultiGetRequest.Item("missing_index", "type", "id")); + + // tag::multi-get-request-item-extras + request.add(new MultiGetRequest.Item("index", "type", "with_routing") + .routing("some_routing")); // <1> + request.add(new MultiGetRequest.Item("index", "type", "with_parent") + .parent("some_parent")); // <2> + request.add(new MultiGetRequest.Item("index", "type", "with_version") + .versionType(VersionType.EXTERNAL) // <3> + .version(10123L)); // <4> + // end::multi-get-request-item-extras + // tag::multi-get-request-top-level-extras + request.preference("some_preference"); // <1> + request.realtime(false); // <2> + request.refresh(true); // <3> + // end::multi-get-request-top-level-extras + + // tag::multi-get-execute + MultiGetResponse response = client.multiGet(request); + // end::multi-get-execute + + // tag::multi-get-response + MultiGetItemResponse firstItem = response.getResponses()[0]; + assertNull(firstItem.getFailure()); // <1> + GetResponse firstGet = firstItem.getResponse(); // <2> + String index = firstItem.getIndex(); + String type = firstItem.getType(); + String id = firstItem.getId(); + if (firstGet.isExists()) { + long version = firstGet.getVersion(); + String sourceAsString = firstGet.getSourceAsString(); // <3> + Map sourceAsMap = firstGet.getSourceAsMap(); // <4> + byte[] sourceAsBytes = firstGet.getSourceAsBytes(); // <5> + } else { + // <6> + } + // end::multi-get-response + + assertTrue(firstGet.isExists()); + assertEquals(source, firstGet.getSource()); + + MultiGetItemResponse missingIndexItem = response.getResponses()[2]; + // tag::multi-get-indexnotfound + assertNull(missingIndexItem.getResponse()); // <1> + Exception e = missingIndexItem.getFailure().getFailure(); // <2> + ElasticsearchException ee = (ElasticsearchException) e; // <3> + // TODO status is broken! 
fix in a followup + // assertEquals(RestStatus.NOT_FOUND, ee.status()); // <4> + assertThat(e.getMessage(), + containsString("reason=no such index")); // <5> + // end::multi-get-indexnotfound + + // tag::multi-get-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(MultiGetResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::multi-get-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::multi-get-execute-async + client.multiGetAsync(request, listener); // <1> + // end::multi-get-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + { + MultiGetRequest request = new MultiGetRequest(); + // tag::multi-get-request-no-source + request.add(new MultiGetRequest.Item("index", "type", "example_id") + .fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)); // <1> + // end::multi-get-request-no-source + MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request)); + assertNull(item.getResponse().getSource()); + } + { + MultiGetRequest request = new MultiGetRequest(); + // tag::multi-get-request-source-include + String[] includes = new String[] {"foo", "*r"}; + String[] excludes = Strings.EMPTY_ARRAY; + FetchSourceContext fetchSourceContext = + new FetchSourceContext(true, includes, excludes); + request.add(new MultiGetRequest.Item("index", "type", "example_id") + .fetchSourceContext(fetchSourceContext)); // <1> + // end::multi-get-request-source-include + MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request)); + assertThat(item.getResponse().getSource(), hasEntry("foo", "val1")); + assertThat(item.getResponse().getSource(), hasEntry("bar", "val2")); + assertThat(item.getResponse().getSource(), not(hasKey("baz"))); + } + { + MultiGetRequest request = new MultiGetRequest(); + // tag::multi-get-request-source-exclude + String[] includes = Strings.EMPTY_ARRAY; + String[] excludes = new String[] {"foo", "*r"}; + FetchSourceContext fetchSourceContext = + new FetchSourceContext(true, includes, excludes); + request.add(new MultiGetRequest.Item("index", "type", "example_id") + .fetchSourceContext(fetchSourceContext)); // <1> + // end::multi-get-request-source-exclude + MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request)); + assertThat(item.getResponse().getSource(), not(hasKey("foo"))); + assertThat(item.getResponse().getSource(), not(hasKey("bar"))); + assertThat(item.getResponse().getSource(), hasEntry("baz", "val3")); + } + { + MultiGetRequest request = new MultiGetRequest(); + // tag::multi-get-request-stored + request.add(new MultiGetRequest.Item("index", "type", "example_id") + .storedFields("foo")); // <1> + MultiGetResponse response = client.multiGet(request); + MultiGetItemResponse item = response.getResponses()[0]; + String value = item.getResponse().getField("foo").getValue(); // <2> + // end::multi-get-request-stored + assertNull(item.getResponse().getSource()); + assertEquals("val1", value); + } + { + // tag::multi-get-conflict + MultiGetRequest request = new MultiGetRequest(); + request.add(new MultiGetRequest.Item("index", "type", "example_id") + .version(1000L)); + MultiGetResponse response = client.multiGet(request); + MultiGetItemResponse item = response.getResponses()[0]; + assertNull(item.getResponse()); // <1> + Exception e = 
item.getFailure().getFailure(); // <2>
+            ElasticsearchException ee = (ElasticsearchException) e; // <3>
+            // TODO status is broken! fix in a followup
+            // assertEquals(RestStatus.CONFLICT, ee.status()); // <4>
+            assertThat(e.getMessage(),
+                containsString("version conflict, current version [1] is "
+                    + "different than the one provided [1000]")); // <5>
+            // end::multi-get-conflict
+        }
+
+    }
+
+    private MultiGetItemResponse unwrapAndAssertExample(MultiGetResponse response) {
+        assertThat(response.getResponses(), arrayWithSize(1));
+        MultiGetItemResponse item = response.getResponses()[0];
+        assertEquals("index", item.getIndex());
+        assertEquals("type", item.getType());
+        assertEquals("example_id", item.getId());
+        return item;
+    }
 }
diff --git a/docs/java-rest/high-level/document/multi-get.asciidoc b/docs/java-rest/high-level/document/multi-get.asciidoc
new file mode 100644
index 0000000000000..1f4628e149c96
--- /dev/null
+++ b/docs/java-rest/high-level/document/multi-get.asciidoc
@@ -0,0 +1,168 @@
+[[java-rest-high-document-multi-get]]
+=== Multi-Get API
+
+The `multiGet` API executes multiple <>
+requests in a single HTTP request in parallel.
+
+[[java-rest-high-document-multi-get-request]]
+==== Multi-Get Request
+
+A `MultiGetRequest` is built empty and you add `MultiGetRequest.Item`s to
+configure what to fetch:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request]
+--------------------------------------------------
+<1> Index
+<2> Type
+<3> Document id
+<4> Add another item to fetch
+
+==== Optional arguments
+
+`multiGet` supports the same optional arguments that the
+<> supports.
+You can set most of these on the `Item`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-no-source]
+--------------------------------------------------
+<1> Disable source retrieval, enabled by default
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-source-include]
+--------------------------------------------------
+<1> Configure source inclusion for specific fields
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-source-exclude]
+--------------------------------------------------
+<1> Configure source exclusion for specific fields
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-stored]
+--------------------------------------------------
+<1> Configure retrieval for specific stored fields (requires fields to be
+stored separately in the mappings)
+<2> Retrieve the `foo` stored field (requires the field to be stored
+separately in the mappings)
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-item-extras]
+--------------------------------------------------
+<1> Routing value
+<2> Parent value
+<3> Version
+<4> Version type
+
+{ref}/search-request-preference.html[`preference`],
+{ref}/docs-get.html#realtime[`realtime`]
+and
+{ref}/docs-get.html#get-refresh[`refresh`] can be set on the main request but
+not on any items:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-top-level-extras]
+--------------------------------------------------
+<1> Preference value
+<2> Set realtime flag to `false` (`true` by default)
+<3> Perform a refresh before retrieving the document (`false` by default)
+
+[[java-rest-high-document-multi-get-sync]]
+==== Synchronous Execution
+
+After building the `MultiGetRequest` you can execute it synchronously with
+`multiGet`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute]
+--------------------------------------------------
+
+[[java-rest-high-document-multi-get-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a multi get request requires both the
+`MultiGetRequest` instance and an `ActionListener` instance to be passed to
+the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute-async]
+--------------------------------------------------
+<1> The `MultiGetRequest` to execute and the `ActionListener` to use when
+the execution completes.
+
+The asynchronous method does not block and returns immediately. Once the
+request is completed, the `ActionListener` is called back using the
+`onResponse` method if the execution successfully completed or using the
+`onFailure` method if it failed.
+
+A typical listener for `MultiGetResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument.
+<2> Called in case of failure. The raised exception is provided as an argument.
+
+[[java-rest-high-document-multi-get-response]]
+==== Multi Get Response
+
+The returned `MultiGetResponse` contains a list of `MultiGetItemResponse`s in
+`getResponses` in the same order that they were requested.
+`MultiGetItemResponse` contains *either* a
+<> if the get succeeded
+or a `MultiGetResponse.Failure` if it failed. A success looks just like a
+normal `GetResponse`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-response]
+--------------------------------------------------
+<1> `getFailure` returns null because there isn't a failure.
+<2> `getResponse` returns the `GetResponse`.
+<3> Retrieve the document as a `String`
+<4> Retrieve the document as a `Map`
+<5> Retrieve the document as a `byte[]`
+<6> Handle the scenario where the document was not found. Note that although
+the returned response has a `404` status code, a valid `GetResponse` is
+returned rather than an exception being thrown. Such a response does not hold
+any source document and its `isExists` method returns `false`.
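+
+As a rough sketch, this is how a caller might drain a whole response,
+branching on failed, found, and not-found items (the `handleFailure` and
+`handleMissing` helpers here are hypothetical, not part of the client API):
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+for (MultiGetItemResponse item : response.getResponses()) {
+    if (item.getFailure() != null) {
+        // the subrequest itself failed, e.g. the index does not exist
+        handleFailure(item.getFailure().getFailure());
+    } else if (item.getResponse().isExists()) {
+        // the document was found, read its source
+        String source = item.getResponse().getSourceAsString();
+    } else {
+        // the subrequest succeeded but the document does not exist
+        handleMissing(item.getIndex(), item.getId());
+    }
+}
+--------------------------------------------------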
+
+When one of the subrequests is performed against an index that does not
+exist, `getFailure` will contain an exception:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-indexnotfound]
+--------------------------------------------------
+<1> `getResponse` is null.
+<2> `getFailure` isn't and contains an `Exception`.
+<3> That `Exception` is actually an `ElasticsearchException`
+<4> and it has a status of `NOT_FOUND`. It'd have been an HTTP 404 if this
+wasn't a multi get.
+<5> `getMessage` explains the actual cause, `no such index`.
+
+In case a specific document version has been requested, and the existing
+document has a different version number, a version conflict is raised:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-conflict]
+--------------------------------------------------
+<1> `getResponse` is null.
+<2> `getFailure` isn't and contains an `Exception`.
+<3> That `Exception` is actually an `ElasticsearchException`
+<4> and it has a status of `CONFLICT`. It'd have been an HTTP 409 if this
+wasn't a multi get.
+<5> `getMessage` explains the actual cause, `version conflict, current version
+[1] is different than the one provided [1000]`.
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 79f17db577421..615634b65f1fd 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -14,6 +14,7 @@ Single document APIs::
 [[multi-doc]]
 Multi-document APIs::
 * <>
+* <>
 
 include::document/index.asciidoc[]
 include::document/get.asciidoc[]
@@ -21,6 +22,7 @@ include::document/exists.asciidoc[]
 include::document/delete.asciidoc[]
 include::document/update.asciidoc[]
 include::document/bulk.asciidoc[]
+include::document/multi-get.asciidoc[]
 
 == Search APIs

From 22ad52a2887cc35db7a95cb9dcd113db5f18cf78 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Fri, 16 Mar 2018 21:50:16 -0400
Subject: [PATCH 66/89] TEST: Adjust translog size assumption in new engine

A new engine can now have more than one empty translog since #28676.
This caused #testShouldPeriodicallyFlush to fail because in the test we
assume an engine should have one empty translog. This commit takes into
account the extra translog size of a new engine.

---
 .../org/elasticsearch/index/engine/InternalEngineTests.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 2488ca79fe482..cac74573374aa 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -4251,13 +4251,16 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception {
 
     public void testShouldPeriodicallyFlush() throws Exception {
         assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
+        // A new engine may have more than one empty translog file - the test should account for this extra size.
+        final long extraTranslogSizeInNewEngine = engine.getTranslog().uncommittedSizeInBytes() - Translog.DEFAULT_HEADER_SIZE_IN_BYTES;
         int numDocs = between(10, 100);
         for (int id = 0; id < numDocs; id++) {
             final ParsedDocument doc = testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
             engine.index(indexForDoc(doc));
         }
         assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
-        long flushThreshold = RandomNumbers.randomLongBetween(random(), 100, engine.getTranslog().uncommittedSizeInBytes());
+        long flushThreshold = RandomNumbers.randomLongBetween(random(), 100,
+            engine.getTranslog().uncommittedSizeInBytes() - extraTranslogSizeInNewEngine);
         final IndexSettings indexSettings = engine.config().getIndexSettings();
         final IndexMetaData indexMetaData = IndexMetaData.builder(indexSettings.getIndexMetaData())
             .settings(Settings.builder().put(indexSettings.getSettings())
From 2e93a9158f3df9b77f675c77e8288d19e65318e7 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 16 Mar 2018 22:47:06 -0400
Subject: [PATCH 67/89] Align thread pool info to thread pool configuration (#29123)

Today we report thread pool info using a common object. This means that
we use a shared set of terminology that is not consistent with the
terminology used to configure thread pools. This holds in particular
for the minimum and maximum number of threads in the thread pool where
we use the following terminology:

 thread pool info | fixed | scaling
 min              | size  | core
 max              | size  | max

This commit changes the display of thread pool info to be dependent on
the type of the thread pool so that we can align the terminology in the
output of thread pool info with the terminology used to configure a
thread pool.

---
 .../elasticsearch/threadpool/ThreadPool.java | 54 +++++++++----------
 1 file changed, 27 insertions(+), 27 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 6abdc309e21b0..c7d16d1979b20 100644
--- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -58,6 +58,7 @@
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 
 import static java.util.Collections.unmodifiableMap;
 
@@ -138,7 +139,9 @@ public static ThreadPoolType fromType(String type) {
         THREAD_POOL_TYPES = Collections.unmodifiableMap(map);
     }
 
-    private Map<String, ExecutorHolder> executors = new HashMap<>();
+    private final Map<String, ExecutorHolder> executors;
+
+    private final ThreadPoolInfo threadPoolInfo;
 
     private final CachedTimeThread cachedTimeThread;
 
@@ -207,6 +210,15 @@ public ThreadPool(final Settings settings, final ExecutorBuilder<?>... customBuilders) {
        executors.put(Names.SAME, new ExecutorHolder(DIRECT_EXECUTOR, new Info(Names.SAME, ThreadPoolType.DIRECT)));
         this.executors = unmodifiableMap(executors);
+
+        final List<Info> infos =
+            executors
+                .values()
+                .stream()
+                .filter(holder -> holder.info.getName().equals("same") == false)
+                .map(holder -> holder.info)
+                .collect(Collectors.toList());
+        this.threadPoolInfo = new ThreadPoolInfo(infos);
         this.scheduler = Scheduler.initScheduler(settings);
         TimeValue estimatedTimeInterval = ESTIMATED_TIME_INTERVAL_SETTING.get(settings);
         this.cachedTimeThread = new CachedTimeThread(EsExecutors.threadName(settings, "[timer]"), estimatedTimeInterval.millis());
@@ -239,16 +251,7 @@ public Counter estimatedTimeInMillisCounter() {
     }
 
     public ThreadPoolInfo info() {
-        List<Info> infos = new ArrayList<>();
-        for (ExecutorHolder holder : executors.values()) {
-            String name = holder.info.getName();
-            // no need to have info on "same" thread pool
-            if ("same".equals(name)) {
-                continue;
-            }
-            infos.add(holder.info);
-        }
-        return new ThreadPoolInfo(infos);
+        return threadPoolInfo;
     }
 
     public Info info(String name) {
@@ -655,32 +658,29 @@ public SizeValue getQueueSize() {
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject(name);
-            builder.field(Fields.TYPE, type.getType());
-            if (min != -1) {
-                builder.field(Fields.MIN, min);
-            }
-            if (max != -1) {
-                builder.field(Fields.MAX, max);
+            builder.field("type", type.getType());
+
+            if (type == ThreadPoolType.SCALING) {
+                assert min != -1;
+                builder.field("core", min);
+                assert max != -1;
+                builder.field("max", max);
+            } else {
+                assert max != -1;
+                builder.field("size", max);
             }
             if (keepAlive != null) {
-                builder.field(Fields.KEEP_ALIVE, keepAlive.toString());
+                builder.field("keep_alive", keepAlive.toString());
             }
             if (queueSize == null) {
-                builder.field(Fields.QUEUE_SIZE, -1);
+                builder.field("queue_size", -1);
             } else {
-                builder.field(Fields.QUEUE_SIZE, queueSize.singles());
+                builder.field("queue_size", queueSize.singles());
             }
             builder.endObject();
             return builder;
         }
 
-        static final class Fields {
-            static final String TYPE = "type";
-            static final String MIN = "min";
-            static final String MAX = "max";
-            static final String KEEP_ALIVE = "keep_alive";
-            static final String QUEUE_SIZE = "queue_size";
-        }
     }
 
     /**
From b56afebad1e983221762691ae126ae8aa41b349e Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sat, 17 Mar 2018 07:48:40 -0400
Subject: [PATCH 68/89] Fix creating keystore when upgrading (#29121)

When upgrading via the RPM package, we can run into a problem where the
keystore fails to be created. This arises because the %post script on
RPM runs after the new package files are installed but before the
removal of the old package files. This means that the lib folder can
contain files from both the old and the new package, and thus running
the create keystore tool can encounter JAR hell issues and fail. To
solve this, we move creating the keystore to the %posttrans script,
which runs after the old package files are removed. We only need to do
this on the RPM package, so we add a switch in the shared post-install
script.
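
For reference, a simplified sketch of the scriptlet ordering that RPM
uses during an upgrade (values in parentheses are what $1 contains when
the scriptlet runs):

 %pre of new package         ($1 == 2)
 new package files installed
 %post of new package        ($1 == 2)  <- old jars still on disk, JAR hell
 %preun of old package       ($1 == 1)
 old package files removed
 %postun of old package      ($1 == 1)
 %posttrans of new package              <- old jars gone, safe to create the keystore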
---
 distribution/packages/build.gradle                 | 3 +++
 distribution/packages/src/common/scripts/postinst  | 7 ++++++-
 distribution/packages/src/common/scripts/posttrans | 8 ++++++++
 3 files changed, 17 insertions(+), 1 deletion(-)
 create mode 100644 distribution/packages/src/common/scripts/posttrans

diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index 0f47f8263a7c5..6c5d149a10a31 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -96,6 +96,9 @@ Closure commonPackageConfig(String type) {
     postInstall file("${scripts}/postinst")
     preUninstall file("${scripts}/prerm")
     postUninstall file("${scripts}/postrm")
+    if (type == 'rpm') {
+      postTrans file("${scripts}/posttrans")
+    }
 
     // top level "into" directive is not inherited from ospackage for some reason, so we must
     // specify it again explicitly for copying common files

diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst
index abc7c91b81d78..38e1f4452ad2a 100644
--- a/distribution/packages/src/common/scripts/postinst
+++ b/distribution/packages/src/common/scripts/postinst
@@ -19,18 +19,22 @@ case "$1" in
         if [ -n $2 ]; then
             IS_UPGRADE=true
         fi
+        PACKAGE=deb
     ;;
     abort-upgrade|abort-remove|abort-deconfigure)
+        PACKAGE=deb
     ;;
 
     # RedHat ####################################################
     1)
        # If $1=1 this is an install
        IS_UPGRADE=false
+       PACKAGE=rpm
     ;;
     2)
        # If $1=2 this is an upgrade
        IS_UPGRADE=true
+       PACKAGE=rpm
     ;;
 
     *)
@@ -99,7 +103,8 @@ if [ -f ${path.env} ]; then
     chown root:elasticsearch ${path.env}
 fi
 
-if [ ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
+# the equivalent code for rpm is in posttrans
+if [ "$PACKAGE" = "deb" -a ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
     /usr/share/elasticsearch/bin/elasticsearch-keystore create
     chown root:elasticsearch /etc/elasticsearch/elasticsearch.keystore
     chmod 660 /etc/elasticsearch/elasticsearch.keystore

diff --git a/distribution/packages/src/common/scripts/posttrans b/distribution/packages/src/common/scripts/posttrans
new file mode 100644
index 0000000000000..d3550bdbed24b
--- /dev/null
+++ b/distribution/packages/src/common/scripts/posttrans
@@ -0,0 +1,8 @@
+if [ ! -f /etc/elasticsearch/elasticsearch.keystore ]; then
+    /usr/share/elasticsearch/bin/elasticsearch-keystore create
+    chown root:elasticsearch /etc/elasticsearch/elasticsearch.keystore
+    chmod 660 /etc/elasticsearch/elasticsearch.keystore
+    md5sum /etc/elasticsearch/elasticsearch.keystore > /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum
+fi
+
+${scripts.footer}
From cd165d1c4bda6a6dd3fbae4652285889b839148a Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Sat, 17 Mar 2018 11:06:05 -0400
Subject: [PATCH 69/89] Client: Wrap SSLHandshakeException in sync calls

Adds SSLHandshakeException to the list of Exceptions that are
specifically rethrown from the async thread so its type is preserved.
This should make it easier to debug synchronous calls with SSL issues.
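
For example, a synchronous caller can now catch the specific exception
type again; a sketch (the client setup and endpoint are placeholders,
not taken from this change):

 try {
     Response response = restClient.performRequest("GET", "/");
 } catch (SSLHandshakeException e) {
     // the stack trace points at this calling thread, while the
     // exception raised on the async thread is preserved as the cause
     e.getCause().printStackTrace();
 }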
---
 .../src/main/java/org/elasticsearch/client/RestClient.java | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
index 29e23f948bddb..4aa1a9d815cf4 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
@@ -72,6 +72,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
+import javax.net.ssl.SSLHandshakeException;
 
 /**
  * Client that connects to an Elasticsearch cluster through HTTP.
@@ -717,6 +718,11 @@ Response get() throws IOException {
                 e.initCause(exception);
                 throw e;
             }
+            if (exception instanceof SSLHandshakeException) {
+                SSLHandshakeException e = new SSLHandshakeException(exception.getMessage());
+                e.initCause(exception);
+                throw e;
+            }
             if (exception instanceof IOException) {
                 throw new IOException(exception.getMessage(), exception);
             }
From c9749180a1a966091949db155cbb4e1396972525 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Sat, 17 Mar 2018 11:38:22 -0400
Subject: [PATCH 70/89] Backport - Do not renew sync-id PR to 5.6 and 6.3

Relates #29103

---
 .../org/elasticsearch/indices/flush/SyncedFlushService.java | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
index dedd577954d70..65bacd94910b6 100644
--- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
+++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
@@ -569,7 +569,11 @@ boolean includeNumDocs(Version version) {
     }
 
     boolean includeExistingSyncId(Version version) {
-        return version.onOrAfter(Version.V_7_0_0_alpha1);
+        if (version.major == Version.V_5_6_9.major) {
+            return version.onOrAfter(Version.V_5_6_9);
+        } else {
+            return version.onOrAfter(Version.V_6_3_0);
+        }
     }
 
     @Override
From f1029aaad5b49084aed543db1a7d9c0e60fb7264 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Sat, 17 Mar 2018 17:43:20 -0400
Subject: [PATCH 71/89] getMinGenerationForSeqNo should acquire read lock (#29126)

The method Translog#getMinGenerationForSeqNo does not modify the
current translog but only accesses it; it should therefore acquire the
readLock instead of the writeLock.

---
 .../main/java/org/elasticsearch/index/translog/Translog.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java
index 6a32ae14fdd3a..c34f851195a9f 100644
--- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -1515,7 +1515,7 @@ public static void writeOperationNoSize(BufferedChecksumStreamOutput out, Transl
      * @return the minimum generation for the sequence number
      */
    public TranslogGeneration getMinGenerationForSeqNo(final long seqNo) {
-        try (ReleasableLock ignored = writeLock.acquire()) {
+        try (ReleasableLock ignored = readLock.acquire()) {
            /*
             * When flushing, the engine will ask the translog for the minimum generation that could contain any sequence number after the
             * local checkpoint.
Immediately after flushing, there will be no such generation, so this minimum generation in this case will
From 1d8c507684a47bd298c6f090ee387fc246a337d8 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Sat, 17 Mar 2018 20:08:03 -0400
Subject: [PATCH 72/89] Client: Add missing test

Previously I added wrapping for an SSL exception without a test. That
was lame. This adds the test.

---
 .../client/SyncResponseListenerTests.java | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java
index f9406a6c4902d..683b23a596a16 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java
@@ -35,6 +35,7 @@
 import java.io.StringWriter;
 import java.net.SocketTimeoutException;
 import java.net.URISyntaxException;
+import javax.net.ssl.SSLHandshakeException;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -211,6 +212,23 @@ public void testConnectionClosedExceptionIsWrapped() throws Exception {
         }
     }
 
+    public void testSSLHandshakeExceptionIsWrapped() throws Exception {
+        RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000);
+        SSLHandshakeException exception = new SSLHandshakeException(randomAsciiAlphanumOfLength(5));
+        syncResponseListener.onFailure(exception);
+        try {
+            syncResponseListener.get();
+            fail("get should have failed");
+        } catch (SSLHandshakeException e) {
+            // We preserve the original exception in the cause
+            assertSame(exception, e.getCause());
+            // We copy the message
+            assertEquals(exception.getMessage(), e.getMessage());
+            // And we do all that so the thrown exception has our method in the stacktrace
+            assertExceptionStackContainsCallingMethod(e);
+        }
+    }
+
     public void testIOExceptionIsBuiltCorrectly() throws Exception {
         RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000);
         IOException ioException = new IOException();
From 29fedb2669b51128905226041c719b89ec53ffe6 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sun, 18 Mar 2018 15:32:37 -0400
Subject: [PATCH 73/89] Configure heap dump path for archive packages (#29130)

This is a follow up to a previous change which set the heap dump path
for the package distributions. The observation here is that we always
set the working directory of Elasticsearch to the root of the
installation (i.e., Elasticsearch home). Therefore, we can specify the
heap dump path relative to this directory and default it to the data
directory, similar to the package distributions.
--- distribution/build.gradle | 2 +- distribution/src/config/jvm.options | 4 +-- .../heap-dump-path.asciidoc | 28 ++++++++----------- 3 files changed, 15 insertions(+), 19 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index f3fe27168f70e..c7dad69180865 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -262,7 +262,7 @@ subprojects { 'heap.dump.path': [ 'deb': "-XX:HeapDumpPath=/var/lib/elasticsearch", 'rpm': "-XX:HeapDumpPath=/var/lib/elasticsearch", - 'def': "#-XX:HeapDumpPath=/heap/dump/path" + 'def': "-XX:HeapDumpPath=data" ], 'error.file': [ diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index e862343de8d6b..c5c0f44caeb7a 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -77,8 +77,8 @@ # heap dumps are created in the working directory of the JVM -XX:+HeapDumpOnOutOfMemoryError -# specify an alternative path for heap dumps -# ensure the directory exists and has sufficient space +# specify an alternative path for heap dumps; ensure the directory exists and +# has sufficient space ${heap.dump.path} # specify an alternative path for JVM fatal error logs diff --git a/docs/reference/setup/important-settings/heap-dump-path.asciidoc b/docs/reference/setup/important-settings/heap-dump-path.asciidoc index def7d5962fa38..b0d301b21d0b8 100644 --- a/docs/reference/setup/important-settings/heap-dump-path.asciidoc +++ b/docs/reference/setup/important-settings/heap-dump-path.asciidoc @@ -1,19 +1,15 @@ [[heap-dump-path]] === JVM heap dump path -The <> and <> package distributions default to configuring -the JVM to dump the heap on out of memory exceptions to -`/var/lib/elasticsearch`. If this path is not suitable for storing heap dumps, -you should modify the entry `-XX:HeapDumpPath=/var/lib/elasticsearch` in -<> to an alternate path. If you specify a filename -instead of a directory, the JVM will repeatedly use the same file; this is one -mechanism for preventing heap dumps from accumulating in the heap dump path. -Alternatively, you can configure a scheduled task via your OS to remove heap -dumps that are older than a configured age. - -Note that the archive distributions do not configure the heap dump path by -default. Instead, the JVM will default to dumping to the working directory for -the Elasticsearch process. If you wish to configure a heap dump path, you should -modify the entry `#-XX:HeapDumpPath=/heap/dump/path` in -<> to remove the comment marker `#` and to specify an -actual path. +By default, Elasticsearch configures the JVM to dump the heap on out of +memory exceptions to the default data directory (this is +`/var/lib/elasticsearch` for the <> and <> package +distributions, and the `data` directory under the root of the +Elasticsearch installation for the <> archive +distributions). If this path is not suitable for receiving heap dumps, +you should modify the entry `-XX:HeapDumpPath=...` in +<>. If you specify a fixed filename instead +of a directory, the JVM will repeatedly use the same file; this is one +mechanism for preventing heap dumps from accumulating in the heap dump +path. Alternatively, you can configure a scheduled task via your OS to +remove heap dumps that are older than a configured age. 
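+
+For example, a minimal `jvm.options` configuration that sends heap dumps
+to a dedicated directory might look like this (the path is illustrative
+only; any directory with sufficient free space will do):
+
+[source,txt]
+--------------------------------------------------
+-XX:+HeapDumpOnOutOfMemoryError
+-XX:HeapDumpPath=/var/tmp/elasticsearch
+--------------------------------------------------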
From 0abf51af3de5fceace2f7d3e4a09a0b29068076d Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sun, 18 Mar 2018 15:33:17 -0400
Subject: [PATCH 74/89] Configure error file for archive packages (#29129)

This is a follow up to a previous change which set the error file path
for the package distributions. The observation here is that we always
set the working directory of Elasticsearch to the root of the
installation (i.e., Elasticsearch home). Therefore, we can specify the
error file path relative to this directory and default it to the logs
directory, similar to the package distributions.

---
 distribution/build.gradle                          |  2 +-
 .../important-settings/error-file.asciidoc         | 20 ++++++++-----------
 2 files changed, 9 insertions(+), 13 deletions(-)

diff --git a/distribution/build.gradle b/distribution/build.gradle
index c7dad69180865..20758deb918c0 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -268,7 +268,7 @@ subprojects {
   'error.file': [
     'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
     'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
-    'def': "#-XX:ErrorFile=/error/file/path"
+    'def': "-XX:ErrorFile=logs/hs_err_pid%p.log"
   ],
 
   'stopping.timeout': [
diff --git a/docs/reference/setup/important-settings/error-file.asciidoc b/docs/reference/setup/important-settings/error-file.asciidoc
index 37f1d2a0b14ed..d58a752ac28fa 100644
--- a/docs/reference/setup/important-settings/error-file.asciidoc
+++ b/docs/reference/setup/important-settings/error-file.asciidoc
@@ -1,16 +1,12 @@
 [[error-file-path]]
 === JVM fatal error logs
 
-The <> and <> package distributions default to configuring
-the JVM to write fatal error logs to `/var/lib/elasticsearch`; these are logs
-produced by the JVM when it encounters a fatal error (e.g., a segmentation
-fault). If this path is not suitable for receiving logs, you should modify the
-entry `-XX:ErrorFile=/var/lib/elasticsearch/hs_err_pid%p.log` in
+By default, Elasticsearch configures the JVM to write fatal error logs
+to the default logging directory (this is `/var/log/elasticsearch` for
+the <> and <> package distributions, and the `logs`
+directory under the root of the Elasticsearch installation for the
+<> archive distributions). These are logs
+produced by the JVM when it encounters a fatal error (e.g., a
+segmentation fault). If this path is not suitable for receiving logs,
+you should modify the entry `-XX:ErrorFile=...` in
 <> to an alternate path.
-
-Note that the archive distributions do not configure the error file path by
-default. Instead, the JVM will default to writing to the working directory for
-the Elasticsearch process. If you wish to configure an error file path, you
-should modify the entry `#-XX:ErrorFile=/error/file/path` in
-<> to remove the comment marker `#` and to specify an
-actual path.
From 7608480a621aaa0654349ff6876250adeb64b9b7 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Mon, 19 Mar 2018 07:04:47 +0000
Subject: [PATCH 75/89] Update allocation awareness docs (#29116)

Update allocation awareness docs

Today, the docs imply that if multiple attributes are specified the
whole combination of values is considered as a single entity when
performing allocation. In fact, each attribute is considered
separately. This change fixes this discrepancy. It also replaces the
use of the term "awareness zone" with "zone or domain", and reformats
some paragraphs to the right width.
Fixes #29105 --- .../cluster/allocation_awareness.asciidoc | 62 +++++++++---------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index 8ffa4e6b06d2a..9eb47e0730c93 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -2,8 +2,8 @@ === Shard Allocation Awareness When running nodes on multiple VMs on the same physical server, on multiple -racks, or across multiple awareness zones, it is more likely that two nodes on -the same physical server, in the same rack, or in the same awareness zone will +racks, or across multiple zones or domains, it is more likely that two nodes on +the same physical server, in the same rack, or in the same zone or domain will crash at the same time, rather than two unrelated nodes crashing simultaneously. @@ -25,7 +25,7 @@ attribute called `rack_id` -- we could use any attribute name. For example: ---------------------- <1> This setting could also be specified in the `elasticsearch.yml` config file. -Now, we need to setup _shard allocation awareness_ by telling Elasticsearch +Now, we need to set up _shard allocation awareness_ by telling Elasticsearch which attributes to use. This can be configured in the `elasticsearch.yml` file on *all* master-eligible nodes, or it can be set (and changed) with the <> API. @@ -37,51 +37,51 @@ For our example, we'll set the value in the config file: cluster.routing.allocation.awareness.attributes: rack_id -------------------------------------------------------- -With this config in place, let's say we start two nodes with `node.attr.rack_id` -set to `rack_one`, and we create an index with 5 primary shards and 1 replica -of each primary. All primaries and replicas are allocated across the two -nodes. +With this config in place, let's say we start two nodes with +`node.attr.rack_id` set to `rack_one`, and we create an index with 5 primary +shards and 1 replica of each primary. All primaries and replicas are +allocated across the two nodes. Now, if we start two more nodes with `node.attr.rack_id` set to `rack_two`, Elasticsearch will move shards across to the new nodes, ensuring (if possible) -that no two copies of the same shard will be in the same rack. However if `rack_two` -were to fail, taking down both of its nodes, Elasticsearch will still allocate the lost -shard copies to nodes in `rack_one`. +that no two copies of the same shard will be in the same rack. However if +`rack_two` were to fail, taking down both of its nodes, Elasticsearch will +still allocate the lost shard copies to nodes in `rack_one`. .Prefer local shards ********************************************* When executing search or GET requests, with shard awareness enabled, Elasticsearch will prefer using local shards -- shards in the same awareness -group -- to execute the request. This is usually faster than crossing racks or -awareness zones. +group -- to execute the request. This is usually faster than crossing between +racks or across zone boundaries. ********************************************* -Multiple awareness attributes can be specified, in which case the combination -of values from each attribute is considered to be a separate value. +Multiple awareness attributes can be specified, in which case each attribute +is considered separately when deciding where to allocate the shards. 
[source,yaml]
-------------------------------------------------------------
cluster.routing.allocation.awareness.attributes: rack_id,zone
-------------------------------------------------------------
 
-NOTE: When using awareness attributes, shards will not be allocated to
-nodes that don't have values set for those attributes.
+NOTE: When using awareness attributes, shards will not be allocated to nodes
+that don't have values set for those attributes.
 
-NOTE: Number of primary/replica of a shard allocated on a specific group
-of nodes with the same awareness attribute value is determined by the number
-of attribute values. When the number of nodes in groups is unbalanced and
-there are many replicas, replica shards may be left unassigned.
+NOTE: Number of primary/replica of a shard allocated on a specific group of
+nodes with the same awareness attribute value is determined by the number of
+attribute values. When the number of nodes in groups is unbalanced and there
+are many replicas, replica shards may be left unassigned.
 
 [float]
 [[forced-awareness]]
 === Forced Awareness
 
-Imagine that you have two awareness zones and enough hardware across the two
-zones to host all of your primary and replica shards. But perhaps the
-hardware in a single zone, while sufficient to host half the shards, would be
-unable to host *ALL* the shards.
+Imagine that you have two zones and enough hardware across the two zones to
+host all of your primary and replica shards. But perhaps the hardware in a
+single zone, while sufficient to host half the shards, would be unable to host
+*ALL* the shards.
 
 With ordinary awareness, if one zone lost contact with the other zone,
 Elasticsearch would assign all of the missing replica shards to a single zone.
@@ -91,9 +91,9 @@ remaining zone to be overloaded.
 Forced awareness solves this problem by *NEVER* allowing copies of the same
 shard to be allocated to the same zone.
 
-For example, lets say we have an awareness attribute called `zone`, and
-we know we are going to have two zones, `zone1` and `zone2`. Here is how
-we can force awareness on a node:
+For example, let's say we have an awareness attribute called `zone`, and we
+know we are going to have two zones, `zone1` and `zone2`. Here is how we can
+force awareness on a node:
 
 [source,yaml]
 -------------------------------------------------------------------
 cluster.routing.allocation.awareness.force.zone.values: zone1,zone2 <1>
 cluster.routing.allocation.awareness.attributes: zone
 -------------------------------------------------------------------
 <1> We must list all possible values that the `zone` attribute can have.
 
-Now, if we start 2 nodes with `node.attr.zone` set to `zone1` and create an index
-with 5 shards and 1 replica. The index will be created, but only the 5 primary
-shards will be allocated (with no replicas). Only when we start more nodes
-with `node.attr.zone` set to `zone2` will the replicas be allocated.
+Now, if we start 2 nodes with `node.attr.zone` set to `zone1` and create an
+index with 5 shards and 1 replica, the index will be created, but only the 5
+primary shards will be allocated (with no replicas). Only when we start more
+nodes with `node.attr.zone` set to `zone2` will the replicas be allocated.
The `cluster.routing.allocation.awareness.*` settings can all be updated dynamically on a live cluster with the From b57bd695f237693a22c994cf544ec2ecbaebaa1c Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 19 Mar 2018 09:26:17 +0100 Subject: [PATCH 76/89] Small code cleanups and refactorings in persistent tasks (#29109) This commit consists of small code cleanups and refactorings in the persistent tasks framework. Most changes are in PersistentTasksClusterService where some methods have been renamed or merged together, documentation has been added, unused code removed in order to improve readability of the code. --- .../PersistentTasksClusterService.java | 155 +++++++------- .../PersistentTasksCustomMetaData.java | 25 +-- .../persistent/PersistentTasksExecutor.java | 4 +- .../PersistentTasksClusterServiceTests.java | 193 ++++++++++++++---- 4 files changed, 242 insertions(+), 135 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 7c395365c1b88..83bd1f4ca5b2a 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -19,7 +19,6 @@ package org.elasticsearch.persistent; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -33,9 +32,9 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.tasks.Task; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; +import org.elasticsearch.tasks.Task; import java.util.Objects; @@ -52,29 +51,31 @@ public PersistentTasksClusterService(Settings settings, PersistentTasksExecutorR this.clusterService = clusterService; clusterService.addListener(this); this.registry = registry; - } /** * Creates a new persistent task on master node * - * @param action the action name - * @param params params - * @param listener the listener that will be called when task is started + * @param taskId the task's id + * @param taskName the task's name + * @param taskParams the task's parameters + * @param listener the listener that will be called when task is started */ - public void createPersistentTask(String taskId, String action, @Nullable Params params, + public void createPersistentTask(String taskId, String taskName, @Nullable Params taskParams, ActionListener> listener) { clusterService.submitStateUpdateTask("create persistent task", new ClusterStateUpdateTask() { @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData.Builder builder = builder(currentState); if (builder.hasTask(taskId)) { throw new ResourceAlreadyExistsException("task with id {" + taskId + "} already exist"); } - validate(action, currentState, params); - final Assignment assignment; - assignment = getAssignement(action, currentState, params); - return update(currentState, builder.addTask(taskId, action, params, assignment)); + + PersistentTasksExecutor taskExecutor = registry.getPersistentTaskExecutorSafe(taskName); + 
taskExecutor.validate(taskParams, currentState); + + Assignment assignment = createAssignment(taskName, taskParams, currentState); + return update(currentState, builder.addTask(taskId, taskName, taskParams, assignment)); } @Override @@ -95,7 +96,6 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS }); } - /** * Restarts a record about a running persistent task from cluster state * @@ -114,7 +114,7 @@ public void completePersistentTask(String id, long allocationId, Exception failu } clusterService.submitStateUpdateTask(source, new ClusterStateUpdateTask() { @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData.Builder tasksInProgress = builder(currentState); if (tasksInProgress.hasTask(id, allocationId)) { tasksInProgress.removeTask(id); @@ -185,7 +185,7 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS public void updatePersistentTaskStatus(String id, long allocationId, Task.Status status, ActionListener> listener) { clusterService.submitStateUpdateTask("update task status", new ClusterStateUpdateTask() { @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData.Builder tasksInProgress = builder(currentState); if (tasksInProgress.hasTask(id, allocationId)) { return update(currentState, tasksInProgress.updateTaskStatus(id, status)); @@ -211,93 +211,85 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS }); } - private Assignment getAssignement(String taskName, ClusterState currentState, - @Nullable Params params) { - PersistentTasksExecutor persistentTasksExecutor = registry.getPersistentTaskExecutorSafe(taskName); - return persistentTasksExecutor.getAssignment(params, currentState); - } + /** + * Creates a new {@link Assignment} for the given persistent task. 
+ * + * @param taskName the task's name + * @param taskParams the task's parameters + * @param currentState the current {@link ClusterState} - private void validate(String taskName, ClusterState currentState, @Nullable Params params) { + * @return a new {@link Assignment} + */ + private Assignment createAssignment(final String taskName, + final @Nullable Params taskParams, + final ClusterState currentState) { PersistentTasksExecutor persistentTasksExecutor = registry.getPersistentTaskExecutorSafe(taskName); - persistentTasksExecutor.validate(params, currentState); + return persistentTasksExecutor.getAssignment(taskParams, currentState); } @Override public void clusterChanged(ClusterChangedEvent event) { if (event.localNodeMaster()) { - logger.trace("checking task reassignment for cluster state {}", event.state().getVersion()); - if (reassignmentRequired(event, this::getAssignement)) { - logger.trace("task reassignment is needed"); - reassignTasks(); - } else { - logger.trace("task reassignment is not needed"); + if (shouldReassignPersistentTasks(event)) { + logger.trace("checking task reassignment for cluster state {}", event.state().getVersion()); + clusterService.submitStateUpdateTask("reassign persistent tasks", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + return reassignTasks(currentState); + } + + @Override + public void onFailure(String source, Exception e) { + logger.warn("failed to reassign persistent tasks", e); + } + }); } } } - interface ExecutorNodeDecider { - Assignment getAssignment(String action, ClusterState currentState, Params params); - } + /** + * Returns true if the cluster state change(s) require to reassign some persistent tasks. It can happen in the following + * situations: a node left or is added, the routing table changed, the master node changed or the persistent tasks changed. 
+ */ + boolean shouldReassignPersistentTasks(final ClusterChangedEvent event) { + final PersistentTasksCustomMetaData tasks = event.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + if (tasks == null) { + return false; + } - static boolean reassignmentRequired(ClusterChangedEvent event, ExecutorNodeDecider decider) { - PersistentTasksCustomMetaData tasks = event.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData prevTasks = event.previousState().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - if (tasks != null && (Objects.equals(tasks, prevTasks) == false || - event.nodesChanged() || - event.routingTableChanged() || - event.previousState().nodes().isLocalNodeElectedMaster() == false)) { - // We need to check if removed nodes were running any of the tasks and reassign them - boolean reassignmentRequired = false; - for (PersistentTask taskInProgress : tasks.tasks()) { - if (taskInProgress.needsReassignment(event.state().nodes())) { - // there is an unassigned task or task with a disappeared node - we need to try assigning it - if (Objects.equals(taskInProgress.getAssignment(), - decider.getAssignment(taskInProgress.getTaskName(), event.state(), taskInProgress.getParams())) == false) { - // it looks like a assignment for at least one task is possible - let's trigger reassignment - reassignmentRequired = true; - break; - } + boolean masterChanged = event.previousState().nodes().isLocalNodeElectedMaster() == false; + if (persistentTasksChanged(event) || event.nodesChanged() || event.routingTableChanged() || masterChanged) { + for (PersistentTask task : tasks.tasks()) { + if (needsReassignment(task.getAssignment(), event.state().nodes())) { + Assignment assignment = createAssignment(task.getTaskName(), task.getParams(), event.state()); + if (Objects.equals(assignment, task.getAssignment()) == false) { + return true; + } } } - return reassignmentRequired; } return false; } /** - * Evaluates the cluster state and tries to assign tasks to nodes + * Evaluates the cluster state and tries to assign tasks to nodes. 
+ * + * @param currentState the cluster state to analyze + * @return an updated version of the cluster state */ - public void reassignTasks() { - clusterService.submitStateUpdateTask("reassign persistent tasks", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - return reassignTasks(currentState, logger, PersistentTasksClusterService.this::getAssignement); - } - - @Override - public void onFailure(String source, Exception e) { - logger.warn("Unsuccessful persistent task reassignment", e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - - } - }); - } - - static ClusterState reassignTasks(ClusterState currentState, Logger logger, ExecutorNodeDecider decider) { - PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + ClusterState reassignTasks(final ClusterState currentState) { ClusterState clusterState = currentState; - DiscoveryNodes nodes = currentState.nodes(); + + final PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); if (tasks != null) { logger.trace("reassigning {} persistent tasks", tasks.tasks().size()); + final DiscoveryNodes nodes = currentState.nodes(); + // We need to check if removed nodes were running any of the tasks and reassign them for (PersistentTask task : tasks.tasks()) { - if (task.needsReassignment(nodes)) { - // there is an unassigned task - we need to try assigning it - Assignment assignment = decider.getAssignment(task.getTaskName(), clusterState, task.getParams()); + if (needsReassignment(task.getAssignment(), nodes)) { + Assignment assignment = createAssignment(task.getTaskName(), task.getParams(), clusterState); if (Objects.equals(assignment, task.getAssignment()) == false) { logger.trace("reassigning task {} from node {} to node {}", task.getId(), task.getAssignment().getExecutorNode(), assignment.getExecutorNode()); @@ -313,6 +305,17 @@ static ClusterState reassignTasks(ClusterState currentState, Logger logger, Exec return clusterState; } + /** Returns true if the persistent tasks are not equal between the previous and the current cluster state **/ + static boolean persistentTasksChanged(final ClusterChangedEvent event) { + String type = PersistentTasksCustomMetaData.TYPE; + return Objects.equals(event.state().metaData().custom(type), event.previousState().metaData().custom(type)) == false; + } + + /** Returns true if the task is not assigned or is assigned to a non-existing node */ + static boolean needsReassignment(final Assignment assignment, final DiscoveryNodes nodes) { + return (assignment.isAssigned() == false || nodes.nodeExists(assignment.getExecutorNode()) == false); + } + private static PersistentTasksCustomMetaData.Builder builder(ClusterState currentState) { return PersistentTasksCustomMetaData.builder(currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); } diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java index ee45eb8ffad28..6611ff7f2a3cc 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java @@ -145,7 +145,6 @@ private TaskDescriptionBuilder setStatus(Status status) { } } - public Collection> tasks() { return 
this.tasks.values(); } @@ -165,12 +164,6 @@ public Collection> findTasks(String taskName, Predicate> predicate) { - return this.tasks().stream() - .filter(p -> taskName.equals(p.getTaskName())) - .anyMatch(predicate); - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -279,7 +272,6 @@ public static class PersistentTask

implements Wr @Nullable private final Long allocationIdOnLastStatusUpdate; - public PersistentTask(String id, String taskName, P params, long allocationId, Assignment assignment) { this(id, allocationId, taskName, params, null, assignment, null); } @@ -395,13 +387,6 @@ public boolean isAssigned() { return assignment.isAssigned(); } - /** - * Returns true if the tasks is not stopped and unassigned or assigned to a non-existing node. - */ - public boolean needsReassignment(DiscoveryNodes nodes) { - return (assignment.isAssigned() == false || nodes.nodeExists(assignment.getExecutorNode()) == false); - } - @Nullable public Status getStatus() { return status; @@ -522,16 +507,14 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOE return readDiffFrom(MetaData.Custom.class, TYPE, in); } - public long getLastAllocationId() { - return lastAllocationId; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("last_allocation_id", lastAllocationId); builder.startArray("tasks"); - for (PersistentTask entry : tasks.values()) { - entry.toXContent(builder, params); + { + for (PersistentTask entry : tasks.values()) { + entry.toXContent(builder, params); + } } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java index ed61ad5805391..0a1e2095934ef 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java @@ -95,9 +95,7 @@ protected DiscoveryNode selectLeastLoadedNode(ClusterState clusterState, Predica *

* Throws an exception if the supplied params cannot be executed on the cluster in the current state. */ - public void validate(Params params, ClusterState clusterState) { - - } + public void validate(Params params, ClusterState clusterState) {} /** * Creates a AllocatedPersistentTask for communicating with task manager diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java index 1169ff91e1308..e470c5028aa8f 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java @@ -29,31 +29,42 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.function.BiFunction; import static java.util.Collections.emptyMap; +import static java.util.Collections.singleton; +import static org.elasticsearch.persistent.PersistentTasksClusterService.needsReassignment; +import static org.elasticsearch.persistent.PersistentTasksClusterService.persistentTasksChanged; import static org.elasticsearch.persistent.PersistentTasksExecutor.NO_NODE_FOUND; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; public class PersistentTasksClusterServiceTests extends ESTestCase { public void testReassignmentRequired() { + final PersistentTasksClusterService service = createService((params, clusterState) -> + "never_assign".equals(((TestParams) params).getTestParam()) ? 
NO_NODE_FOUND : randomNodeAssignment(clusterState.nodes()) + ); + int numberOfIterations = randomIntBetween(1, 30); ClusterState clusterState = initialState(); for (int i = 0; i < numberOfIterations; i++) { @@ -66,17 +77,7 @@ public void testReassignmentRequired() { clusterState = insignificantChange(clusterState); } ClusterChangedEvent event = new ClusterChangedEvent("test", clusterState, previousState); - assertThat(dumpEvent(event), PersistentTasksClusterService.reassignmentRequired(event, - new PersistentTasksClusterService.ExecutorNodeDecider() { - @Override - public Assignment getAssignment( - String action, ClusterState currentState, Params params) { - if ("never_assign".equals(((TestParams) params).getTestParam())) { - return NO_NODE_FOUND; - } - return randomNodeAssignment(currentState.nodes()); - } - }), equalTo(significant)); + assertThat(dumpEvent(event), service.shouldReassignPersistentTasks(event), equalTo(significant)); } } @@ -175,6 +176,115 @@ public void testReassignTasks() { } } + public void testPersistentTasksChangedNoTasks() { + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_1", buildNewFakeTransportAddress(), Version.CURRENT)) + .build(); + + ClusterState previous = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .build(); + ClusterState current = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .build(); + + assertFalse("persistent tasks unchanged (no tasks)", + persistentTasksChanged(new ClusterChangedEvent("test", current, previous))); + } + + public void testPersistentTasksChangedTaskAdded() { + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_1", buildNewFakeTransportAddress(), Version.CURRENT)) + .build(); + + ClusterState previous = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .build(); + + PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder() + .addTask("_task_1", "test", null, new Assignment(null, "_reason")) + .build(); + + ClusterState current = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasks)) + .build(); + + assertTrue("persistent tasks changed (task added)", + persistentTasksChanged(new ClusterChangedEvent("test", current, previous))); + } + + public void testPersistentTasksChangedTaskRemoved() { + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("_node_2", buildNewFakeTransportAddress(), Version.CURRENT)) + .build(); + + PersistentTasksCustomMetaData previousTasks = PersistentTasksCustomMetaData.builder() + .addTask("_task_1", "test", null, new Assignment("_node_1", "_reason")) + .addTask("_task_2", "test", null, new Assignment("_node_1", "_reason")) + .addTask("_task_3", "test", null, new Assignment("_node_2", "_reason")) + .build(); + + ClusterState previous = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, previousTasks)) + .build(); + + PersistentTasksCustomMetaData currentTasks = PersistentTasksCustomMetaData.builder() + .addTask("_task_1", "test", null, new Assignment("_node_1", "_reason")) + .addTask("_task_3", "test", null, new Assignment("_node_2", "_reason")) + .build(); + + ClusterState current = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + 
.metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, currentTasks)) + .build(); + + assertTrue("persistent tasks changed (task removed)", + persistentTasksChanged(new ClusterChangedEvent("test", current, previous))); + } + + public void testPersistentTasksAssigned() { + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("_node_2", buildNewFakeTransportAddress(), Version.CURRENT)) + .build(); + + PersistentTasksCustomMetaData previousTasks = PersistentTasksCustomMetaData.builder() + .addTask("_task_1", "test", null, new Assignment("_node_1", "")) + .addTask("_task_2", "test", null, new Assignment(null, "unassigned")) + .build(); + + ClusterState previous = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, previousTasks)) + .build(); + + PersistentTasksCustomMetaData currentTasks = PersistentTasksCustomMetaData.builder() + .addTask("_task_1", "test", null, new Assignment("_node_1", "")) + .addTask("_task_2", "test", null, new Assignment("_node_2", "")) + .build(); + + ClusterState current = ClusterState.builder(new ClusterName("_name")) + .nodes(nodes) + .metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, currentTasks)) + .build(); + + assertTrue("persistent tasks changed (task assigned)", + persistentTasksChanged(new ClusterChangedEvent("test", current, previous))); + } + + public void testNeedsReassignment() { + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("_node_2", buildNewFakeTransportAddress(), Version.CURRENT)) + .build(); + + assertTrue(needsReassignment(new Assignment(null, "unassigned"), nodes)); + assertTrue(needsReassignment(new Assignment("_node_left", "assigned to a node that left"), nodes)); + assertFalse(needsReassignment(new Assignment("_node_1", "assigned"), nodes)); + } private void addTestNodes(DiscoveryNodes.Builder nodes, int nonLocalNodesCount) { for (int i = 0; i < nonLocalNodesCount; i++) { @@ -183,29 +293,25 @@ private void addTestNodes(DiscoveryNodes.Builder nodes, int nonLocalNodesCount) } private ClusterState reassign(ClusterState clusterState) { - return PersistentTasksClusterService.reassignTasks(clusterState, logger, - new PersistentTasksClusterService.ExecutorNodeDecider() { - @Override - public Assignment getAssignment( - String action, ClusterState currentState, Params params) { - TestParams testParams = (TestParams) params; - switch (testParams.getTestParam()) { - case "assign_me": - return randomNodeAssignment(currentState.nodes()); - case "dont_assign_me": - return NO_NODE_FOUND; - case "fail_me_if_called": - fail("the decision decider shouldn't be called on this task"); - return null; - case "assign_one": - return assignOnlyOneTaskAtATime(currentState); - default: - fail("unknown param " + testParams.getTestParam()); - } - return NO_NODE_FOUND; - } - }); + PersistentTasksClusterService service = createService((params, currentState) -> { + TestParams testParams = (TestParams) params; + switch (testParams.getTestParam()) { + case "assign_me": + return randomNodeAssignment(currentState.nodes()); + case "dont_assign_me": + return NO_NODE_FOUND; + case "fail_me_if_called": + fail("the decision decider shouldn't be called on this task"); + return null; + case "assign_one": + return 
assignOnlyOneTaskAtATime(currentState); + default: + fail("unknown param " + testParams.getTestParam()); + } + return NO_NODE_FOUND; + }); + return service.reassignTasks(clusterState); } private Assignment assignOnlyOneTaskAtATime(ClusterState clusterState) { @@ -450,4 +556,21 @@ private void changeRoutingTable(MetaData.Builder metaData, RoutingTable.Builder metaData.put(indexMetaData, false); routingTable.addAsNew(indexMetaData); } + + /** Creates a PersistentTasksClusterService with a single PersistentTasksExecutor implemented by a BiFunction **/ + static
<P extends PersistentTaskParams>
PersistentTasksClusterService createService(final BiFunction<P, ClusterState, Assignment> fn) { + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Settings.EMPTY, + singleton(new PersistentTasksExecutor
<P>
(Settings.EMPTY, TestPersistentTasksExecutor.NAME, null) { + @Override + public Assignment getAssignment(P params, ClusterState clusterState) { + return fn.apply(params, clusterState); + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, P params, Task.Status status) { + throw new UnsupportedOperationException(); + } + })); + return new PersistentTasksClusterService(Settings.EMPTY, registry, mock(ClusterService.class)); + } } From 0f93b7abdf65425344f5cedda3e6b04b31e910d4 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 19 Mar 2018 09:46:53 +0100 Subject: [PATCH 77/89] Fix compilation errors in ML integration tests After elastic/elasticsearch#29109, the `needsReassignment` method has been moved to the PersistentTasksClusterService. This commit fixes some compilation errors in tests I introduced. --- .../elasticsearch/persistent/PersistentTasksClusterService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 83bd1f4ca5b2a..9e064c3d20924 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -312,7 +312,7 @@ static boolean persistentTasksChanged(final ClusterChangedEvent event) { } /** Returns true if the task is not assigned or is assigned to a non-existing node */ - static boolean needsReassignment(final Assignment assignment, final DiscoveryNodes nodes) { + public static boolean needsReassignment(final Assignment assignment, final DiscoveryNodes nodes) { return (assignment.isAssigned() == false || nodes.nodeExists(assignment.getExecutorNode()) == false); } From bf05c600c4822ef049443e6469fed3a6f7e4347a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 19 Mar 2018 10:52:50 -0400 Subject: [PATCH 78/89] REST: Include suppressed exceptions on failures (#29115) This modifies xcontent serialization of Exceptions to contain suppressed exceptions. If there are any suppressed exceptions, they are included in the exception response by default. The reasoning here is that they are fairly rare, but when they exist they almost always add extra useful information. Take, for example, the response when you specify two broken ingest pipelines: ``` { "error" : { "root_cause" : ...snip... "type" : "parse_exception", "reason" : "[field] required property is missing", "header" : { "processor_type" : "set", "property_name" : "field" }, "suppressed" : [ { "type" : "parse_exception", "reason" : "[field] required property is missing", "header" : { "processor_type" : "convert", "property_name" : "field" } } ] }, "status" : 400 } ``` Moreover, suppressed exceptions that come from 500-level errors should give us more useful debugging information.
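To make the mechanics concrete, here is a minimal, self-contained sketch of the traversal this change adds. It is illustrative only: `renderSuppressed` and the `writeException` callback are stand-ins for the real `generateThrowableXContent` and `XContentBuilder` machinery, and the JSON assembly is deliberately simplified.

```
import java.util.function.Consumer;

// Hedged sketch of the suppressed-exception rendering described above. Only
// the traversal mirrors the change; StringBuilder stands in for
// XContentBuilder and writeException for generateThrowableXContent.
public class SuppressedSketch {
    static void renderSuppressed(Throwable throwable, StringBuilder out, Consumer<Throwable> writeException) {
        Throwable[] allSuppressed = throwable.getSuppressed();
        if (allSuppressed.length > 0) { // the field is omitted entirely when nothing was suppressed
            out.append(",\"suppressed\":[");
            for (int i = 0; i < allSuppressed.length; i++) {
                if (i > 0) {
                    out.append(',');
                }
                writeException.accept(allSuppressed[i]); // reuse the shared exception writer
            }
            out.append(']');
        }
    }

    public static void main(String[] args) {
        Exception root = new IllegalStateException("primary failure");
        root.addSuppressed(new IllegalArgumentException("secondary failure"));
        StringBuilder out = new StringBuilder();
        renderSuppressed(root, out, t -> out.append("{\"reason\":\"").append(t.getMessage()).append("\"}"));
        System.out.println(out); // ,"suppressed":[{"reason":"secondary failure"}]
    }
}
```

Parsing is symmetric: the reader collects the entries of a `suppressed` array and re-attaches them with `addSuppressed`, as the hunks below show.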
Closes #23392 --- .../elasticsearch/ElasticsearchException.java | 21 ++++++++++++++++ .../ElasticsearchExceptionTests.java | 24 ++++++++++++++++--- 2 files changed, 42 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index bfa37808402c4..b1c02c4ac2718 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -85,6 +86,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte private static final String TYPE = "type"; private static final String REASON = "reason"; private static final String CAUSED_BY = "caused_by"; + private static final ParseField SUPPRESSED = new ParseField("suppressed"); private static final String STACK_TRACE = "stack_trace"; private static final String HEADER = "header"; private static final String ERROR = "error"; @@ -372,6 +374,17 @@ protected static void innerToXContent(XContentBuilder builder, Params params, if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) { builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(throwable)); } + + Throwable[] allSuppressed = throwable.getSuppressed(); + if (allSuppressed.length > 0) { + builder.startArray(SUPPRESSED.getPreferredName()); + for (Throwable suppressed : allSuppressed) { + builder.startObject(); + generateThrowableXContent(builder, params, suppressed); + builder.endObject(); + } + builder.endArray(); + } } private static void headerToXContent(XContentBuilder builder, String key, List values) throws IOException { @@ -416,6 +429,7 @@ private static ElasticsearchException innerFromXContent(XContentParser parser, b Map> metadata = new HashMap<>(); Map> headers = new HashMap<>(); List rootCauses = new ArrayList<>(); + List suppressed = new ArrayList<>(); for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { String currentFieldName = parser.currentName(); @@ -467,6 +481,10 @@ private static ElasticsearchException innerFromXContent(XContentParser parser, b while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { rootCauses.add(fromXContent(parser)); } + } else if (SUPPRESSED.match(currentFieldName, parser.getDeprecationHandler())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + suppressed.add(fromXContent(parser)); + } } else { // Parse the array and add each item to the corresponding list of metadata. // Arrays of objects are not supported yet and just ignored and skipped. 
@@ -507,6 +525,9 @@ private static ElasticsearchException innerFromXContent(XContentParser parser, b for (ElasticsearchException rootCause : rootCauses) { e.addSuppressed(rootCause); } + for (ElasticsearchException s : suppressed) { + e.addSuppressed(s); + } return e; } diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 4c095efbbf8aa..d3560fc6db355 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -77,7 +76,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItems; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.startsWith; @@ -602,6 +600,13 @@ public void testThrowableToAndFromXContent() throws IOException { final Tuple exceptions = randomExceptions(); final Throwable throwable = exceptions.v1(); + final ElasticsearchException expected = exceptions.v2(); + int suppressedCount = randomBoolean() ? 0 : between(1, 5); + for (int i = 0; i < suppressedCount; i++) { + final Tuple suppressed = randomExceptions(); + throwable.addSuppressed(suppressed.v1()); + expected.addSuppressed(suppressed.v2()); + } BytesReference throwableBytes = toShuffledXContent((builder, params) -> { ElasticsearchException.generateThrowableXContent(builder, params, throwable); @@ -615,7 +620,20 @@ public void testThrowableToAndFromXContent() throws IOException { assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } - assertDeepEquals(exceptions.v2(), parsedException); + assertDeepEquals(expected, parsedException); + + if (suppressedCount > 0) { + XContentBuilder builder = XContentBuilder.builder(xContent); + builder.startObject(); + ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS, throwable); + builder.endObject(); + throwableBytes = BytesReference.bytes(builder); + try (XContentParser parser = createParser(xContent, throwableBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + List keys = new ArrayList<>(parser.mapOrdered().keySet()); + assertEquals("last index should be [suppressed]", "suppressed", keys.get(keys.size() - 1)); + } + } } public void testUnknownFailureToAndFromXContent() throws IOException { From 3025295f7eb0fcdb635fa0f82f4c74196e633595 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 19 Mar 2018 08:54:10 -0600 Subject: [PATCH 79/89] Decouple Text and Geopoint from XContentBuilder (#29119) This removes the `Text` and `Geopoint` special handling from `XContentBuilder`. Instead, these classes now implement `ToXContentFragment` and render themselves accordingly. 
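As a rough illustration of the pattern (not the real Elasticsearch API: `Fragment` and `JsonBuilder` below are simplified stand-ins for `ToXContentFragment` and `XContentBuilder`), a value class that renders itself looks like this:

```
// Hedged sketch of the "render yourself" pattern this change adopts: the
// value class owns its serialization instead of the builder special-casing
// it. Fragment and JsonBuilder are simplified stand-ins, not real types.
interface Fragment {
    JsonBuilder toXContent(JsonBuilder builder);
}

class JsonBuilder {
    private final StringBuilder sb = new StringBuilder();

    JsonBuilder latlon(double lat, double lon) {
        sb.append("{\"lat\":").append(lat).append(",\"lon\":").append(lon).append('}');
        return this;
    }

    @Override
    public String toString() {
        return sb.toString();
    }
}

class Point implements Fragment {
    private final double lat;
    private final double lon;

    Point(double lat, double lon) {
        this.lat = lat;
        this.lon = lon;
    }

    @Override
    public JsonBuilder toXContent(JsonBuilder builder) {
        return builder.latlon(lat, lon); // mirrors GeoPoint#toXContent in the diff below
    }
}

class FragmentDemo {
    public static void main(String[] args) {
        // prints {"lat":52.52,"lon":13.4}
        System.out.println(new Point(52.52, 13.40).toXContent(new JsonBuilder()));
    }
}
```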
This allows us to further decouple XContentBuilder from Elasticsearch-specific classes so it can be factored into a standalone lib at a later time. Relates to #28504 --- .../elasticsearch/common/geo/GeoPoint.java | 11 +++++- .../org/elasticsearch/common/text/Text.java | 17 ++++++++- .../common/xcontent/XContentBuilder.java | 37 +------------------ 3 files changed, 27 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 125bc5aefcf9f..5905695fb73fe 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -25,13 +25,17 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import java.io.IOException; import java.util.Arrays; import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -public final class GeoPoint { +public final class GeoPoint implements ToXContentFragment { private double lat; private double lon; @@ -184,4 +188,9 @@ public static GeoPoint fromGeohash(String geohash) { public static GeoPoint fromGeohash(long geohashLong) { return new GeoPoint().resetFromGeoHash(geohashLong); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.latlon(lat, lon); + } } diff --git a/server/src/main/java/org/elasticsearch/common/text/Text.java b/server/src/main/java/org/elasticsearch/common/text/Text.java index d895b7c11b02d..45a1c2d630672 100644 --- a/server/src/main/java/org/elasticsearch/common/text/Text.java +++ b/server/src/main/java/org/elasticsearch/common/text/Text.java @@ -20,14 +20,18 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import java.io.IOException; import java.nio.charset.StandardCharsets; /** * Both {@link String} and {@link BytesReference} representation of the text. Starts with one of those, and if * the other is requests, caches the other one in a local reference so no additional conversion will be needed. */ -public final class Text implements Comparable { +public final class Text implements Comparable, ToXContentFragment { public static final Text[] EMPTY_ARRAY = new Text[0]; @@ -113,4 +117,15 @@ public boolean equals(Object obj) { public int compareTo(Text text) { return bytes().compareTo(text.bytes()); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (hasString()) { + return builder.value(this.string()); + } else { + // TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a + // request to jackson to support InputStream as well? 
+ return builder.utf8Value(this.bytes().toBytesRef()); + } + } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 9e1bb362d4879..b5622a9c0d26e 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -20,9 +20,7 @@ package org.elasticsearch.common.xcontent; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; @@ -98,7 +96,6 @@ public static XContentBuilder builder(XContent xContent, Set includes, S writers.put(double[].class, (b, v) -> b.values((double[]) v)); writers.put(Float.class, (b, v) -> b.value((Float) v)); writers.put(float[].class, (b, v) -> b.values((float[]) v)); - writers.put(GeoPoint.class, (b, v) -> b.value((GeoPoint) v)); writers.put(Integer.class, (b, v) -> b.value((Integer) v)); writers.put(int[].class, (b, v) -> b.values((int[]) v)); writers.put(Long.class, (b, v) -> b.value((Long) v)); @@ -107,7 +104,6 @@ public static XContentBuilder builder(XContent xContent, Set includes, S writers.put(short[].class, (b, v) -> b.values((short[]) v)); writers.put(String.class, (b, v) -> b.value((String) v)); writers.put(String[].class, (b, v) -> b.values((String[]) v)); - writers.put(Text.class, (b, v) -> b.value((Text) v)); WRITERS = Collections.unmodifiableMap(writers); } @@ -630,26 +626,6 @@ public XContentBuilder utf8Value(BytesRef value) throws IOException { return this; } - //////////////////////////////////////////////////////////////////////////// - // Text - ////////////////////////////////// - - public XContentBuilder field(String name, Text value) throws IOException { - return field(name).value(value); - } - - public XContentBuilder value(Text value) throws IOException { - if (value == null) { - return nullValue(); - } else if (value.hasString()) { - return value(value.string()); - } else { - // TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a - // request to jackson to support InputStream as well? 
- return utf8Value(value.bytes().toBytesRef()); - } - } - //////////////////////////////////////////////////////////////////////////// // Date ////////////////////////////////// @@ -714,20 +690,9 @@ XContentBuilder value(DateTimeFormatter formatter, long value) throws IOExceptio } //////////////////////////////////////////////////////////////////////////// - // GeoPoint & LatLon + // LatLon ////////////////////////////////// - public XContentBuilder field(String name, GeoPoint value) throws IOException { - return field(name).value(value); - } - - public XContentBuilder value(GeoPoint value) throws IOException { - if (value == null) { - return nullValue(); - } - return latlon(value.getLat(), value.getLon()); - } - public XContentBuilder latlon(String name, double lat, double lon) throws IOException { return field(name).latlon(lat, lon); } From 80532229a9b3484cab033a7fe92ca576b7799f47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 19 Mar 2018 16:26:02 +0100 Subject: [PATCH 80/89] Move indices field from RankEvalSpec to RankEvalRequest (#28341) Currently we store the indices specified in the request URL together with all the other ranking evaluation specification in RankEvalSpec. This is not ideal since e.g. the indices are not rendered to xContent and so cannot be parsed back. Instead we should keep them in RankEvalRequest. --- .../org/elasticsearch/client/Request.java | 5 +- .../org/elasticsearch/client/RankEvalIT.java | 4 +- .../elasticsearch/client/RequestTests.java | 4 +- .../index/rankeval/RankEvalRequest.java | 46 ++++++++++++++++++- .../index/rankeval/RankEvalSpec.java | 25 +--------- .../index/rankeval/RestRankEvalAction.java | 5 +- .../rankeval/TransportRankEvalAction.java | 3 +- .../index/rankeval/RankEvalRequestIT.java | 27 ++++------- .../index/rankeval/RankEvalSpecTests.java | 12 +---- .../rankeval/SmokeMultipleTemplatesIT.java | 4 +- 10 files changed, 64 insertions(+), 71 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index fb036bf35faf0..8a5c333dc4645 100755 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -82,7 +82,6 @@ import java.nio.charset.Charset; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -517,9 +516,7 @@ static Request existsAlias(GetAliasesRequest getAliasesRequest) { } static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException { - // TODO maybe indices should be property of RankEvalRequest and not of the spec - List indices = rankEvalRequest.getRankEvalSpec().getIndices(); - String endpoint = endpoint(indices.toArray(new String[indices.size()]), Strings.EMPTY_ARRAY, "_rank_eval"); + String endpoint = endpoint(rankEvalRequest.getIndices(), Strings.EMPTY_ARRAY, "_rank_eval"); HttpEntity entity = createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE); return new Request(HttpGet.METHOD_NAME, endpoint, Collections.emptyMap(), entity); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index c65f7e9da5b73..7e60e5f169f32 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -71,9 +71,9 @@ public void testRankEvalRequest() throws IOException { specifications.add(berlinRequest); PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec spec = new RankEvalSpec(specifications, metric); - spec.addIndices(Collections.singletonList("index")); - RankEvalResponse response = execute(new RankEvalRequest(spec), highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); + RankEvalResponse response = execute(new RankEvalRequest(spec, new String[] { "index" }), highLevelClient()::rankEval, + highLevelClient()::rankEvalAsync); // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the second is 1/6, divided by 2 to get the average double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0; assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index 5a7965ad446cb..beddadf859b1c 100755 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -104,7 +104,6 @@ import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -1109,8 +1108,7 @@ public void testRankEval() throws Exception { Collections.singletonList(new RatedRequest("queryId", Collections.emptyList(), new SearchSourceBuilder())), new PrecisionAtK()); String[] indices = randomIndicesNames(0, 5); - spec.addIndices(Arrays.asList(indices)); - RankEvalRequest rankEvalRequest = new RankEvalRequest(spec); + RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, indices); Request request = Request.rankEval(rankEvalRequest); StringJoiner endpoint = new StringJoiner("/", "/", ""); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java index c682ec45ed628..58fd3b0a694ae 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java @@ -19,12 +19,15 @@ package org.elasticsearch.index.rankeval; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Objects; /** * Request to perform a search ranking evaluation. @@ -32,9 +35,11 @@ public class RankEvalRequest extends ActionRequest { private RankEvalSpec rankingEvaluationSpec; + private String[] indices = Strings.EMPTY_ARRAY; - public RankEvalRequest(RankEvalSpec rankingEvaluationSpec) { + public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) { this.rankingEvaluationSpec = rankingEvaluationSpec; + setIndices(indices); } RankEvalRequest() { @@ -64,16 +69,53 @@ public void setRankEvalSpec(RankEvalSpec task) { this.rankingEvaluationSpec = task; } + /** + * Sets the indices the search will be executed on. 
+ */ + public RankEvalRequest setIndices(String... indices) { + Objects.requireNonNull(indices, "indices must not be null"); + for (String index : indices) { + Objects.requireNonNull(index, "index must not be null"); + } + this.indices = indices; + return this; + } + + /** + * @return the indices for this request + */ + public String[] getIndices() { + return indices; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); rankingEvaluationSpec = new RankEvalSpec(in); - + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + indices = in.readStringArray(); + } else { + // readStringArray uses readVInt for size, we used readInt in 6.2 + int indicesSize = in.readInt(); + String[] indices = new String[indicesSize]; + for (int i = 0; i < indicesSize; i++) { + indices[i] = in.readString(); + } + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); rankingEvaluationSpec.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + out.writeStringArray(indices); + } else { + // writeStringArray uses writeVInt for size, we used writeInt in 6.2 + out.writeInt(indices.length); + for (String index : indices) { + out.writeString(index); + } + } } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java index 8b396d0b862b4..8e0828fcfcaea 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java @@ -58,8 +58,6 @@ public class RankEvalSpec implements Writeable, ToXContentObject { private static final int MAX_CONCURRENT_SEARCHES = 10; /** optional: Templates to base test requests on */ private Map templates = new HashMap<>(); - /** the indices this ranking evaluation targets */ - private final List indices; public RankEvalSpec(List ratedRequests, EvaluationMetric metric, Collection templates) { this.metric = Objects.requireNonNull(metric, "Cannot evaluate ranking if no evaluation metric is provided."); @@ -81,7 +79,6 @@ public RankEvalSpec(List ratedRequests, EvaluationMetric metric, C this.templates.put(idScript.id, idScript.script); } } - this.indices = new ArrayList<>(); } public RankEvalSpec(List ratedRequests, EvaluationMetric metric) { @@ -102,11 +99,6 @@ public RankEvalSpec(StreamInput in) throws IOException { this.templates.put(key, value); } maxConcurrentSearches = in.readVInt(); - int indicesSize = in.readInt(); - indices = new ArrayList<>(indicesSize); - for (int i = 0; i < indicesSize; i++) { - this.indices.add(in.readString()); - } } @Override @@ -122,10 +114,6 @@ public void writeTo(StreamOutput out) throws IOException { entry.getValue().writeTo(out); } out.writeVInt(maxConcurrentSearches); - out.writeInt(indices.size()); - for (String index : indices) { - out.writeString(index); - } } /** Returns the metric to use for quality evaluation.*/ @@ -153,14 +141,6 @@ public void setMaxConcurrentSearches(int maxConcurrentSearches) { this.maxConcurrentSearches = maxConcurrentSearches; } - public void addIndices(List indices) { - this.indices.addAll(indices); - } - - public List getIndices() { - return Collections.unmodifiableList(indices); - } - private static final ParseField TEMPLATES_FIELD = new ParseField("templates"); private static final ParseField METRIC_FIELD = new ParseField("metric"); private static final ParseField REQUESTS_FIELD = new ParseField("requests"); 
@@ -262,12 +242,11 @@ public final boolean equals(Object obj) { return Objects.equals(ratedRequests, other.ratedRequests) && Objects.equals(metric, other.metric) && Objects.equals(maxConcurrentSearches, other.maxConcurrentSearches) && - Objects.equals(templates, other.templates) && - Objects.equals(indices, other.indices); + Objects.equals(templates, other.templates); } @Override public final int hashCode() { - return Objects.hash(ratedRequests, metric, templates, maxConcurrentSearches, indices); + return Objects.hash(ratedRequests, metric, templates, maxConcurrentSearches); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java index a2c2aeb7584d2..a596caf4f5c7b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java @@ -29,8 +29,6 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Arrays; -import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -110,9 +108,8 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } private static void parseRankEvalRequest(RankEvalRequest rankEvalRequest, RestRequest request, XContentParser parser) { - List indices = Arrays.asList(Strings.splitStringByCommaToArray(request.param("index"))); + rankEvalRequest.setIndices(Strings.splitStringByCommaToArray(request.param("index"))); RankEvalSpec spec = RankEvalSpec.parse(parser); - spec.addIndices(indices); rankEvalRequest.setRankEvalSpec(spec); } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index b677f66399818..a4ce4c7ee92e7 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -85,7 +85,6 @@ public TransportRankEvalAction(Settings settings, ThreadPool threadPool, ActionF @Override protected void doExecute(RankEvalRequest request, ActionListener listener) { RankEvalSpec evaluationSpecification = request.getRankEvalSpec(); - List indices = evaluationSpecification.getIndices(); EvaluationMetric metric = evaluationSpecification.getMetric(); List ratedRequests = evaluationSpecification.getRatedRequests(); @@ -127,7 +126,7 @@ LoggingDeprecationHandler.INSTANCE, new BytesArray(resolvedRequest), XContentTyp } else { ratedSearchSource.fetchSource(summaryFields.toArray(new String[summaryFields.size()]), new String[0]); } - msearchRequest.add(new SearchRequest(indices.toArray(new String[indices.size()]), ratedSearchSource)); + msearchRequest.add(new SearchRequest(request.getIndices(), ratedSearchSource)); } assert ratedRequestsInSearch.size() == msearchRequest.requests().size(); client.multiSearch(msearchRequest, new RankEvalActionListener(listener, metric, diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index e0108a9e41681..744bc3467861f 100644 --- 
a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -30,7 +30,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map.Entry; import java.util.Set; @@ -85,13 +84,12 @@ public void testPrecisionAtRequest() { PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); builder.setRankEvalSpec(task); - RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()) + RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().setIndices("test")) .actionGet(); // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the // second is 1/6, divided by 2 to get the average @@ -132,9 +130,8 @@ public void testPrecisionAtRequest() { // test that a different window size k affects the result metric = new PrecisionAtK(1, false, 3); task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); @@ -165,9 +162,9 @@ public void testDCGRequest() { DiscountedCumulativeGain metric = new DiscountedCumulativeGain(false, null, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, + new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); @@ -176,9 +173,8 @@ public void testDCGRequest() { // test that a different window size k affects the result metric = new DiscountedCumulativeGain(false, null, 3); task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); @@ -196,9 +192,9 @@ public void testMRRRequest() { MeanReciprocalRank metric = new MeanReciprocalRank(1, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, + new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, 
builder.request()).actionGet(); @@ -211,9 +207,8 @@ public void testMRRRequest() { // test that a different window size k affects the result metric = new MeanReciprocalRank(1, 3); task = new RankEvalSpec(specifications, metric); - task.addIndices(Collections.singletonList("test")); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); @@ -229,8 +224,6 @@ public void testMRRRequest() { * field) will produce an error in the response */ public void testBadQuery() { - List indices = Arrays.asList(new String[] { "test" }); - List specifications = new ArrayList<>(); SearchSourceBuilder amsterdamQuery = new SearchSourceBuilder(); amsterdamQuery.query(new MatchAllQueryBuilder()); @@ -245,9 +238,9 @@ public void testBadQuery() { specifications.add(brokenRequest); RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); - task.addIndices(indices); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, + new RankEvalRequest().setIndices("test")); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index 0b0b30c36e90f..26611679f3494 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -109,7 +109,6 @@ private static RankEvalSpec createTestItem() throws IOException { for (int i = 0; i < size; i++) { indices.add(randomAlphaOfLengthBetween(0, 50)); } - spec.addIndices(indices); return spec; } @@ -117,11 +116,7 @@ public void testXContentRoundtrip() throws IOException { RankEvalSpec testItem = createTestItem(); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(shuffled))) { - RankEvalSpec parsedItem = RankEvalSpec.parse(parser); - // indices, come from URL parameters, so they don't survive xContent roundtrip - // for the sake of being able to use equals() next, we add it to the parsed object - parsedItem.addIndices(testItem.getIndices()); assertNotSame(testItem, parsedItem); assertEquals(testItem, parsedItem); assertEquals(testItem.hashCode(), parsedItem.hashCode()); @@ -165,9 +160,8 @@ private static RankEvalSpec mutateTestItem(RankEvalSpec original) { List ratedRequests = new ArrayList<>(original.getRatedRequests()); EvaluationMetric metric = original.getMetric(); Map templates = new HashMap<>(original.getTemplates()); - List indices = new ArrayList<>(original.getIndices()); - int mutate = randomIntBetween(0, 3); + int mutate = randomIntBetween(0, 2); switch (mutate) { case 0: RatedRequest request = RatedRequestsTests.createTestItem(true); @@ -183,9 +177,6 @@ private static RankEvalSpec mutateTestItem(RankEvalSpec original) { case 2: templates.put("mutation", new Script(ScriptType.INLINE, 
"mustache", randomAlphaOfLength(10), new HashMap<>())); break; - case 3: - indices.add(randomAlphaOfLength(5)); - break; default: throw new IllegalStateException("Requested to modify more than available parameters."); } @@ -195,7 +186,6 @@ private static RankEvalSpec mutateTestItem(RankEvalSpec original) { scripts.add(new ScriptWithId(entry.getKey(), entry.getValue())); } RankEvalSpec result = new RankEvalSpec(ratedRequests, metric, scripts); - result.addIndices(indices); return result; } diff --git a/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java b/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java index 04d5d94023bc6..178d429ca9ffd 100644 --- a/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java +++ b/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java @@ -72,7 +72,6 @@ public void setup() { } public void testPrecisionAtRequest() throws IOException { - List indices = Arrays.asList(new String[] { "test" }); List specifications = new ArrayList<>(); Map ams_params = new HashMap<>(); @@ -100,11 +99,10 @@ public void testPrecisionAtRequest() throws IOException { Set templates = new HashSet<>(); templates.add(template); RankEvalSpec task = new RankEvalSpec(specifications, metric, templates); - task.addIndices(indices); RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); builder.setRankEvalSpec(task); - RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); + RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().setIndices("test")).actionGet(); assertEquals(0.9, response.getEvaluationResult(), Double.MIN_VALUE); } From 3530a676e0c523e490d8a67be40cebf58358b860 Mon Sep 17 00:00:00 2001 From: Sue Gallagher <36747279+Sue-Gallagher@users.noreply.github.com> Date: Mon, 19 Mar 2018 10:22:40 -0700 Subject: [PATCH 81/89] [Docs]Corrected spelling errors. (#28976) --- docs/reference/docs/refresh.asciidoc | 2 +- docs/reference/how-to/general.asciidoc | 2 +- docs/reference/how-to/recipes/scoring.asciidoc | 2 +- docs/reference/query-dsl/span-not-query.asciidoc | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/docs/refresh.asciidoc b/docs/reference/docs/refresh.asciidoc index f301a8ec3da7a..e5051497ecda3 100644 --- a/docs/reference/docs/refresh.asciidoc +++ b/docs/reference/docs/refresh.asciidoc @@ -46,7 +46,7 @@ compared to `true`. In the case that the index is only changed once every `index.refresh_interval` then it saves no work. * `true` creates less efficient indexes constructs (tiny segments) that must later be merged into more efficient index constructs (larger segments). Meaning -that the cost of `true` is payed at index time to create the tiny segment, at +that the cost of `true` is paid at index time to create the tiny segment, at search time to search the tiny segment, and at merge time to make the larger segments. * Never start multiple `refresh=wait_for` requests in a row. 
Instead batch them diff --git a/docs/reference/how-to/general.asciidoc b/docs/reference/how-to/general.asciidoc index 0900c49ce0640..e9e26dbaf2a70 100644 --- a/docs/reference/how-to/general.asciidoc +++ b/docs/reference/how-to/general.asciidoc @@ -31,7 +31,7 @@ and <> also become more expensive since their cost directly depends on the size of the original document. It is sometimes useful to reconsider what the unit of information should be. -For instance, the fact you want to make books searchable doesn't necesarily +For instance, the fact you want to make books searchable doesn't necessarily mean that a document should consist of a whole book. It might be a better idea to use chapters or even paragraphs as documents, and then have a property in these documents that identifies which book they belong to. This does not only diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 6c3036091a2ed..f9973385c7163 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -65,7 +65,7 @@ documents and scores will be consistent. Otherwise the recommended way to work around this issue is to use the <> search type. This will make -Elasticsearch perform an inital round trip to all involved shards, asking +Elasticsearch perform an initial round trip to all involved shards, asking them for their index statistics relatively to the query, then the coordinating node will merge those statistics and send the merged statistics alongside the request when asking shards to perform the `query` phase, so that shards can diff --git a/docs/reference/query-dsl/span-not-query.asciidoc b/docs/reference/query-dsl/span-not-query.asciidoc index 1632ee03b2fb8..29a803a74767b 100644 --- a/docs/reference/query-dsl/span-not-query.asciidoc +++ b/docs/reference/query-dsl/span-not-query.asciidoc @@ -3,7 +3,7 @@ Removes matches which overlap with another span query or which are within x tokens before (controlled by the parameter `pre`) or y tokens -after (controled by the parameter `post`) another SpanQuery. The span not +after (controlled by the parameter `post`) another SpanQuery. The span not query maps to Lucene `SpanNotQuery`. Here is an example: [source,js] From 5135484a278e7dfe1a4877204c4493abcdea6849 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 19 Mar 2018 14:02:55 -0400 Subject: [PATCH 82/89] Test: Expect extra Exception in die_with_dignity (#29138) I did a little digging. It looks like IOException is thrown when the other side closes its connection while we're waiting on our buffer to fill up. We totally expect that in this test. It feels to me like we should throw a `ConnectionClosedException` but upstream does not agree: https://issues.apache.org/jira/browse/HTTPASYNC-134 While we *could* catch the exception and transform it ourselves that seems like a bigger change than is merited at this point. 
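The widened assertion the test uses instead boils down to Hamcrest's `either(...).or(...)` combinator. Here is a hedged, standalone sketch (assuming Hamcrest core and library on the classpath; `IS_WINDOWS` and the exception types are illustrative stand-ins for the test's `Constants.WINDOWS` check and `ConnectionClosedException`):

```
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasToString;

import org.hamcrest.Matcher;

// Hedged sketch of the "widen the matcher per platform" pattern: start from
// the strict expectation and, only on the known-odd platform, also accept
// the alternate failure shape. IS_WINDOWS stands in for Constants.WINDOWS.
public class PlatformDependentMatcherDemo {
    static final boolean IS_WINDOWS = System.getProperty("os.name").startsWith("Windows");

    @SuppressWarnings({"rawtypes", "unchecked"})
    static Matcher<Object> failureMatcher() {
        Matcher matcher = instanceOf(IllegalStateException.class); // the usual failure
        if (IS_WINDOWS) {
            // also accept the message-based variant this platform produces
            matcher = either(matcher).or(hasToString(containsString("forcibly closed")));
        }
        return matcher;
    }

    public static void main(String[] args) {
        assertThat(new IllegalStateException("boom"), failureMatcher());
        System.out.println("matcher accepted the expected failure");
    }
}
```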
Closes #29136 --- .../qa/die_with_dignity/DieWithDignityIT.java | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java index 8aaf81968561a..4e69a478562a7 100644 --- a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java +++ b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -20,22 +20,25 @@ package org.elasticsearch.qa.die_with_dignity; import org.apache.http.ConnectionClosedException; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseListener; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matcher; import java.io.BufferedReader; +import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Path; import java.util.Iterator; import java.util.List; -import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -48,7 +51,22 @@ public void testDieWithDignity() throws Exception { assertThat(pidFileLines, hasSize(1)); final int pid = Integer.parseInt(pidFileLines.get(0)); Files.delete(pidFile); - expectThrows(ConnectionClosedException.class, () -> client().performRequest("GET", "/_die_with_dignity")); + IOException e = expectThrows(IOException.class, () -> client().performRequest("GET", "/_die_with_dignity")); + Matcher failureMatcher = instanceOf(ConnectionClosedException.class); + if (Constants.WINDOWS) { + /* + * If the other side closes the connection while we're waiting to fill our buffer + * we can get IOException with the message below. It seems to only come up on + * Windows and it *feels* like it could be a ConnectionClosedException but + * upstream does not consider this a bug: + * https://issues.apache.org/jira/browse/HTTPASYNC-134 + * + * So we catch it here and consider it "ok". + */ + failureMatcher = either(failureMatcher) + .or(hasToString(containsString("An existing connection was forcibly closed by the remote host"))); + } + assertThat(e, failureMatcher); // the Elasticsearch process should die and disappear from the output of jps assertBusy(() -> { From a813492fe3df3c6f501852cd501c2d8bde84e8fe Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 19 Mar 2018 14:17:09 -0400 Subject: [PATCH 83/89] Tests: Make $_path support dots in paths (#28917) `$_path` is used by documentation tests to ignore a value from a response, for example: ``` [source,js] ---- { "count": 1, "datafeeds": [ { "datafeed_id": "datafeed-total-requests", "state": "started", "node": { ... "attributes": { "ml.machine_memory": "17179869184", "ml.max_open_jobs": "20", "ml.enabled": "true" } }, "assignment_explanation": "" } ] } ---- // TESTRESPONSE[s/"17179869184"/$body.$_path/] ``` That example shows `17179869184` in the compiled docs but when it runs the tests generated by that doc it ignores `17179869184` and asserts instead that there is a value in that field. 
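(As an aside, the dot-escaping at the heart of the fix can be pictured with a small hedged sketch. `buildPath` is an illustrative stand-in for the private path assembly inside `Stash`, shown in the diff below.)

```
import java.util.Arrays;
import java.util.List;

// Hedged sketch of the path assembly the fix changes: each component has its
// literal dots escaped before the components are joined with '.', so a key
// such as "ml.machine_memory" survives as a single path element.
public class PathEscapeDemo {
    static String buildPath(List<Object> path) {
        StringBuilder pathBuilder = new StringBuilder();
        for (Object element : path) {
            if (pathBuilder.length() > 0) {
                pathBuilder.append('.');
            }
            pathBuilder.append(element.toString().replace(".", "\\."));
        }
        return pathBuilder.toString();
    }

    public static void main(String[] args) {
        // mirrors the datafeed example above, whose attribute key contains dots
        System.out.println(buildPath(Arrays.asList("node", "attributes", "ml.machine_memory")));
        // prints: node.attributes.ml\.machine_memory
    }
}
```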
This is required because we can't predict things like "how many milliseconds will this take?" and "how much memory will this take?". Before this change it was impossible to use `$_path` when any component of the path contained a `.`. This fixes the `$_path` evaluator to properly escape `.`. Closes #28770 --- .../elasticsearch/test/rest/yaml/Stash.java | 4 +-- .../test/rest/yaml/StashTests.java | 33 ++++++++++++------- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java index c7b8e0fef2f9b..a5edeb0195b53 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java @@ -184,10 +184,10 @@ private Object getValue(List path, String key) throws IOException { StringBuilder pathBuilder = new StringBuilder(); Iterator element = path.iterator(); if (element.hasNext()) { - pathBuilder.append(element.next()); + pathBuilder.append(element.next().toString().replace(".", "\\.")); while (element.hasNext()) { pathBuilder.append('.'); - pathBuilder.append(element.next()); + pathBuilder.append(element.next().toString().replace(".", "\\.")); } } String builtPath = Matcher.quoteReplacement(pathBuilder.toString()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java index 8e7e0284249de..1c616d8be8bcd 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java @@ -119,16 +119,22 @@ public void testReplaceStashedValuesStashKeyInList() throws IOException { public void testPathInList() throws IOException { Stash stash = new Stash(); - stash.stashValue("body", singletonMap("foo", Arrays.asList("a", "b"))); + String topLevelKey; + if (randomBoolean()) { + topLevelKey = randomAlphaOfLength(2) + "." + randomAlphaOfLength(2); + } else { + topLevelKey = randomAlphaOfLength(5); + } + stash.stashValue("body", singletonMap(topLevelKey, Arrays.asList("a", "b"))); Map expected; Map map; if (randomBoolean()) { - expected = singletonMap("foo", Arrays.asList("test", "boooooh!")); - map = singletonMap("foo", Arrays.asList("test", "${body.$_path}oooooh!")); + expected = singletonMap(topLevelKey, Arrays.asList("test", "boooooh!")); + map = singletonMap(topLevelKey, Arrays.asList("test", "${body.$_path}oooooh!")); } else { - expected = singletonMap("foo", Arrays.asList("test", "b")); - map = singletonMap("foo", Arrays.asList("test", "$body.$_path")); + expected = singletonMap(topLevelKey, Arrays.asList("test", "b")); + map = singletonMap(topLevelKey, Arrays.asList("test", "$body.$_path")); } Map actual = stash.replaceStashedValues(map); @@ -138,21 +144,26 @@ public void testPathInList() throws IOException { public void testPathInMapValue() throws IOException { Stash stash = new Stash(); - stash.stashValue("body", singletonMap("foo", singletonMap("a", "b"))); + String topLevelKey; + if (randomBoolean()) { + topLevelKey = randomAlphaOfLength(2) + "." 
+ randomAlphaOfLength(2); + } else { + topLevelKey = randomAlphaOfLength(5); + } + stash.stashValue("body", singletonMap(topLevelKey, singletonMap("a", "b"))); Map expected; Map map; if (randomBoolean()) { - expected = singletonMap("foo", singletonMap("a", "boooooh!")); - map = singletonMap("foo", singletonMap("a", "${body.$_path}oooooh!")); + expected = singletonMap(topLevelKey, singletonMap("a", "boooooh!")); + map = singletonMap(topLevelKey, singletonMap("a", "${body.$_path}oooooh!")); } else { - expected = singletonMap("foo", singletonMap("a", "b")); - map = singletonMap("foo", singletonMap("a", "$body.$_path")); + expected = singletonMap(topLevelKey, singletonMap("a", "b")); + map = singletonMap(topLevelKey, singletonMap("a", "$body.$_path")); } Map actual = stash.replaceStashedValues(map); assertEquals(expected, actual); assertThat(actual, not(sameInstance(map))); } - } From 687577a51634639548da1234e1a645bede47399f Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 19 Mar 2018 12:28:15 -0600 Subject: [PATCH 84/89] Fix javadoc warning in Strings for missing parameter description Fixes a parameter in `Strings` that had a javadoc annotation but was missing the description, causing warnings in the build. --- server/src/main/java/org/elasticsearch/common/Strings.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/Strings.java b/server/src/main/java/org/elasticsearch/common/Strings.java index 6c2068197866e..da27bf0187bd2 100644 --- a/server/src/main/java/org/elasticsearch/common/Strings.java +++ b/server/src/main/java/org/elasticsearch/common/Strings.java @@ -757,7 +757,7 @@ public static String toString(ToXContent toXContent) { /** * Returns a string representation of the builder (only applicable for text based xcontent). - * @param xContentBuilder + * @param xContentBuilder builder containing an object to be converted to a string */ public static String toString(XContentBuilder xContentBuilder) { return BytesReference.bytes(xContentBuilder).utf8ToString(); From 1eb1d59de8c43eabedfd6d422eca152966b51732 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 19 Mar 2018 16:10:16 -0700 Subject: [PATCH 85/89] Build: Fix meta modules to not install as plugin in tests (#29150) This commit fixes the meta plugin build so that, in tests, a meta plugin that is a module is installed as a module, while any other meta plugin is installed as a plugin.
--- .../gradle/plugin/MetaPluginBuildPlugin.groovy | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy index 4dc355a48608a..fa7e0a6a752b4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy @@ -37,19 +37,28 @@ class MetaPluginBuildPlugin implements Plugin { project.plugins.apply(RestTestPlugin) createBundleTask(project) + boolean isModule = project.path.startsWith(':modules:') project.integTestCluster { dependsOn(project.bundlePlugin) - plugin(project.path) } BuildPlugin.configurePomGeneration(project) project.afterEvaluate { PluginBuildPlugin.addZipPomGeneration(project) + if (isModule) { + if (project.integTestCluster.distribution == 'integ-test-zip') { + project.integTestCluster.module(project) + } + } else { + project.integTestCluster.plugin(project.path) + } } RunTask run = project.tasks.create('run', RunTask) run.dependsOn(project.bundlePlugin) - run.clusterConfig.plugin(project.path) + if (isModule == false) { + run.clusterConfig.plugin(project.path) + } } private static void createBundleTask(Project project) { From f7a1267e4f502848b4b2b3a55f15b479c455bf2a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 19 Mar 2018 19:46:43 -0700 Subject: [PATCH 86/89] Build: Simplify rest spec hack configuration (#29149) This commit creates the copyRestSpec task for rest integ tests immediately on creation of the RestIntegTestTask instead of lazily in afterEvaluate. This allows other projects to add additional rest specs to be copied, instead of needing to create another parallel copy task. --- .../gradle/test/RestIntegTestTask.groovy | 62 +++++++++++++-- .../gradle/test/RestSpecHack.groovy | 78 ------------------- 2 files changed, 57 insertions(+), 83 deletions(-) delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index bb86c1e0995a8..3c7554453b5e2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,10 +20,15 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.VersionProperties import org.gradle.api.DefaultTask +import org.gradle.api.Project import org.gradle.api.Task import org.gradle.api.execution.TaskExecutionAdapter import org.gradle.api.internal.tasks.options.Option +import org.gradle.api.provider.Property +import org.gradle.api.provider.Provider +import org.gradle.api.tasks.Copy import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskState @@ -47,7 +52,7 @@ public class RestIntegTestTask extends DefaultTask { /** Flag indicating whether the rest tests in the rest spec should be run. 
---
 .../gradle/test/RestIntegTestTask.groovy | 62 +++++++++++++--
 .../gradle/test/RestSpecHack.groovy      | 78 -------------------
 2 files changed, 57 insertions(+), 83 deletions(-)
 delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index bb86c1e0995a8..3c7554453b5e2 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -20,10 +20,15 @@ package org.elasticsearch.gradle.test

 import com.carrotsearch.gradle.junit4.RandomizedTestingTask
 import org.elasticsearch.gradle.BuildPlugin
+import org.elasticsearch.gradle.VersionProperties
 import org.gradle.api.DefaultTask
+import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.execution.TaskExecutionAdapter
 import org.gradle.api.internal.tasks.options.Option
+import org.gradle.api.provider.Property
+import org.gradle.api.provider.Provider
+import org.gradle.api.tasks.Copy
 import org.gradle.api.tasks.Input
 import org.gradle.api.tasks.TaskState

@@ -47,7 +52,7 @@ public class RestIntegTestTask extends DefaultTask {

     /** Flag indicating whether the rest tests in the rest spec should be run. */
     @Input
-    boolean includePackaged = false
+    Property<Boolean> includePackaged = project.objects.property(Boolean)

     public RestIntegTestTask() {
         runner = project.tasks.create("${name}Runner", RandomizedTestingTask.class)
@@ -92,10 +97,9 @@ public class RestIntegTestTask extends DefaultTask {
         }

         // copy the rest spec/tests into the test resources
-        RestSpecHack.configureDependencies(project)
-        project.afterEvaluate {
-            runner.dependsOn(RestSpecHack.configureTask(project, includePackaged))
-        }
+        Task copyRestSpec = createCopyRestSpecTask(project, includePackaged)
+        runner.dependsOn(copyRestSpec)
+
         // this must run after all projects have been configured, so we know any project
         // references can be accessed as a fully configured
         project.gradle.projectsEvaluated {
@@ -109,6 +113,11 @@ public class RestIntegTestTask extends DefaultTask {
         }
     }

+    /** Sets the includePackaged property */
+    public void includePackaged(boolean include) {
+        includePackaged.set(include)
+    }
+
     @Option(
         option = "debug-jvm",
         description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
@@ -184,4 +193,47 @@ public class RestIntegTestTask extends DefaultTask {
         println('=========================================')
     }
+
+    /**
+     * Creates a task (if necessary) to copy the rest spec files.
+     *
+     * @param project The project to add the copy task to
+     * @param includePackagedTests true if the packaged tests should be copied, false otherwise
+     */
+    private static Task createCopyRestSpecTask(Project project, Provider<Boolean> includePackagedTests) {
+        project.configurations {
+            restSpec
+        }
+        project.dependencies {
+            restSpec "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}"
+        }
+        Task copyRestSpec = project.tasks.findByName('copyRestSpec')
+        if (copyRestSpec != null) {
+            return copyRestSpec
+        }
+        Map copyRestSpecProps = [
+            name     : 'copyRestSpec',
+            type     : Copy,
+            dependsOn: [project.configurations.restSpec, 'processTestResources']
+        ]
+        copyRestSpec = project.tasks.create(copyRestSpecProps) {
+            into project.sourceSets.test.output.resourcesDir
+        }
+        project.afterEvaluate {
+            copyRestSpec.from({ project.zipTree(project.configurations.restSpec.singleFile) }) {
+                include 'rest-api-spec/api/**'
+                if (includePackagedTests.get()) {
+                    include 'rest-api-spec/test/**'
+                }
+            }
+        }
+        project.idea {
+            module {
+                if (scopes.TEST != null) {
+                    scopes.TEST.plus.add(project.configurations.restSpec)
+                }
+            }
+        }
+        return copyRestSpec
+    }
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy
deleted file mode 100644
index 296ae7115789f..0000000000000
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestSpecHack.groovy
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.gradle.test
-
-import org.elasticsearch.gradle.VersionProperties
-import org.gradle.api.Project
-import org.gradle.api.Task
-import org.gradle.api.tasks.Copy
-
-/**
- * The rest-api-spec tests are loaded from the classpath. However, they
- * currently must be available on the local filesystem. This class encapsulates
- * setting up tasks to copy the rest spec api to test resources.
- */
-public class RestSpecHack {
-    /**
-     * Sets dependencies needed to copy the rest spec.
-     * @param project The project to add rest spec dependency to
-     */
-    public static void configureDependencies(Project project) {
-        project.configurations {
-            restSpec
-        }
-        project.dependencies {
-            restSpec "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}"
-        }
-    }
-
-    /**
-     * Creates a task (if necessary) to copy the rest spec files.
-     *
-     * @param project The project to add the copy task to
-     * @param includePackagedTests true if the packaged tests should be copied, false otherwise
-     */
-    public static Task configureTask(Project project, boolean includePackagedTests) {
-        Task copyRestSpec = project.tasks.findByName('copyRestSpec')
-        if (copyRestSpec != null) {
-            return copyRestSpec
-        }
-        Map copyRestSpecProps = [
-            name     : 'copyRestSpec',
-            type     : Copy,
-            dependsOn: [project.configurations.restSpec, 'processTestResources']
-        ]
-        copyRestSpec = project.tasks.create(copyRestSpecProps) {
-            from { project.zipTree(project.configurations.restSpec.singleFile) }
-            include 'rest-api-spec/api/**'
-            if (includePackagedTests) {
-                include 'rest-api-spec/test/**'
-            }
-            into project.sourceSets.test.output.resourcesDir
-        }
-        project.idea {
-            module {
-                if (scopes.TEST != null) {
-                    scopes.TEST.plus.add(project.configurations.restSpec)
-                }
-            }
-        }
-        return copyRestSpec
-    }
-}

From e5825100f02364ff6cb4d531112f64451f4e7dd7 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Mon, 19 Mar 2018 19:54:01 -0700
Subject: [PATCH 87/89] Build: Fix meta plugin bundled plugin names (#29147)

This commit fixes the directory name under which bundled plugins are
added within a meta plugin: it is now the configured name of the
bundled plugin instead of the project name.
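As a rough illustration (the names here are hypothetical): for a bundled
plugin project named 'example-plugin-project' whose esplugin name is
configured as 'example-plugin', entries in the meta plugin zip now land
under

    example-plugin/plugin-descriptor.properties

instead of

    example-plugin-project/plugin-descriptor.properties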
---
 .../elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy
index fa7e0a6a752b4..6c1857b3e7bf9 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy
@@ -88,12 +88,13 @@ class MetaPluginBuildPlugin implements Plugin<Project> {
         buildProperties.extension.plugins.each { String bundledPluginProjectName ->
             Project bundledPluginProject = project.project(bundledPluginProjectName)
             bundledPluginProject.afterEvaluate {
+                String bundledPluginName = bundledPluginProject.esplugin.name
                 bundle.configure {
                     dependsOn bundledPluginProject.bundlePlugin
                     from(project.zipTree(bundledPluginProject.bundlePlugin.outputs.files.singleFile)) {
                         eachFile { FileCopyDetails details ->
                             // we want each path to have the plugin name interjected
-                            details.relativePath = new RelativePath(true, bundledPluginProjectName, details.relativePath.toString())
+                            details.relativePath = new RelativePath(true, bundledPluginName, details.relativePath.toString())
                         }
                     }
                 }

From 7112d4368f0993f4e83941bfa4fc3d1c78287d38 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Mon, 19 Mar 2018 19:58:03 -0700
Subject: [PATCH 88/89] Plugins: Fix module name conflict check for meta plugins (#29146)

This commit moves the check for plugin names conflicting with builtin
modules to a location that also applies to meta plugins.
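In practice this means an attempt to install a plugin named after a
builtin module, for example `lang-painless`, now fails up front in
verifyPluginName with the UserException shown in the diff below
("plugin 'lang-painless' cannot be installed as a plugin, it is a system
module"), and the same check fires when such a name arrives as part of a
meta plugin.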
---
 .../elasticsearch/plugins/InstallPluginCommand.java | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
index b7f201b70aa46..84f3764880243 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
@@ -532,6 +532,12 @@ private Path stagingDirectoryWithoutPosixPermissions(Path pluginsDir) throws IOE

     // checking for existing version of the plugin
     private void verifyPluginName(Path pluginPath, String pluginName, Path candidateDir) throws UserException, IOException {
+        // don't let user install plugin conflicting with module...
+        // they might be unavoidably in maven central and are packaged up the same way
+        if (MODULES.contains(pluginName)) {
+            throw new UserException(ExitCodes.USAGE, "plugin '" + pluginName + "' cannot be installed as a plugin, it is a system module");
+        }
+
         final Path destination = pluginPath.resolve(pluginName);
         if (Files.exists(destination)) {
             final String message = String.format(
@@ -574,13 +580,6 @@ private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, boolean is

         terminal.println(VERBOSE, info.toString());

-        // don't let user install plugin as a module...
-        // they might be unavoidably in maven central and are packaged up the same way)
-        if (MODULES.contains(info.getName())) {
-            throw new UserException(ExitCodes.USAGE, "plugin '" + info.getName() +
-                "' cannot be installed like this, it is a system module");
-        }
-
         // check for jar hell before any copying
         jarHellCheck(info, pluginRoot, env.pluginsFile(), env.modulesFile());

From 52a517e2165d4589c456b6c1bc2faede5f3c3637 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Mon, 19 Mar 2018 20:04:58 -0700
Subject: [PATCH 89/89] Docs: Add example of resetting index setting (#29048)

This commit adds an example using `null` to reset an index setting.

closes #22870
---
 docs/reference/indices/update-settings.asciidoc | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/docs/reference/indices/update-settings.asciidoc b/docs/reference/indices/update-settings.asciidoc
index 15d0cdaaead96..227ff1a73d7a0 100644
--- a/docs/reference/indices/update-settings.asciidoc
+++ b/docs/reference/indices/update-settings.asciidoc
@@ -19,6 +19,20 @@ PUT /twitter/_settings
 // CONSOLE
 // TEST[setup:twitter]

+To reset a setting back to the default value, use `null`. For example:
+
+[source,js]
+--------------------------------------------------
+PUT /twitter/_settings
+{
+    "index" : {
+        "refresh_interval" : null
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
 The list of per-index settings which can be updated dynamically on live
 indices can be found in <>. To preserve existing settings from
 being updated, the `preserve_existing`