From 482717b2c7f4fabd800b51bf4b19aeee8b307545 Mon Sep 17 00:00:00 2001 From: Mathias Fussenegger Date: Thu, 30 Apr 2015 10:20:55 +0200 Subject: [PATCH 001/123] prevent over allocation for multicast ping request very tiny optimization but the BytesStreamOutput ctor by default allocates 16KB which is way too much for a ping request. The actual size of the request depends on the clusterName so 150 isn't accurate either but should be enough. --- .../discovery/zen/ping/multicast/MulticastZenPing.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java b/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java index 0d5ca7260b4..85eb221a73e 100644 --- a/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java +++ b/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.ping.multicast; import org.apache.lucene.util.Constants; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; @@ -29,7 +28,10 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.io.stream.*; +import org.elasticsearch.common.io.stream.BytesStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.MulticastChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -64,6 +66,8 @@ public class MulticastZenPing extends 
AbstractLifecycleComponent implem private static final byte[] INTERNAL_HEADER = new byte[]{1, 9, 8, 4}; + private static final int PING_SIZE_ESTIMATE = 150; + private final String address; private final int port; private final String group; @@ -248,7 +252,7 @@ public class MulticastZenPing extends AbstractLifecycleComponent implem private void sendPingRequest(int id) { try { - BytesStreamOutput out = new BytesStreamOutput(); + BytesStreamOutput out = new BytesStreamOutput(PING_SIZE_ESTIMATE); out.writeBytes(INTERNAL_HEADER); // TODO: change to min_required version! Version.writeVersion(version, out); From defe2c305cdc54b051d197e2204d9a160084d0ab Mon Sep 17 00:00:00 2001 From: aleph-zero Date: Thu, 30 Apr 2015 13:47:24 -0700 Subject: [PATCH 002/123] Read configuration file with .yaml suffix Fixes a bug whereby we failed to read an elasticsearch config file with the .yaml extension. This commit allows elasticsearch config files to be suffixed with: .yml, .yaml, .json, .properties. Closes #9706 --- .../internal/InternalSettingsPreparer.java | 26 +++++++------------ 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index fcb6f3919c6..f940c15c042 100644 --- a/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,6 +19,7 @@ package org.elasticsearch.node.internal; +import com.google.common.collect.ImmutableList; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Names; import org.elasticsearch.common.Strings; @@ -28,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.FailedToResolveConfigException; +import java.util.List; import java.util.Map; import static 
org.elasticsearch.common.Strings.cleanPath; @@ -38,6 +40,8 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilde */ public class InternalSettingsPreparer { + static final List ALLOWED_SUFFIXES = ImmutableList.of(".yml", ".yaml", ".json", ".properties"); + public static Tuple prepareSettings(Settings pSettings, boolean loadConfigSettings) { // ignore this prefixes when getting properties from es. and elasticsearch. String[] ignorePrefixes = new String[]{"es.default.", "elasticsearch.default."}; @@ -73,22 +77,12 @@ public class InternalSettingsPreparer { } } if (loadFromEnv) { - try { - settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.yml")); - } catch (FailedToResolveConfigException e) { - // ignore - } catch (NoClassDefFoundError e) { - // ignore, no yaml - } - try { - settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.json")); - } catch (FailedToResolveConfigException e) { - // ignore - } - try { - settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch.properties")); - } catch (FailedToResolveConfigException e) { - // ignore + for (String allowedSuffix : ALLOWED_SUFFIXES) { + try { + settingsBuilder.loadFromUrl(environment.resolveConfig("elasticsearch" + allowedSuffix)); + } catch (FailedToResolveConfigException e) { + // ignore + } } } } From 4a13a56058465e9b9cff0e9d435b24f1a37d5826 Mon Sep 17 00:00:00 2001 From: aleph-zero Date: Thu, 7 May 2015 13:26:42 -0700 Subject: [PATCH 003/123] Test for reading alternate suffixes --- .../node/internal/InternalSettingsPreparerTests.java | 12 ++++++++++++ src/test/resources/config/elasticsearch.json | 3 +++ src/test/resources/config/elasticsearch.properties | 2 ++ src/test/resources/config/elasticsearch.yaml | 3 +++ 4 files changed, 20 insertions(+) create mode 100644 src/test/resources/config/elasticsearch.json create mode 100644 src/test/resources/config/elasticsearch.properties create mode 100644 src/test/resources/config/elasticsearch.yaml 
diff --git a/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 8db6fd4e5c0..d78644e5fb5 100644 --- a/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -52,4 +52,16 @@ public class InternalSettingsPreparerTests extends ElasticsearchTestCase { // Should use setting from the system property assertThat(tuple.v1().get("node.zone"), equalTo("bar")); } + + @Test + public void testAlternateConfigFileSuffixes() { + // test that we can read config files with .yaml, .json, and .properties suffixes + Tuple tuple = InternalSettingsPreparer.prepareSettings(settingsBuilder() + .put("config.ignore_system_properties", true) + .build(), true); + + assertThat(tuple.v1().get("yaml.config.exists"), equalTo("true")); + assertThat(tuple.v1().get("json.config.exists"), equalTo("true")); + assertThat(tuple.v1().get("properties.config.exists"), equalTo("true")); + } } diff --git a/src/test/resources/config/elasticsearch.json b/src/test/resources/config/elasticsearch.json new file mode 100644 index 00000000000..16433a2c88d --- /dev/null +++ b/src/test/resources/config/elasticsearch.json @@ -0,0 +1,3 @@ +{ + "json.config.exists" : "true" +} diff --git a/src/test/resources/config/elasticsearch.properties b/src/test/resources/config/elasticsearch.properties new file mode 100644 index 00000000000..d3f822cafb5 --- /dev/null +++ b/src/test/resources/config/elasticsearch.properties @@ -0,0 +1,2 @@ + +properties.config.exists: true diff --git a/src/test/resources/config/elasticsearch.yaml b/src/test/resources/config/elasticsearch.yaml new file mode 100644 index 00000000000..b6ebc6bd105 --- /dev/null +++ b/src/test/resources/config/elasticsearch.yaml @@ -0,0 +1,3 @@ + +yaml.config.exists: true + From aa98ae5240cf8c2d18abb50ab310ff414d3df9a2 Mon Sep 17 00:00:00 
2001 From: Adrien Grand Date: Fri, 22 May 2015 19:28:49 +0200 Subject: [PATCH 004/123] Search: Do not specialize TermQuery vs. TermsQuery. We had some code that created a TermQuery instead of a TermsQuery when there was a single term, which is not useful anymore now that TermsQuery rewrites to a disjunction when there are few terms: https://issues.apache.org/jira/browse/LUCENE-6360 --- .../mapper/core/AbstractFieldMapper.java | 20 ++++--------------- .../index/mapper/internal/IdFieldMapper.java | 6 +----- .../string/SimpleStringMappingTests.java | 14 ------------- 3 files changed, 5 insertions(+), 35 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java index 5c26e0c14b4..b1d1d3395e4 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java @@ -479,23 +479,11 @@ public abstract class AbstractFieldMapper implements FieldMapper { @Override public Query termsQuery(List values, @Nullable QueryParseContext context) { - switch (values.size()) { - case 0: - return Queries.newMatchNoDocsQuery(); - case 1: - // When there is a single term, it's important to return a term filter so that - // it can return a DocIdSet that is directly backed by a postings list, instead - // of loading everything into a bit set and returning an iterator based on the - // bit set - return termQuery(values.get(0), context); - default: - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { - bytesRefs[i] = indexedValueForSearch(values.get(i)); - } - return new TermsQuery(names.indexName(), bytesRefs); - + BytesRef[] bytesRefs = new BytesRef[values.size()]; + for (int i = 0; i < bytesRefs.length; i++) { + bytesRefs[i] = indexedValueForSearch(values.get(i)); } + return new TermsQuery(names.indexName(), bytesRefs); } 
@Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index cd59591b6bb..b7438540c40 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -188,11 +188,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { return super.termQuery(value, context); } final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); - if (uids.length == 1) { - return new TermQuery(new Term(UidFieldMapper.NAME, uids[0])); - } else { - return new TermsQuery(UidFieldMapper.NAME, uids); - } + return new TermsQuery(UidFieldMapper.NAME, uids); } @Override diff --git a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 9baa2ca6afc..3363518b8e4 100644 --- a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -519,18 +519,4 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest { assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms")); } - public void testTermsQuery() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse(mapping); - FieldMapper mapper = defaultMapper.mappers().getMapper("field"); - assertNotNull(mapper); - assertTrue(mapper instanceof StringFieldMapper); - assertEquals(Queries.newMatchNoDocsQuery(), mapper.termsQuery(Collections.emptyList(), null)); - assertEquals(new 
TermQuery(new Term("field", "value")), mapper.termsQuery(Collections.singletonList("value"), null)); - assertEquals(new TermsQuery(new Term("field", "value1"), new Term("field", "value2")), mapper.termsQuery(Arrays.asList("value1", "value2"), null)); - } - } From b376a3fbfb3a5830db194476b7a08aad7633650f Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sat, 23 May 2015 17:18:21 +0300 Subject: [PATCH 005/123] Move index sealing terminology to synced flush #10032 introduced the notion of sealing an index by marking it with a special read only marker, allowing for a couple of optimizations to happen. The most important one was to speed up recoveries of shards where we know nothing has changed since they were online by skipping the file based sync phase. During the implementation we came up with a light notion which achieves the same recovery benefits but without the read only aspects which we dubbed synced flush. The fact that it was light weight and didn't put the index in read only mode, allowed us to do it automatically in the background which has great advantage. However we also felt the need to allow users to manually trigger this operation. The implementation at #11179 added the sync flush internal logic and the manual (rest) API. The name of the API was modeled after the sealing terminology which may end up being confusing. This commit changes the API name to match the internal synced flush naming, namely `{index}/_flush/synced`. On top of that it contains a couple of other changes: - Remove all java client API. This feature is not supposed to be called programmatically by applications but rather by admins. - Improve rest responses making structure similar to other (flush) API - Change IndexShard#getOperationsCount to exclude the internal +1 on open shard .
it's confusing to get 1 while there are actually no ongoing operations - Some minor other clean ups --- docs/reference/indices.asciidoc | 1 - docs/reference/indices/flush.asciidoc | 152 ++++++++++- docs/reference/indices/seal.asciidoc | 91 ------- docs/reference/setup/upgrade.asciidoc | 56 +++-- rest-api-spec/api/indices.flush_synced.json | 39 +++ rest-api-spec/api/indices.seal.json | 17 -- .../10_basic.yaml | 7 +- .../elasticsearch/action/ActionModule.java | 3 - .../admin/indices/seal/SealIndicesAction.java | 45 ---- .../indices/seal/SealIndicesRequest.java | 49 ---- .../seal/SealIndicesRequestBuilder.java | 38 --- .../indices/seal/SealIndicesResponse.java | 171 ------------- .../seal/TransportSealIndicesAction.java | 100 -------- .../client/IndicesAdminClient.java | 25 -- .../client/support/AbstractClient.java | 19 -- .../elasticsearch/index/shard/IndexShard.java | 4 +- .../elasticsearch/indices/IndicesModule.java | 1 + .../flush/IndicesSyncedFlushResult.java | 148 +++++++++++ .../flush/ShardsSyncedFlushResult.java | 164 ++++++++++++ .../{ => flush}/SyncedFlushService.java | 237 +++++++----------- .../recovery/RecoverySourceHandler.java | 3 +- .../TransportNodesListShardStoreMetaData.java | 9 +- .../rest/action/RestActionModule.java | 8 +- .../RestSyncedFlushAction.java} | 35 +-- .../admin/indices/seal/SealIndicesTests.java | 116 --------- .../gateway/RecoveryFromGatewayTests.java | 10 +- .../index/shard/IndexShardTests.java | 9 +- .../org/elasticsearch/indices/SealTests.java | 49 ---- .../indices/{ => flush}/FlushTest.java | 82 +++--- .../SyncedFlushSingleNodeTest.java} | 53 ++-- .../indices/flush/SyncedFlushUnitTests.java | 134 ++++++++++ .../indices/{ => flush}/SyncedFlushUtil.java | 30 ++- .../test/ElasticsearchIntegrationTest.java | 144 +++++------ .../test/InternalTestCluster.java | 10 +- 34 files changed, 948 insertions(+), 1111 deletions(-) delete mode 100644 docs/reference/indices/seal.asciidoc create mode 100644 
rest-api-spec/api/indices.flush_synced.json delete mode 100644 rest-api-spec/api/indices.seal.json rename rest-api-spec/test/{indices.seal => indices.flush}/10_basic.yaml (74%) delete mode 100644 src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesAction.java delete mode 100644 src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java delete mode 100644 src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequestBuilder.java delete mode 100644 src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesResponse.java delete mode 100644 src/main/java/org/elasticsearch/action/admin/indices/seal/TransportSealIndicesAction.java create mode 100644 src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java create mode 100644 src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java rename src/main/java/org/elasticsearch/indices/{ => flush}/SyncedFlushService.java (80%) rename src/main/java/org/elasticsearch/rest/action/admin/indices/{seal/RestSealIndicesAction.java => flush/RestSyncedFlushAction.java} (55%) delete mode 100644 src/test/java/org/elasticsearch/action/admin/indices/seal/SealIndicesTests.java delete mode 100644 src/test/java/org/elasticsearch/indices/SealTests.java rename src/test/java/org/elasticsearch/indices/{ => flush}/FlushTest.java (73%) rename src/test/java/org/elasticsearch/indices/{SycnedFlushSingleNodeTest.java => flush/SyncedFlushSingleNodeTest.java} (83%) create mode 100644 src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java rename src/test/java/org/elasticsearch/indices/{ => flush}/SyncedFlushUtil.java (70%) diff --git a/docs/reference/indices.asciidoc b/docs/reference/indices.asciidoc index fe1b20b05e9..06a24821440 100644 --- a/docs/reference/indices.asciidoc +++ b/docs/reference/indices.asciidoc @@ -59,7 +59,6 @@ and warmers. 
* <> * <> * <> -* <> * <> -- diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index b2b67474623..99aa589d866 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -10,8 +10,9 @@ trigger flush operations as required in order to clear memory. [source,js] -------------------------------------------------- -$ curl -XPOST 'http://localhost:9200/twitter/_flush' +POST /twitter/_flush -------------------------------------------------- +// AUTOSENSE [float] [[flush-parameters]] @@ -39,7 +40,152 @@ or even on `_all` the indices. [source,js] -------------------------------------------------- -$ curl -XPOST 'http://localhost:9200/kimchy,elasticsearch/_flush' +POST /kimchy,elasticsearch/_flush -$ curl -XPOST 'http://localhost:9200/_flush' +POST /_flush -------------------------------------------------- +// AUTOSENSE + +[[indices-synced-flush]] +=== Synced Flush + +Elasticsearch tracks the indexing activity of each shards. Shards that have not +received any indexing operations for, by default, 30m are automatically marked as inactive. This presents +an opportunity for Elasticsearch to reduce shard resources and also perform +a special kind of flush, called `synced flush`. A synced flush performs normal +flushing and adds a special uniquely generated marker (`sync_id`) to all shards. + +Since the sync id marker was added when there were no ongoing indexing operations, it can +be used as a quick way to check if two shards indices are identical. This quick sync id +comparison (if present) is used during recovery or restarts to skip the first and +most costly phase of the process. In that case, no segment files need to be copied and +the transaction log replay phase of the recovery can start immediately. Note that since the sync id +marker was applied together with a flush, it is highly likely that the transaction log will be empty, +speeding up recoveries even more. 
+ +This is particularly useful for use cases having lots of indices which are +never or very rarely updated, such as time based data. This use case typically generates lots of indices whose +recovery without the synced flush marker would take a long time. + +To check whether a shard has a marker or not, one can use the `commit` section of shard stats returned by +the <> API: + +[source,bash] +-------------------------------------------------- +GET /twitter/_stats/commit?level=shards +-------------------------------------------------- +// AUTOSENSE + +[float] +=== Synced Flush API + +The Synced Flush API allows an administrator to initiate a synced flush manually. This can be particularly useful for +a planned (rolling) cluster restart where one can stop indexing and doesn't want to wait for the default 30m to pass +when the synced flush will be performed automatically. + +While handy, there are a couple of caveats for this API: + +1. Synced flush is a best effort operation. Any ongoing indexing operations will cause +the synced flush to fail. This means that some shards may be synced flushed while others aren't. See below for more. +2. The `sync_id` marker is removed as soon as the shard is flushed again. Uncommitted +operations in the transaction log do not remove the marker. That is because the marker is stored as part +of a low level lucene commit, representing a point in time snapshot of the segments. In practice, one should consider +any indexing operation on an index as removing the marker. + + +[source,bash] +-------------------------------------------------- +POST /twitter/_flush/synced +-------------------------------------------------- +// AUTOSENSE + +The response contains details about how many shards were successfully synced-flushed and information about any failure.
+ +Here is what it looks like when all shards of a two shards and one replica index successfully +sync-flushed: + +[source,js] +-------------------------------------------------- +{ + "_shards": { + "total": 4, + "successful": 4, + "failed": 0 + }, + "twitter": { + "total": 4, + "successful": 4, + "failed": 0 + } +} +-------------------------------------------------- + + +Here is what it looks like when one shard group failed due to pending operations: + +[source,js] +-------------------------------------------------- +{ + "_shards": { + "total": 4, + "successful": 2, + "failed": 2 + }, + "twitter": { + "total": 4, + "successful": 2, + "failed": 2, + "failures": [ + { + "shard": 1, + "reason": "[2] ongoing operations on primary" + } + ] + } +} +-------------------------------------------------- + + +Sometimes the failures are specific to a shard copy, in which case they will be reported as follows: + +[source,js] +-------------------------------------------------- +{ + "_shards": { + "total": 4, + "successful": 1, + "failed": 1 + }, + "twitter": { + "total": 4, + "successful": 3, + "failed": 1, + "failures": [ + { + "shard": 1, + "reason": "unexpected error", + "routing": { + "state": "STARTED", + "primary": false, + "node": "SZNr2J_ORxKTLUCydGX4zA", + "relocating_node": null, + "shard": 1, + "index": "twitter" + } + } + ] + } +} +-------------------------------------------------- + + +The synced flush API can be applied to more than one index with a single call, +or even on `_all` the indices. 
+ +[source,js] +-------------------------------------------------- +POST /kimchy,elasticsearch/_flush/synced + +POST /_flush/synced +-------------------------------------------------- +// AUTOSENSE \ No newline at end of file diff --git a/docs/reference/indices/seal.asciidoc b/docs/reference/indices/seal.asciidoc deleted file mode 100644 index 86ad42a40da..00000000000 --- a/docs/reference/indices/seal.asciidoc +++ /dev/null @@ -1,91 +0,0 @@ -[[indices-seal]] -== Seal - -The seal API flushes and adds a "seal" marker to the shards of one or more -indices. The seal is used during recovery or restarts to skip the first and -most costly phase of the process if all copies of the shard have the same seal. -No segment files need to be copied and the transaction log replay phase of the -recovery can start immediately which makes recovery much faster. - -There are two important points about seals: -1. They are best effort in that if there are any outstanding write operations -while the seal operation is being performed then the shards which those writes -target won't be sealed but all others will be. See below for more. -2. The seal breaks as soon as the shard issues a new lucene commit. Uncommitted -operations in the transaction log do not break the seal. That is because a seal -marks a point in time snapshot of the segments, a low level lucene commit. -Practically that means that every write operation on the index will remove the -seal. - -[source,bash] --------------------------------------------------- -$ curl -XPOST 'http://localhost:9200/twitter/_seal' --------------------------------------------------- - -The response contains details about which shards wrote the seal and the reason -in case they failed to write the seal. 
- -Here is what it looks like when all copies single shard index successfully -wrote the seal: - -[source,js] --------------------------------------------------- -{ - "twitter": [ - { - "shard_id": 0, - "responses": { - "5wjOIntuRqy9F_7JRrrLwA": "success", - "M2iCBe-nS5yaInE8volfSg": "success" - }, - "message": "success" - } -} --------------------------------------------------- - - -Here is what it looks like when one copy fails: - -[source,js] --------------------------------------------------- -{ - "twitter": [ - { - "shard_id": 0, - "responses": { - "M2iCBe-nS5yaInE8volfSg": "pending operations", - "5wjOIntuRqy9F_7JRrrLwA": "success" - }, - "message": "failed on some copies" - } -} --------------------------------------------------- - - -Sometimes the failures can be shard wide and they'll look like this: - -[source,js] --------------------------------------------------- -{ - "twitter": [ - { - "shard_id": 0, - "message": "operation counter on primary is non zero [2]" - } -} --------------------------------------------------- - - -[float] -[[seal-multi-index]] -=== Multi Index - -The seal API can be applied to more than one index with a single call, -or even on `_all` the indices. 
- -[source,js] --------------------------------------------------- -curl -XPOST 'http://localhost:9200/kimchy,elasticsearch/_seal' - -curl -XPOST 'http://localhost:9200/_seal' --------------------------------------------------- diff --git a/docs/reference/setup/upgrade.asciidoc b/docs/reference/setup/upgrade.asciidoc index c477f5ec9c7..61708755006 100644 --- a/docs/reference/setup/upgrade.asciidoc +++ b/docs/reference/setup/upgrade.asciidoc @@ -85,13 +85,27 @@ This syntax applies to Elasticsearch 1.0 and later: [source,sh] -------------------------------------------------- - curl -XPUT localhost:9200/_cluster/settings -d '{ - "transient" : { - "cluster.routing.allocation.enable" : "none" - } - }' +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "cluster.routing.allocation.enable" : "none" + } +}' -------------------------------------------------- +* It is safe to continue indexing while doing the upgrade. However, you can speed the process considerably +by stopping indexing temporarily to non-essential indices and issuing a manual <>. +A synced flush is a special kind of flush which can seriously speed up recovery of shards. Elasticsearch automatically +uses it when an index has been inactive for a while (default is `30m`) but you can manually trigger it using the following command: + +[source,sh] +-------------------------------------------------- +curl -XPOST localhost:9200/_all/_flush/synced +-------------------------------------------------- + +Note that a synced flush call is a best effort operation. It will fail if there are any pending indexing operations. It is safe to issue +it multiple times if needed. + + * Shut down a single node within the cluster. * Confirm that all shards are correctly reallocated to the remaining running nodes.
@@ -110,11 +124,11 @@ This syntax applies to Elasticsearch 1.0 and later: [source,sh] -------------------------------------------------- - curl -XPUT localhost:9200/_cluster/settings -d '{ - "transient" : { - "cluster.routing.allocation.enable" : "all" - } - }' +curl -XPUT localhost:9200/_cluster/settings -d '{ + "transient" : { + "cluster.routing.allocation.enable" : "all" + } +}' -------------------------------------------------- * Observe that all shards are properly allocated on all nodes. Balancing may take some time. @@ -150,11 +164,11 @@ This syntax is from versions prior to 1.0: [source,sh] -------------------------------------------------- - curl -XPUT localhost:9200/_cluster/settings -d '{ - "persistent" : { - "cluster.routing.allocation.disable_allocation" : true - } - }' +curl -XPUT localhost:9200/_cluster/settings -d '{ + "persistent" : { + "cluster.routing.allocation.disable_allocation" : true + } +}' -------------------------------------------------- * Stop all Elasticsearch services on all nodes in the cluster. @@ -169,12 +183,12 @@ This syntax is from versions prior to 1.0: This syntax is from release 1.0 and later: [source,sh] ------------------------------------------------------ - curl -XPUT localhost:9200/_cluster/settings -d '{ - "persistent" : { - "cluster.routing.allocation.disable_allocation": false, - "cluster.routing.allocation.enable" : "all" - } - }' +curl -XPUT localhost:9200/_cluster/settings -d '{ + "persistent" : { + "cluster.routing.allocation.disable_allocation": false, + "cluster.routing.allocation.enable" : "all" + } +}' ------------------------------------------------------ The cluster upgrade can be streamlined by installing the software before stopping cluster services. If this is done, testing must be performed to ensure that no production data or configuration files are overwritten prior to restart. 
diff --git a/rest-api-spec/api/indices.flush_synced.json b/rest-api-spec/api/indices.flush_synced.json new file mode 100644 index 00000000000..28cad291e6a --- /dev/null +++ b/rest-api-spec/api/indices.flush_synced.json @@ -0,0 +1,39 @@ +{ + "indices.flush.synced": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html", + "methods": ["POST", "GET"], + "url": { + "path": "/_flush/synced", + "paths": [ + "/_flush/synced", + "/{index}/_flush/synced" + ], + "parts": { + "index": { + "type" : "list", + "description" : "A comma-separated list of index names; use `_all` or empty string for all indices" + }, + "ignore_unavailable": { + "type": "boolean", + "description": "Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type": "boolean", + "description": "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "expand_wildcards": { + "type": "enum", + "options": [ + "open", + "closed", + "none", + "all" + ], + "default": "open", + "description": "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
+ } + } + }, + "body": null + } +} diff --git a/rest-api-spec/api/indices.seal.json b/rest-api-spec/api/indices.seal.json deleted file mode 100644 index 3377d6e88a1..00000000000 --- a/rest-api-spec/api/indices.seal.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "indices.seal": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-seal.html", - "methods": ["POST", "GET"], - "url": { - "path": "/_seal", - "paths": ["/_seal", "/{index}/_seal"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names; use `_all` or empty string for all indices" - } - } - }, - "body": null - } -} diff --git a/rest-api-spec/test/indices.seal/10_basic.yaml b/rest-api-spec/test/indices.flush/10_basic.yaml similarity index 74% rename from rest-api-spec/test/indices.seal/10_basic.yaml rename to rest-api-spec/test/indices.flush/10_basic.yaml index 5277adc528d..f85458da69e 100644 --- a/rest-api-spec/test/indices.seal/10_basic.yaml +++ b/rest-api-spec/test/indices.flush/10_basic.yaml @@ -1,5 +1,5 @@ --- -"Index seal rest test": +"Index synced flush rest test": - do: indices.create: index: testing @@ -8,8 +8,11 @@ cluster.health: wait_for_status: yellow - do: - indices.seal: + indices.flush.synced: index: testing + + - is_false: _shards.failed + - do: indices.stats: {level: shards} diff --git a/src/main/java/org/elasticsearch/action/ActionModule.java b/src/main/java/org/elasticsearch/action/ActionModule.java index 1c273f67b5b..c529a3e876f 100644 --- a/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/src/main/java/org/elasticsearch/action/ActionModule.java @@ -103,8 +103,6 @@ import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettin import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; -import 
org.elasticsearch.action.admin.indices.seal.SealIndicesAction; -import org.elasticsearch.action.admin.indices.seal.TransportSealIndicesAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; @@ -256,7 +254,6 @@ public class ActionModule extends AbstractModule { registerAction(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class); registerAction(RefreshAction.INSTANCE, TransportRefreshAction.class); registerAction(FlushAction.INSTANCE, TransportFlushAction.class); - registerAction(SealIndicesAction.INSTANCE, TransportSealIndicesAction.class); registerAction(OptimizeAction.INSTANCE, TransportOptimizeAction.class); registerAction(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class); registerAction(PutWarmerAction.INSTANCE, TransportPutWarmerAction.class); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesAction.java deleted file mode 100644 index fbb01b05abe..00000000000 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesAction.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.action.Action; -import org.elasticsearch.client.ElasticsearchClient; - -/** - */ -public class SealIndicesAction extends Action { - - public static final SealIndicesAction INSTANCE = new SealIndicesAction(); - public static final String NAME = "indices:admin/seal"; - - private SealIndicesAction() { - super(NAME); - } - - @Override - public SealIndicesResponse newResponse() { - return new SealIndicesResponse(); - } - - @Override - public SealIndicesRequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new SealIndicesRequestBuilder(client, this); - } -} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java deleted file mode 100644 index 42cdc51ed32..00000000000 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; - -import java.util.Arrays; - -/** - * A request to seal one or more indices. - */ -public class SealIndicesRequest extends BroadcastOperationRequest { - - SealIndicesRequest() { - } - - /** - * Constructs a seal request against one or more indices. If nothing is provided, all indices will - * be sealed. - */ - public SealIndicesRequest(String... indices) { - super(indices); - } - - @Override - public String toString() { - return "SealIndicesRequest{" + - "indices=" + Arrays.toString(indices) + - ", indicesOptions=" + indicesOptions() + - '}'; - } -} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequestBuilder.java deleted file mode 100644 index a424ab3fc3b..00000000000 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequestBuilder.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.client.ElasticsearchClient; - -/** - * - */ -public class SealIndicesRequestBuilder extends ActionRequestBuilder { - - public SealIndicesRequestBuilder(ElasticsearchClient client, SealIndicesAction action) { - super(client, action, new SealIndicesRequest()); - } - - public SealIndicesRequestBuilder indices(String ... indices) { - request.indices(indices); - return this; - } -} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesResponse.java deleted file mode 100644 index 1dfd47795a5..00000000000 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesResponse.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.indices.SyncedFlushService; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; -import java.util.*; - -/** - * A response to a seal action on several indices. 
- */ -public class SealIndicesResponse extends ActionResponse implements ToXContent { - - final private Set results; - - private RestStatus restStatus; - - SealIndicesResponse() { - results = new HashSet<>(); - } - - SealIndicesResponse(Set results) { - this.results = results; - if (allShardsFailed()) { - restStatus = RestStatus.CONFLICT; - } else if (someShardsFailed()) { - restStatus = RestStatus.PARTIAL_CONTENT; - } else { - restStatus = RestStatus.OK; - } - } - - public RestStatus status() { - return restStatus; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - results.clear(); - for (int i = 0; i < size; i++) { - SyncedFlushService.SyncedFlushResult syncedFlushResult = new SyncedFlushService.SyncedFlushResult(); - syncedFlushResult.readFrom(in); - results.add(syncedFlushResult); - } - restStatus = RestStatus.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(results.size()); - for (SyncedFlushService.SyncedFlushResult syncedFlushResult : results) { - syncedFlushResult.writeTo(out); - } - RestStatus.writeTo(out, restStatus); - } - - public Set results() { - return results; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - Map> allResults = new HashMap<>(); - - // first, sort everything by index and shard id - for (SyncedFlushService.SyncedFlushResult result : results) { - String indexName = result.getShardId().index().name(); - int shardId = result.getShardId().getId(); - - if (allResults.get(indexName) == null) { - // no results yet for this index - allResults.put(indexName, new TreeMap()); - } - if (result.shardResponses().size() > 0) { - Map shardResponses = new HashMap<>(); - for (Map.Entry shardResponse : result.shardResponses().entrySet()) { - shardResponses.put(shardResponse.getKey(), shardResponse.getValue()); - } - 
allResults.get(indexName).put(shardId, shardResponses); - } else { - allResults.get(indexName).put(shardId, result.failureReason()); - } - } - for (Map.Entry> result : allResults.entrySet()) { - builder.startArray(result.getKey()); - for (Map.Entry shardResponse : result.getValue().entrySet()) { - builder.startObject(); - builder.field("shard_id", shardResponse.getKey()); - if (shardResponse.getValue() instanceof Map) { - builder.startObject("responses"); - Map results = (Map) shardResponse.getValue(); - boolean success = true; - for (Map.Entry shardCopy : results.entrySet()) { - builder.field(shardCopy.getKey().currentNodeId(), shardCopy.getValue().success() ? "success" : shardCopy.getValue().failureReason()); - if (shardCopy.getValue().success() == false) { - success = false; - } - } - builder.endObject(); - builder.field("message", success ? "success" : "failed on some copies"); - - } else { - builder.field("message", shardResponse.getValue()); // must be a string - } - builder.endObject(); - } - builder.endArray(); - } - return builder; - } - - public boolean allShardsFailed() { - for (SyncedFlushService.SyncedFlushResult result : results) { - if (result.success()) { - return false; - } - if (result.shardResponses().size() > 0) { - for (Map.Entry shardResponse : result.shardResponses().entrySet()) { - if (shardResponse.getValue().success()) { - return false; - } - } - } - } - return true; - } - - public boolean someShardsFailed() { - for (SyncedFlushService.SyncedFlushResult result : results) { - if (result.success() == false) { - return true; - } - if (result.shardResponses().size() > 0) { - for (Map.Entry shardResponse : result.shardResponses().entrySet()) { - if (shardResponse.getValue().success() == false) { - return true; - } - } - } - } - return false; - } -} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/TransportSealIndicesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/TransportSealIndicesAction.java 
deleted file mode 100644 index 61d5e53c32f..00000000000 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/TransportSealIndicesAction.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.CountDown; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.IndicesLifecycle; -import org.elasticsearch.indices.SyncedFlushService; -import 
org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; - -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - -/** - */ -public class TransportSealIndicesAction extends HandledTransportAction { - - - final private SyncedFlushService syncedFlushService; - final private ClusterService clusterService; - - @Inject - public TransportSealIndicesAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, SyncedFlushService syncedFlushService, ClusterService clusterService) { - super(settings, SealIndicesAction.NAME, threadPool, transportService, actionFilters, SealIndicesRequest.class); - this.syncedFlushService = syncedFlushService; - this.clusterService = clusterService; - } - - @Override - protected void doExecute(final SealIndicesRequest request, final ActionListener listener) { - ClusterState state = clusterService.state(); - String[] concreteIndices = state.metaData().concreteIndices(request.indicesOptions(), request.indices()); - GroupShardsIterator primaries = state.routingTable().activePrimaryShardsGrouped(concreteIndices, true); - final Set results = ConcurrentCollections.newConcurrentSet(); - - final CountDown countDown = new CountDown(primaries.size()); - - for (final ShardIterator shard : primaries) { - if (shard.size() == 0) { - results.add(new SyncedFlushService.SyncedFlushResult(shard.shardId(), "no active primary available")); - if (countDown.countDown()) { - listener.onResponse(new SealIndicesResponse(results)); - } - } else { - final ShardId shardId = shard.shardId(); - syncedFlushService.attemptSyncedFlush(shardId, new ActionListener() { - @Override - public void onResponse(SyncedFlushService.SyncedFlushResult syncedFlushResult) { - results.add(syncedFlushResult); - if (countDown.countDown()) { - listener.onResponse(new SealIndicesResponse(results)); - } - } - - @Override - public void onFailure(Throwable e) { - logger.debug("{} unexpected error 
while executing synced flush", shardId); - results.add(new SyncedFlushService.SyncedFlushResult(shardId, e.getMessage())); - if (countDown.countDown()) { - listener.onResponse(new SealIndicesResponse(results)); - } - } - }); - } - } - - } -} diff --git a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index c54aaece7f4..6f69fd42ac7 100644 --- a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -84,9 +84,6 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRespons import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.seal.SealIndicesRequest; -import org.elasticsearch.action.admin.indices.seal.SealIndicesRequestBuilder; -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; @@ -117,7 +114,6 @@ import org.elasticsearch.common.Nullable; */ public interface IndicesAdminClient extends ElasticsearchClient { - /** * Indices Exists. * @@ -362,27 +358,6 @@ public interface IndicesAdminClient extends ElasticsearchClient { */ FlushRequestBuilder prepareFlush(String... 
indices); - /** - * Explicitly sync flush one or more indices - * - * @param request The seal indices request - * @return A result future - */ - ActionFuture sealIndices(SealIndicesRequest request); - - /** - * Explicitly sync flush one or more indices - * - * @param request The seal indices request - * @param listener A listener to be notified with a result - */ - void sealIndices(SealIndicesRequest request, ActionListener listener); - - /** - * Explicitly seal one or more indices - */ - SealIndicesRequestBuilder prepareSealIndices(String... indices); - /** * Explicitly optimize one or more indices into a the number of segments. * diff --git a/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 18b0a377ac0..703b03f0dc5 100644 --- a/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -180,10 +180,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.seal.SealIndicesAction; -import org.elasticsearch.action.admin.indices.seal.SealIndicesRequest; -import org.elasticsearch.action.admin.indices.seal.SealIndicesRequestBuilder; -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; @@ -1327,21 +1323,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new 
FlushRequestBuilder(this, FlushAction.INSTANCE).setIndices(indices); } - @Override - public ActionFuture sealIndices(SealIndicesRequest request) { - return execute(SealIndicesAction.INSTANCE, request); - } - - @Override - public void sealIndices(SealIndicesRequest request, ActionListener listener) { - execute(SealIndicesAction.INSTANCE, request, listener); - } - - @Override - public SealIndicesRequestBuilder prepareSealIndices(String... indices) { - return new SealIndicesRequestBuilder(this, SealIndicesAction.INSTANCE).indices(indices); - } - @Override public void getMappings(GetMappingsRequest request, ActionListener listener) { execute(GetMappingsAction.INSTANCE, request, listener); diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 71c0bc1347d..a5a9835b21f 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.shard; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; - import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.search.Query; @@ -79,7 +78,6 @@ import org.elasticsearch.index.get.ShardGetService; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.indexing.ShardIndexingService; import org.elasticsearch.index.mapper.*; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.policy.MergePolicyProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider; @@ -1367,7 +1365,7 @@ public class IndexShard extends AbstractIndexShardComponent { } public int getOperationsCount() { - return indexShardOperationCounter.refCount(); + return Math.max(0, indexShardOperationCounter.refCount() - 1); // refCount 
is incremented on creation and decremented on close } /** diff --git a/src/main/java/org/elasticsearch/indices/IndicesModule.java b/src/main/java/org/elasticsearch/indices/IndicesModule.java index 4cfb9980e87..50bbfa61858 100644 --- a/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -32,6 +32,7 @@ import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.memory.IndexingMemoryController; import org.elasticsearch.indices.query.IndicesQueriesModule; import org.elasticsearch.indices.recovery.RecoverySettings; diff --git a/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java b/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java new file mode 100644 index 00000000000..7c0a680b383 --- /dev/null +++ b/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.indices.flush; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * The result of performing a sync flush operation on all shards of multiple indices + */ +public class IndicesSyncedFlushResult implements ToXContent { + + final Map> shardsResultPerIndex; + final ShardCounts shardCounts; + + + public IndicesSyncedFlushResult(Map> shardsResultPerIndex) { + this.shardsResultPerIndex = ImmutableMap.copyOf(shardsResultPerIndex); + this.shardCounts = calculateShardCounts(Iterables.concat(shardsResultPerIndex.values())); + } + + /** total number shards, including replicas, both assigned and unassigned */ + public int totalShards() { + return shardCounts.total; + } + + /** total number of shards for which the operation failed */ + public int failedShards() { + return shardCounts.failed; + } + + /** total number of shards which were successfully sync-flushed */ + public int successfulShards() { + return shardCounts.successful; + } + + public Map> getShardsResultPerIndex() { + return shardsResultPerIndex; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields._SHARDS); + shardCounts.toXContent(builder, params); + builder.endObject(); + for (Map.Entry> indexEntry : shardsResultPerIndex.entrySet()) { + List indexResult = indexEntry.getValue(); + builder.startObject(indexEntry.getKey()); + ShardCounts indexShardCounts = calculateShardCounts(indexResult); + indexShardCounts.toXContent(builder, 
params); + if (indexShardCounts.failed > 0) { + builder.startArray(Fields.FAILURES); + for (ShardsSyncedFlushResult shardResults : indexResult) { + if (shardResults.failed()) { + builder.startObject(); + builder.field(Fields.SHARD, shardResults.shardId().id()); + builder.field(Fields.REASON, shardResults.failureReason()); + builder.endObject(); + continue; + } + Map failedShards = shardResults.failedShards(); + for (Map.Entry shardEntry : failedShards.entrySet()) { + builder.startObject(); + builder.field(Fields.SHARD, shardResults.shardId().id()); + builder.field(Fields.REASON, shardEntry.getValue().failureReason()); + builder.field(Fields.ROUTING, shardEntry.getKey()); + builder.endObject(); + } + } + builder.endArray(); + } + builder.endObject(); + } + return builder; + } + + static ShardCounts calculateShardCounts(Iterable results) { + int total = 0, successful = 0, failed = 0; + for (ShardsSyncedFlushResult result : results) { + total += result.totalShards(); + successful += result.successfulShards(); + if (result.failed()) { + // treat all shard copies as failed + failed += result.totalShards(); + } else { + // some shards may have failed during the sync phase + failed += result.failedShards().size(); + } + } + return new ShardCounts(total, successful, failed); + } + + static final class ShardCounts implements ToXContent { + + public final int total; + public final int successful; + public final int failed; + + ShardCounts(int total, int successful, int failed) { + this.total = total; + this.successful = successful; + this.failed = failed; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.TOTAL, total); + builder.field(Fields.SUCCESSFUL, successful); + builder.field(Fields.FAILED, failed); + return builder; + } + } + + static final class Fields { + static final XContentBuilderString _SHARDS = new XContentBuilderString("_shards"); + static final XContentBuilderString TOTAL 
= new XContentBuilderString("total"); + static final XContentBuilderString SUCCESSFUL = new XContentBuilderString("successful"); + static final XContentBuilderString FAILED = new XContentBuilderString("failed"); + static final XContentBuilderString FAILURES = new XContentBuilderString("failures"); + static final XContentBuilderString SHARD = new XContentBuilderString("shard"); + static final XContentBuilderString ROUTING = new XContentBuilderString("routing"); + static final XContentBuilderString REASON = new XContentBuilderString("reason"); + } +} diff --git a/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java b/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java new file mode 100644 index 00000000000..cdf8a2495d8 --- /dev/null +++ b/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java @@ -0,0 +1,164 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.indices.flush; + +import com.google.common.collect.ImmutableMap; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.index.shard.ShardId; + +import java.util.HashMap; +import java.util.Map; + +/** + * Result for all copies of a shard + */ +public class ShardsSyncedFlushResult { + private String failureReason; + private Map shardResponses; + private String syncId; + private ShardId shardId; + // some shards may be unassigned, so we need this as state + private int totalShards; + + public ShardsSyncedFlushResult() { + } + + public ShardId getShardId() { + return shardId; + } + + /** + * failure constructor + */ + public ShardsSyncedFlushResult(ShardId shardId, int totalShards, String failureReason) { + this.syncId = null; + this.failureReason = failureReason; + this.shardResponses = ImmutableMap.of(); + this.shardId = shardId; + this.totalShards = totalShards; + } + + /** + * success constructor + */ + public ShardsSyncedFlushResult(ShardId shardId, String syncId, int totalShards, Map shardResponses) { + this.failureReason = null; + ImmutableMap.Builder builder = ImmutableMap.builder(); + this.shardResponses = builder.putAll(shardResponses).build(); + this.syncId = syncId; + this.totalShards = totalShards; + this.shardId = shardId; + } + + /** + * @return true if the operation failed before reaching step three of synced flush. 
{@link #failureReason()} can be used for + * more details + */ + public boolean failed() { + return failureReason != null; + } + + /** + * @return the reason for the failure if synced flush failed before step three of synced flush + */ + public String failureReason() { + return failureReason; + } + + public String syncId() { + return syncId; + } + + /** + * @return total number of shards for which a sync attempt was made + */ + public int totalShards() { + return totalShards; + } + + /** + * @return total number of successful shards + */ + public int successfulShards() { + int i = 0; + for (SyncedFlushService.SyncedFlushResponse result : shardResponses.values()) { + if (result.success()) { + i++; + } + } + return i; + } + + /** + * @return an array of shard failures + */ + public Map failedShards() { + Map failures = new HashMap<>(); + for (Map.Entry result : shardResponses.entrySet()) { + if (result.getValue().success() == false) { + failures.put(result.getKey(), result.getValue()); + } + } + return failures; + } + + /** + * @return Individual responses for each shard copy with a detailed failure message if the copy failed to perform the synced flush. + * Empty if synced flush failed before step three. 
+ */ + public Map shardResponses() { + return shardResponses; + } + +// @Override +// public void writeTo(StreamOutput out) throws IOException { +// super.writeTo(out); +// out.writeOptionalString(failureReason); +// out.writeOptionalString(syncId); +// out.writeVInt(totalShards); +// out.writeVInt(shardResponses.size()); +// for (Map.Entry result : shardResponses.entrySet()) { +// result.getKey().writeTo(out); +// result.getValue().writeTo(out); +// } +// shardId.writeTo(out); +// } + +// @Override +// public void readFrom(StreamInput in) throws IOException { +// super.readFrom(in); +// failureReason = in.readOptionalString(); +// syncId = in.readOptionalString(); +// totalShards = in.readVInt(); +// int size = in.readVInt(); +// ImmutableMap.Builder builder = ImmutableMap.builder(); +// for (int i = 0; i < size; i++) { +// ImmutableShardRouting shardRouting = ImmutableShardRouting.readShardRoutingEntry(in); +// SyncedFlushService.SyncedFlushResponse syncedFlushRsponse = new SyncedFlushService.SyncedFlushResponse(); +// syncedFlushRsponse.readFrom(in); +// builder.put(shardRouting, syncedFlushRsponse); +// } +// shardResponses = builder.build(); +// shardId = ShardId.readShardId(in); +// } + + public ShardId shardId() { + return shardId; + } +} diff --git a/src/main/java/org/elasticsearch/indices/SyncedFlushService.java b/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java similarity index 80% rename from src/main/java/org/elasticsearch/indices/SyncedFlushService.java rename to src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index c4c3fbaa84c..b9447ed01f6 100644 --- a/src/main/java/org/elasticsearch/indices/SyncedFlushService.java +++ b/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -16,17 +16,16 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.indices; +package org.elasticsearch.indices.flush; -import com.google.common.collect.ImmutableMap; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.ImmutableShardRouting; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -36,26 +35,27 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexShardMissingException; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndexClosedException; +import org.elasticsearch.indices.IndexMissingException; +import org.elasticsearch.indices.IndicesLifecycle; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; 
import java.util.Map; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; public class SyncedFlushService extends AbstractComponent { @@ -82,10 +82,10 @@ public class SyncedFlushService extends AbstractComponent { public void onShardInactive(final IndexShard indexShard) { // we only want to call sync flush once, so only trigger it when we are on a primary if (indexShard.routingEntry().primary()) { - attemptSyncedFlush(indexShard.shardId(), new ActionListener() { + attemptSyncedFlush(indexShard.shardId(), new ActionListener() { @Override - public void onResponse(SyncedFlushResult syncedFlushResult) { - logger.debug("{} sync flush on inactive shard returned successfully for sync_id: {}", syncedFlushResult.getShardId(), syncedFlushResult.syncId()); + public void onResponse(ShardsSyncedFlushResult syncedFlushResult) { + logger.trace("{} sync flush on inactive shard returned successfully for sync_id: {}", syncedFlushResult.getShardId(), syncedFlushResult.syncId()); } @Override @@ -98,6 +98,48 @@ public class SyncedFlushService extends AbstractComponent { }); } + public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { + final ClusterState state = clusterService.state(); + final String[] concreteIndices = state.metaData().concreteIndices(indicesOptions, aliasesOrIndices); + final Map> results = ConcurrentCollections.newConcurrentMap(); + int totalNumberOfShards = 0; + int numberOfShards = 0; + for (String index : concreteIndices) { + final IndexMetaData indexMetaData = state.metaData().index(index); + totalNumberOfShards += indexMetaData.totalNumberOfShards(); + numberOfShards += indexMetaData.getNumberOfShards(); + results.put(index, Collections.synchronizedList(new ArrayList())); + + } + final int finalTotalNumberOfShards = totalNumberOfShards; + final CountDown countDown = 
new CountDown(numberOfShards); + + for (final String index : concreteIndices) { + final int indexNumberOfShards = state.metaData().index(index).getNumberOfShards(); + for (int shard = 0; shard < indexNumberOfShards; shard++) { + final ShardId shardId = new ShardId(index, shard); + attemptSyncedFlush(shardId, new ActionListener() { + @Override + public void onResponse(ShardsSyncedFlushResult syncedFlushResult) { + results.get(index).add(syncedFlushResult); + if (countDown.countDown()) { + listener.onResponse(new IndicesSyncedFlushResult(results)); + } + } + + @Override + public void onFailure(Throwable e) { + logger.debug("{} unexpected error while executing synced flush", shardId); + results.get(index).add(new ShardsSyncedFlushResult(shardId, finalTotalNumberOfShards, e.getMessage())); + if (countDown.countDown()) { + listener.onResponse(new IndicesSyncedFlushResult(results)); + } + } + }); + } + } + } + /* * Tries to flush all copies of a shard and write a sync id to it. * After a synced flush two shard copies may only contain the same sync id if they contain the same documents. @@ -124,28 +166,36 @@ public class SyncedFlushService extends AbstractComponent { * * Synced flush is a best effort operation. The sync id may be written on all, some or none of the copies. 
**/ - public void attemptSyncedFlush(final ShardId shardId, final ActionListener actionListener) { + public void attemptSyncedFlush(final ShardId shardId, final ActionListener actionListener) { try { final ClusterState state = clusterService.state(); - final IndexShardRoutingTable shardRoutingTable = getActiveShardRoutings(shardId, state); + final IndexShardRoutingTable shardRoutingTable = getShardRoutingTable(shardId, state); final List activeShards = shardRoutingTable.activeShards(); + final int totalShards = shardRoutingTable.getSize(); + + if (activeShards.size() == 0) { + actionListener.onResponse(new ShardsSyncedFlushResult(shardId, totalShards, "no active shards")); + return; + } + final ActionListener> commitIdsListener = new ActionListener>() { @Override public void onResponse(final Map commitIds) { if (commitIds.isEmpty()) { - actionListener.onResponse(new SyncedFlushResult(shardId, "all shards failed to commit on pre-sync")); + actionListener.onResponse(new ShardsSyncedFlushResult(shardId, totalShards, "all shards failed to commit on pre-sync")); + return; } final ActionListener inflightOpsListener = new ActionListener() { @Override public void onResponse(InFlightOpsResponse response) { final int inflight = response.opCount(); - assert inflight >= -1; - if (inflight != 1) { // 1 means that there are no write operations are in flight (>1) and the shard is not closed (0). - actionListener.onResponse(new SyncedFlushResult(shardId, "operation counter on primary is non zero [" + inflight + "]")); + assert inflight >= 0; + if (inflight != 0) { + actionListener.onResponse(new ShardsSyncedFlushResult(shardId, totalShards, "[" + inflight + "] ongoing operations on primary")); } else { // 3. 
now send the sync request to all the shards String syncId = Strings.base64UUID(); - sendSyncRequests(syncId, activeShards, state, commitIds, shardId, actionListener); + sendSyncRequests(syncId, activeShards, state, commitIds, shardId, totalShards, actionListener); } } @@ -171,7 +221,7 @@ public class SyncedFlushService extends AbstractComponent { } } - final IndexShardRoutingTable getActiveShardRoutings(ShardId shardId, ClusterState state) { + final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) { final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.index().name()); if (indexRoutingTable == null) { IndexMetaData index = state.getMetaData().index(shardId.index().getName()); @@ -188,7 +238,7 @@ public class SyncedFlushService extends AbstractComponent { } /** - * returns the number of inflight operations on primary. -1 upon error. + * returns the number of in flight operations on primary. -1 upon error. */ protected void getInflightOpsCount(final ShardId shardId, ClusterState state, IndexShardRoutingTable shardRoutingTable, final ActionListener listener) { try { @@ -214,7 +264,7 @@ public class SyncedFlushService extends AbstractComponent { @Override public void handleException(TransportException exp) { - logger.debug("{} unexpected error while retrieving inflight op count", shardId); + logger.debug("{} unexpected error while retrieving in flight op count", shardId); listener.onFailure(exp); } @@ -229,7 +279,8 @@ public class SyncedFlushService extends AbstractComponent { } - void sendSyncRequests(final String syncId, final List shards, ClusterState state, Map expectedCommitIds, final ShardId shardId, final ActionListener listener) { + void sendSyncRequests(final String syncId, final List shards, ClusterState state, Map expectedCommitIds, + final ShardId shardId, final int totalShards, final ActionListener listener) { final CountDown countDown = new CountDown(shards.size()); final Map results = 
ConcurrentCollections.newConcurrentMap(); for (final ShardRouting shard : shards) { @@ -237,14 +288,14 @@ public class SyncedFlushService extends AbstractComponent { if (node == null) { logger.trace("{} is assigned to an unknown node. skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); results.put(shard, new SyncedFlushResponse("unknown node")); - contDownAndSendResponseIfDone(syncId, shards, shardId, listener, countDown, results); + contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } final Engine.CommitId expectedCommitId = expectedCommitIds.get(shard.currentNodeId()); if (expectedCommitId == null) { logger.trace("{} can't resolve expected commit id for {}, skipping for sync id [{}]. shard routing {}", shardId, syncId, shard); results.put(shard, new SyncedFlushResponse("no commit id from pre-sync flush")); - contDownAndSendResponseIfDone(syncId, shards, shardId, listener, countDown, results); + contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); continue; } logger.trace("{} sending synced flush request to {}. 
sync id [{}].", shardId, shard, syncId); @@ -260,14 +311,14 @@ public class SyncedFlushService extends AbstractComponent { SyncedFlushResponse existing = results.put(shard, response); assert existing == null : "got two answers for node [" + node + "]"; // count after the assert so we won't decrement twice in handleException - contDownAndSendResponseIfDone(syncId, shards, shardId, listener, countDown, results); + contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); } @Override public void handleException(TransportException exp) { logger.trace("{} error while performing synced flush on [{}], skipping", exp, shardId, shard); results.put(shard, new SyncedFlushResponse(exp.getMessage())); - contDownAndSendResponseIfDone(syncId, shards, shardId, listener, countDown, results); + contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results); } @Override @@ -279,10 +330,12 @@ public class SyncedFlushService extends AbstractComponent { } - private void contDownAndSendResponseIfDone(String syncId, List shards, ShardId shardId, ActionListener listener, CountDown countDown, Map results) { + private void contDownAndSendResponseIfDone(String syncId, List shards, ShardId shardId, int totalShards, + ActionListener listener, CountDown countDown, Map results) { if (countDown.countDown()) { assert results.size() == shards.size(); - listener.onResponse(new SyncedFlushResult(shardId, syncId, results)); + listener.onResponse(new ShardsSyncedFlushResult(shardId, syncId, totalShards, results)); } } @@ -297,8 +350,8 @@ public class SyncedFlushService extends AbstractComponent { final DiscoveryNode node = state.nodes().get(shard.currentNodeId()); if (node == null) { logger.trace("{} shard routing {} refers to an unknown node. 
skipping.", shardId, shard); - if(countDown.countDown()) { - listener.onResponse(commitIds); + if (countDown.countDown()) { + listener.onResponse(commitIds); } continue; } @@ -313,7 +366,7 @@ public class SyncedFlushService extends AbstractComponent { Engine.CommitId existing = commitIds.putIfAbsent(node.id(), response.commitId()); assert existing == null : "got two answers for node [" + node + "]"; // count after the assert so we won't decrement twice in handleException - if(countDown.countDown()) { + if (countDown.countDown()) { listener.onResponse(commitIds); } } @@ -321,7 +374,7 @@ public class SyncedFlushService extends AbstractComponent { @Override public void handleException(TransportException exp) { logger.trace("{} error while performing pre synced flush on [{}], skipping", shardId, exp, shard); - if(countDown.countDown()) { + if (countDown.countDown()) { listener.onResponse(commitIds); } } @@ -348,7 +401,7 @@ public class SyncedFlushService extends AbstractComponent { IndexShard indexShard = indexService.shardSafe(request.shardId().id()); logger.trace("{} performing sync flush. sync id [{}], expected commit id {}", request.shardId(), request.syncId(), request.expectedCommitId()); Engine.SyncedFlushResult result = indexShard.syncFlush(request.syncId(), request.expectedCommitId()); - logger.trace("{} sync flush done. sync id [{}], result [{}]", request.shardId(), request.syncId(), result); + logger.trace("{} sync flush done. 
sync id [{}], result [{}]", request.shardId(), request.syncId(), result); switch (result) { case SUCCESS: return new SyncedFlushResponse(); @@ -372,124 +425,6 @@ public class SyncedFlushService extends AbstractComponent { return new InFlightOpsResponse(opCount); } - /** - * Result for all copies of a shard - */ - public static class SyncedFlushResult extends TransportResponse { - private String failureReason; - private Map shardResponses; - private String syncId; - private ShardId shardId; - - public SyncedFlushResult() { - } - - public ShardId getShardId() { - return shardId; - } - - /** - * failure constructor - */ - public SyncedFlushResult(ShardId shardId, String failureReason) { - this.syncId = null; - this.failureReason = failureReason; - this.shardResponses = ImmutableMap.of(); - this.shardId = shardId; - } - - /** - * success constructor - */ - public SyncedFlushResult(ShardId shardId, String syncId, Map shardResponses) { - this.failureReason = null; - ImmutableMap.Builder builder = ImmutableMap.builder(); - this.shardResponses = builder.putAll(shardResponses).build(); - this.syncId = syncId; - this.shardId = shardId; - } - - /** - * @return true if one or more shard copies was successful, false if all failed before step three of synced flush - */ - public boolean success() { - return syncId != null; - } - - /** - * @return the reason for the failure if synced flush failed before step three of synced flush - */ - public String failureReason() { - return failureReason; - } - - public String syncId() { - return syncId; - } - - /** - * @return total number of shards for which a sync attempt was made - */ - public int totalShards() { - return shardResponses.size(); - } - - /** - * @return total number of successful shards - */ - public int successfulShards() { - int i = 0; - for (SyncedFlushResponse result : shardResponses.values()) { - if (result.success()) { - i++; - } - } - return i; - } - - /** - * @return Individual responses for each shard copy with a 
detailed failure message if the copy failed to perform the synced flush. - * Empty if synced flush failed before step three. - */ - public Map shardResponses() { - return shardResponses; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalString(failureReason); - out.writeOptionalString(syncId); - out.writeVInt(shardResponses.size()); - for (Map.Entry result : shardResponses.entrySet()) { - result.getKey().writeTo(out); - result.getValue().writeTo(out); - } - shardId.writeTo(out); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - failureReason = in.readOptionalString(); - syncId = in.readOptionalString(); - int size = in.readVInt(); - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (int i = 0; i < size; i++) { - ImmutableShardRouting shardRouting = ImmutableShardRouting.readShardRoutingEntry(in); - SyncedFlushResponse syncedFlushRsponse = new SyncedFlushResponse(); - syncedFlushRsponse.readFrom(in); - builder.put(shardRouting, syncedFlushRsponse); - } - shardResponses = builder.build(); - shardId = ShardId.readShardId(in); - } - - public ShardId shardId() { - return shardId; - } - } - final static class PreSyncedFlushRequest extends TransportRequest { private ShardId shardId; diff --git a/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 2e2497fe7cb..572b784093e 100644 --- a/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices.recovery; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; - import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import 
org.apache.lucene.index.IndexFormatTooOldException; @@ -199,6 +198,8 @@ public class RecoverySourceHandler { } // we shortcut recovery here because we have nothing to copy. but we must still start the engine on the target. // so we don't return here + logger.trace("[{}][{}] skipping [phase1] to {} - identical sync id [{}] found on both source and target", indexName, shardId, + request.targetNode(), recoverySourceSyncId); } else { final Store.RecoveryDiff diff = recoverySourceMetadata.recoveryDiff(request.metadataSnapshot()); for (StoreFileMetaData md : diff.identical) { diff --git a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index c1766a30f51..2611c7b5791 100644 --- a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -19,11 +19,8 @@ package org.elasticsearch.indices.store; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -32,7 +29,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -42,7 +38,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import 
org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -56,7 +51,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; @@ -241,6 +235,9 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio metadataSnapshot.writeTo(out); } + /** + * @return commit sync id if exists, else null + */ public String syncId() { return metadataSnapshot.getSyncId(); } diff --git a/src/main/java/org/elasticsearch/rest/action/RestActionModule.java b/src/main/java/org/elasticsearch/rest/action/RestActionModule.java index a3864d5886e..a2d09f4f80e 100644 --- a/src/main/java/org/elasticsearch/rest/action/RestActionModule.java +++ b/src/main/java/org/elasticsearch/rest/action/RestActionModule.java @@ -23,9 +23,6 @@ import com.google.common.collect.Lists; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.action.admin.indices.seal.RestSealIndicesAction; -import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; -import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction; import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction; @@ -33,6 +30,7 @@ import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsActi 
import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction; import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction; import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction; +import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction; import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction; import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction; @@ -59,6 +57,7 @@ import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction; import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction; import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction; import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction; +import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction; import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction; import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction; import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction; @@ -75,6 +74,7 @@ import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteInd import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction; +import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction; import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction; import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction; import 
org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction; @@ -183,7 +183,7 @@ public class RestActionModule extends AbstractModule { bind(RestRefreshAction.class).asEagerSingleton(); bind(RestFlushAction.class).asEagerSingleton(); - bind(RestSealIndicesAction.class).asEagerSingleton(); + bind(RestSyncedFlushAction.class).asEagerSingleton(); bind(RestOptimizeAction.class).asEagerSingleton(); bind(RestUpgradeAction.class).asEagerSingleton(); bind(RestClearIndicesCacheAction.class).asEagerSingleton(); diff --git a/src/main/java/org/elasticsearch/rest/action/admin/indices/seal/RestSealIndicesAction.java b/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java similarity index 55% rename from src/main/java/org/elasticsearch/rest/action/admin/indices/seal/RestSealIndicesAction.java rename to src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index a2007ca703d..82a1d5f8fd3 100644 --- a/src/main/java/org/elasticsearch/rest/action/admin/indices/seal/RestSealIndicesAction.java +++ b/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -17,17 +17,16 @@ * under the License. 
*/ -package org.elasticsearch.rest.action.admin.indices.seal; +package org.elasticsearch.rest.action.admin.indices.flush; -import org.elasticsearch.action.admin.indices.seal.SealIndicesAction; -import org.elasticsearch.action.admin.indices.seal.SealIndicesRequest; -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.indices.flush.IndicesSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestBuilderListener; @@ -37,29 +36,33 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; /** * */ -public class RestSealIndicesAction extends BaseRestHandler { +public class RestSyncedFlushAction extends BaseRestHandler { + + private final SyncedFlushService syncedFlushService; @Inject - public RestSealIndicesAction(Settings settings, RestController controller, Client client) { + public RestSyncedFlushAction(Settings settings, RestController controller, Client client, SyncedFlushService syncedFlushService) { super(settings, controller, client); - controller.registerHandler(POST, "/_seal", this); - controller.registerHandler(POST, "/{index}/_seal", this); + this.syncedFlushService = syncedFlushService; + controller.registerHandler(POST, "/_flush/synced", this); + controller.registerHandler(POST, "/{index}/_flush/synced", this); - controller.registerHandler(GET, "/_seal", this); - controller.registerHandler(GET, "/{index}/_seal", this); + controller.registerHandler(GET, "/_flush/synced", this); + controller.registerHandler(GET, "/{index}/_flush/synced", this); } @Override public 
void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - SealIndicesRequest sealIndicesRequest = new SealIndicesRequest(indices); - client.admin().indices().execute(SealIndicesAction.INSTANCE, sealIndicesRequest, new RestBuilderListener(channel) { + IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, IndicesOptions.lenientExpandOpen()); + + syncedFlushService.attemptSyncedFlush(indices, indicesOptions, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(SealIndicesResponse response, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(IndicesSyncedFlushResult results, XContentBuilder builder) throws Exception { builder.startObject(); - builder = response.toXContent(builder, ToXContent.EMPTY_PARAMS); + results.toXContent(builder, request); builder.endObject(); - return new BytesRestResponse(response.status(), builder); + return new BytesRestResponse(RestStatus.OK, builder); } }); } diff --git a/src/test/java/org/elasticsearch/action/admin/indices/seal/SealIndicesTests.java b/src/test/java/org/elasticsearch/action/admin/indices/seal/SealIndicesTests.java deleted file mode 100644 index 13c376eb250..00000000000 --- a/src/test/java/org/elasticsearch/action/admin/indices/seal/SealIndicesTests.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.seal; - -import org.elasticsearch.cluster.routing.ImmutableShardRouting; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.SyncedFlushService; -import org.elasticsearch.test.ElasticsearchTestCase; - -import java.io.IOException; -import java.util.*; - -import static org.elasticsearch.test.XContentTestUtils.convertToMap; -import static org.hamcrest.Matchers.equalTo; - -public class SealIndicesTests extends ElasticsearchTestCase { - - public void testSealIndicesResponseStreaming() throws IOException { - - Set shardResults = new HashSet<>(); - // add one result where one shard failed and one succeeded - SyncedFlushService.SyncedFlushResult syncedFlushResult = createSyncedFlushResult(0, "test"); - shardResults.add(syncedFlushResult); - // add one result where all failed - syncedFlushResult = new SyncedFlushService.SyncedFlushResult(new ShardId("test", 1), "all failed :("); - shardResults.add(syncedFlushResult); - SealIndicesResponse sealIndicesResponse = new SealIndicesResponse(shardResults); - BytesStreamOutput out = new BytesStreamOutput(); - sealIndicesResponse.writeTo(out); - out.close(); - StreamInput in = StreamInput.wrap(out.bytes()); - SealIndicesResponse readResponse = new SealIndicesResponse(); - readResponse.readFrom(in); - 
Map asMap = convertToMap(readResponse); - assertResponse(asMap); - } - - public void testXContentResponse() throws IOException { - - Set shardResults = new HashSet<>(); - // add one result where one shard failed and one succeeded - SyncedFlushService.SyncedFlushResult syncedFlushResult = createSyncedFlushResult(0, "test"); - shardResults.add(syncedFlushResult); - // add one result where all failed - syncedFlushResult = new SyncedFlushService.SyncedFlushResult(new ShardId("test", 1), "all failed :("); - shardResults.add(syncedFlushResult); - SealIndicesResponse sealIndicesResponse = new SealIndicesResponse(shardResults); - Map asMap = convertToMap(sealIndicesResponse); - assertResponse(asMap); - } - - protected void assertResponse(Map asMap) { - assertNotNull(asMap.get("test")); - assertThat((Integer) (((HashMap) ((ArrayList) asMap.get("test")).get(0)).get("shard_id")), equalTo(0)); - assertThat((String) (((HashMap) ((ArrayList) asMap.get("test")).get(0)).get("message")), equalTo("failed on some copies")); - HashMap shardResponses = (HashMap) ((HashMap) ((ArrayList) asMap.get("test")).get(0)).get("responses"); - assertThat(shardResponses.get("node_1"), equalTo("failed for some reason")); - assertThat(shardResponses.get("node_2"), equalTo("success")); - HashMap failedShard = (HashMap) (((ArrayList) asMap.get("test")).get(1)); - assertThat((Integer) (failedShard.get("shard_id")), equalTo(1)); - assertThat((String) (failedShard.get("message")), equalTo("all failed :(")); - } - - public void testXContentResponseSortsShards() throws IOException { - Set shardResults = new HashSet<>(); - // add one result where one shard failed and one succeeded - SyncedFlushService.SyncedFlushResult syncedFlushResult; - for (int i = 100000; i >= 0; i--) { - if (randomBoolean()) { - syncedFlushResult = createSyncedFlushResult(i, "test"); - shardResults.add(syncedFlushResult); - } else { - syncedFlushResult = new SyncedFlushService.SyncedFlushResult(new ShardId("test", i), "all failed :("); 
- shardResults.add(syncedFlushResult); - } - } - SealIndicesResponse sealIndicesResponse = new SealIndicesResponse(shardResults); - Map asMap = convertToMap(sealIndicesResponse); - assertNotNull(asMap.get("test")); - for (int i = 0; i < 100000; i++) { - assertThat((Integer) (((HashMap) ((ArrayList) asMap.get("test")).get(i)).get("shard_id")), equalTo(i)); - } - } - - protected SyncedFlushService.SyncedFlushResult createSyncedFlushResult(int shardId, String index) { - Map responses = new HashMap<>(); - ImmutableShardRouting shardRouting = new ImmutableShardRouting(index, shardId, "node_1", false, ShardRoutingState.RELOCATING, 2); - SyncedFlushService.SyncedFlushResponse syncedFlushResponse = new SyncedFlushService.SyncedFlushResponse("failed for some reason"); - responses.put(shardRouting, syncedFlushResponse); - shardRouting = new ImmutableShardRouting(index, shardId, "node_2", false, ShardRoutingState.RELOCATING, 2); - syncedFlushResponse = new SyncedFlushService.SyncedFlushResponse(); - responses.put(shardRouting, syncedFlushResponse); - return new SyncedFlushService.SyncedFlushResult(new ShardId(index, shardId), "some_sync_id", responses); - } -} diff --git a/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayTests.java b/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayTests.java index 93ead8ac862..7bdedbabe5d 100644 --- a/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayTests.java +++ b/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayTests.java @@ -33,13 +33,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.SyncedFlushService; +import org.elasticsearch.indices.flush.SyncedFlushUtil; import org.elasticsearch.indices.recovery.RecoveryState; import 
org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; import org.elasticsearch.test.InternalTestCluster.RestartCallback; -import org.elasticsearch.indices.SyncedFlushUtil; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; import org.junit.Test; @@ -398,11 +396,7 @@ public class RecoveryFromGatewayTests extends ElasticsearchIntegrationTest { ensureGreen(); } else { logger.info("--> trying to sync flush"); - int numShards = Integer.parseInt(client().admin().indices().prepareGetSettings("test").get().getSetting("test", "index.number_of_shards")); - SyncedFlushService syncedFlushService = internalCluster().getInstance(SyncedFlushService.class); - for (int i = 0; i < numShards; i++) { - assertTrue(SyncedFlushUtil.attemptSyncedFlush(syncedFlushService, new ShardId("test", i)).success()); - } + assertEquals(SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").failedShards(), 0); assertSyncIdsNotNull(); } diff --git a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 0ecc99d923b..c1c8a69d2d6 100644 --- a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.index.shard; -import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.MutableShardRouting; import org.elasticsearch.cluster.routing.ShardRouting; @@ -254,13 +254,14 @@ public class IndexShardTests extends ElasticsearchSingleNodeTest { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = 
indicesService.indexServiceSafe("test"); IndexShard indexShard = indexService.shard(0); + assertEquals(0, indexShard.getOperationsCount()); + indexShard.incrementOperationCounter(); + assertEquals(1, indexShard.getOperationsCount()); indexShard.incrementOperationCounter(); assertEquals(2, indexShard.getOperationsCount()); - indexShard.incrementOperationCounter(); - assertEquals(3, indexShard.getOperationsCount()); indexShard.decrementOperationCounter(); indexShard.decrementOperationCounter(); - assertEquals(1, indexShard.getOperationsCount()); + assertEquals(0, indexShard.getOperationsCount()); } @Test diff --git a/src/test/java/org/elasticsearch/indices/SealTests.java b/src/test/java/org/elasticsearch/indices/SealTests.java deleted file mode 100644 index 723f699d8cd..00000000000 --- a/src/test/java/org/elasticsearch/indices/SealTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.indices; - -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.ImmutableSettings; -import org.elasticsearch.test.ElasticsearchIntegrationTest; -import org.junit.Test; - -import static java.lang.Thread.sleep; -import static org.hamcrest.Matchers.equalTo; - -@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0) -public class SealTests extends ElasticsearchIntegrationTest { - - @Test - public void testUnallocatedShardsDoesNotHang() throws InterruptedException { - ImmutableSettings.Builder settingsBuilder = ImmutableSettings.builder() - .put("node.data", false) - .put("node.master", true) - .put("path.data", createTempDir().toString()); - internalCluster().startNode(settingsBuilder.build()); - // create an index but because no data nodes are available no shards will be allocated - createIndex("test"); - // this should not hang but instead immediately return with empty result set - SealIndicesResponse sealIndicesResponse = client().admin().indices().prepareSealIndices("test").get(); - // just to make sure the test actually tests the right thing - int numShards = client().admin().indices().prepareGetSettings("test").get().getIndexToSettings().get("test").getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); - assertThat(sealIndicesResponse.results().size(), equalTo(numShards)); - assertThat(sealIndicesResponse.results().iterator().next().failureReason(), equalTo("no active primary available")); - } -} diff --git a/src/test/java/org/elasticsearch/indices/FlushTest.java b/src/test/java/org/elasticsearch/indices/flush/FlushTest.java similarity index 73% rename from src/test/java/org/elasticsearch/indices/FlushTest.java rename to src/test/java/org/elasticsearch/indices/flush/FlushTest.java index 65fb2f6816b..0307f705f38 100644 --- 
a/src/test/java/org/elasticsearch/indices/FlushTest.java +++ b/src/test/java/org/elasticsearch/indices/flush/FlushTest.java @@ -16,13 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.indices; +package org.elasticsearch.indices.flush; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; @@ -36,6 +35,7 @@ import org.junit.Test; import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -43,7 +43,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import static java.lang.Thread.sleep; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -97,8 +96,16 @@ public class FlushTest extends ElasticsearchIntegrationTest { assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); } - SyncedFlushService.SyncedFlushResult result = SyncedFlushUtil.attemptSyncedFlush(internalCluster().getInstance(SyncedFlushService.class), new ShardId("test", 0)); - assertTrue(result.success()); + ShardsSyncedFlushResult result; + if (randomBoolean()) { + logger.info("--> sync flushing shard 0"); + result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId("test", 0)); + } else { + logger.info("--> sync flushing index [test]"); + IndicesSyncedFlushResult indicesResult = 
SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); + result = indicesResult.getShardsResultPerIndex().get("test").get(0); + } + assertFalse(result.failed()); assertThat(result.totalShards(), equalTo(indexStats.getShards().length)); assertThat(result.successfulShards(), equalTo(indexStats.getShards().length)); @@ -140,26 +147,7 @@ public class FlushTest extends ElasticsearchIntegrationTest { } @TestLogging("indices:TRACE") - public void testSyncedFlushWithApi() throws ExecutionException, InterruptedException, IOException { - - createIndex("test"); - ensureGreen(); - - IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); - for (ShardStats shardStats : indexStats.getShards()) { - assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); - } - logger.info("--> trying sync flush"); - SealIndicesResponse sealIndicesResponse = client().admin().indices().prepareSealIndices("test").get(); - logger.info("--> sync flush done"); - indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); - for (ShardStats shardStats : indexStats.getShards()) { - assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); - } - } - - @TestLogging("indices:TRACE") - public void testSyncedFlushWithApiAndConcurrentIndexing() throws Exception { + public void testSyncedFlushWithConcurrentIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); createIndex("test"); @@ -186,14 +174,12 @@ public class FlushTest extends ElasticsearchIntegrationTest { assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); } logger.info("--> trying sync flush"); - SealIndicesResponse sealIndicesResponse = client().admin().indices().prepareSealIndices("test").get(); + IndicesSyncedFlushResult syncedFlushResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test"); logger.info("--> sync flush done"); stop.set(true); indexingThread.join(); 
indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); - for (ShardStats shardStats : indexStats.getShards()) { - assertFlushResponseEqualsShardStats(shardStats, sealIndicesResponse); - } + assertFlushResponseEqualsShardStats(indexStats.getShards(), syncedFlushResult.getShardsResultPerIndex().get("test")); refresh(); assertThat(client().prepareCount().get().getCount(), equalTo((long) numDocs.get())); logger.info("indexed {} docs", client().prepareCount().get().getCount()); @@ -203,22 +189,38 @@ public class FlushTest extends ElasticsearchIntegrationTest { assertThat(client().prepareCount().get().getCount(), equalTo((long) numDocs.get())); } - private void assertFlushResponseEqualsShardStats(ShardStats shardStats, SealIndicesResponse sealIndicesResponse) { + private void assertFlushResponseEqualsShardStats(ShardStats[] shardsStats, List syncedFlushResults) { - for (SyncedFlushService.SyncedFlushResult shardResult : sealIndicesResponse.results()) { - if (shardStats.getShardRouting().getId() == shardResult.shardId().getId()) { - for (Map.Entry singleResponse : shardResult.shardResponses().entrySet()) { - if (singleResponse.getKey().currentNodeId().equals(shardStats.getShardRouting().currentNodeId())) { - if (singleResponse.getValue().success()) { - assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); - logger.info("sync flushed {} on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); - } else { - assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); - logger.info("sync flush failed for {} on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); + for (final ShardStats shardStats : shardsStats) { + for (final ShardsSyncedFlushResult shardResult : syncedFlushResults) { + if (shardStats.getShardRouting().getId() == shardResult.shardId().getId()) { + for (Map.Entry singleResponse : 
shardResult.shardResponses().entrySet()) { + if (singleResponse.getKey().currentNodeId().equals(shardStats.getShardRouting().currentNodeId())) { + if (singleResponse.getValue().success()) { + logger.info("{} sync flushed on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); + assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); + } else { + logger.info("{} sync flush failed for on node {}", singleResponse.getKey().shardId(), singleResponse.getKey().currentNodeId()); + assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); + } } } } } } } + + @Test + public void testUnallocatedShardsDoesNotHang() throws InterruptedException { + // create an index but disallow allocation + prepareCreate("test").setSettings(ImmutableSettings.builder().put("index.routing.allocation.include._name", "nonexistent")).get(); + + // this should not hang but instead immediately return with empty result set + List shardsResult = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), "test").getShardsResultPerIndex().get("test"); + // just to make sure the test actually tests the right thing + int numShards = client().admin().indices().prepareGetSettings("test").get().getIndexToSettings().get("test").getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1); + assertThat(shardsResult.size(), equalTo(numShards)); + assertThat(shardsResult.get(0).failureReason(), equalTo("no active shards")); + } + } diff --git a/src/test/java/org/elasticsearch/indices/SycnedFlushSingleNodeTest.java b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTest.java similarity index 83% rename from src/test/java/org/elasticsearch/indices/SycnedFlushSingleNodeTest.java rename to src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTest.java index 348c5753c83..5d65c1acb7d 100644 --- a/src/test/java/org/elasticsearch/indices/SycnedFlushSingleNodeTest.java +++ 
b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.indices; +package org.elasticsearch.indices.flush; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -27,17 +27,17 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ElasticsearchSingleNodeTest; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.util.List; import java.util.Map; /** */ -public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { +public class SyncedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { - public void testModificationPreventsSealing() throws InterruptedException { + public void testModificationPreventsFlushing() throws InterruptedException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); @@ -46,18 +46,18 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); final ClusterState state = getInstanceFromNode(ClusterService.class).state(); - final IndexShardRoutingTable shardRoutingTable = flushService.getActiveShardRoutings(shardId, state); + final IndexShardRoutingTable shardRoutingTable = flushService.getShardRoutingTable(shardId, state); final List activeShards = shardRoutingTable.activeShards(); assertEquals("exactly one active shard", 1, activeShards.size()); Map commitIds = SyncedFlushUtil.sendPreSyncRequests(flushService, activeShards, state, shardId); 
assertEquals("exactly one commit id", 1, commitIds.size()); client().prepareIndex("test", "test", "2").setSource("{}").get(); String syncId = Strings.base64UUID(); - SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); - flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId,listener); + SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener<>(); + flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, shardRoutingTable.size(), listener); listener.latch.await(); assertNull(listener.error); - SyncedFlushService.SyncedFlushResult syncedFlushResult = listener.result; + ShardsSyncedFlushResult syncedFlushResult = listener.result; assertNotNull(syncedFlushResult); assertEquals(0, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); @@ -66,9 +66,9 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { assertFalse(syncedFlushResult.shardResponses().get(activeShards.get(0)).success()); assertEquals("pending operations", syncedFlushResult.shardResponses().get(activeShards.get(0)).failureReason()); - SyncedFlushUtil.sendPreSyncRequests(flushService, activeShards, state, shardId); // pull another commit and make sure we can't seal with the old one + SyncedFlushUtil.sendPreSyncRequests(flushService, activeShards, state, shardId); // pull another commit and make sure we can't sync-flush with the old one listener = new SyncedFlushUtil.LatchedListener(); - flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId,listener); + flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, shardRoutingTable.size(), listener); listener.latch.await(); assertNull(listener.error); syncedFlushResult = listener.result; @@ -79,7 +79,6 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { assertNotNull(syncedFlushResult.shardResponses().get(activeShards.get(0))); 
assertFalse(syncedFlushResult.shardResponses().get(activeShards.get(0)).success()); assertEquals("commit has changed", syncedFlushResult.shardResponses().get(activeShards.get(0)).failureReason()); - ElasticsearchAssertions.assertVersionSerializable(syncedFlushResult); } public void testSingleShardSuccess() throws InterruptedException { @@ -90,17 +89,16 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); - SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); + SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); flushService.attemptSyncedFlush(shardId, listener); listener.latch.await(); assertNull(listener.error); - SyncedFlushService.SyncedFlushResult syncedFlushResult = listener.result; + ShardsSyncedFlushResult syncedFlushResult = listener.result; assertNotNull(syncedFlushResult); assertEquals(1, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); SyncedFlushService.SyncedFlushResponse response = syncedFlushResult.shardResponses().values().iterator().next(); assertTrue(response.success()); - ElasticsearchAssertions.assertVersionSerializable(syncedFlushResult); } public void testSyncFailsIfOperationIsInFlight() throws InterruptedException { @@ -113,16 +111,15 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { final ShardId shardId = shard.shardId(); shard.incrementOperationCounter(); try { - SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); + SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener<>(); flushService.attemptSyncedFlush(shardId, listener); listener.latch.await(); assertNull(listener.error); - SyncedFlushService.SyncedFlushResult syncedFlushResult = listener.result; + ShardsSyncedFlushResult syncedFlushResult = listener.result; 
assertNotNull(syncedFlushResult); assertEquals(0, syncedFlushResult.successfulShards()); - assertEquals(0, syncedFlushResult.totalShards()); - assertEquals("operation counter on primary is non zero [2]", syncedFlushResult.failureReason()); - ElasticsearchAssertions.assertVersionSerializable(syncedFlushResult); + assertNotEquals(0, syncedFlushResult.totalShards()); + assertEquals("[1] ongoing operations on primary", syncedFlushResult.failureReason()); } finally { shard.decrementOperationCounter(); } @@ -168,7 +165,7 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); final ClusterState state = getInstanceFromNode(ClusterService.class).state(); - final IndexShardRoutingTable shardRoutingTable = flushService.getActiveShardRoutings(shardId, state); + final IndexShardRoutingTable shardRoutingTable = flushService.getShardRoutingTable(shardId, state); final List activeShards = shardRoutingTable.activeShards(); assertEquals("exactly one active shard", 1, activeShards.size()); Map commitIds = SyncedFlushUtil.sendPreSyncRequests(flushService, activeShards, state, shardId); @@ -178,11 +175,11 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { } client().admin().indices().prepareFlush("test").setForce(true).get(); String syncId = Strings.base64UUID(); - final SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); - flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, listener); + final SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); + flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, shardRoutingTable.size(), listener); listener.latch.await(); assertNull(listener.error); - SyncedFlushService.SyncedFlushResult syncedFlushResult = listener.result; + ShardsSyncedFlushResult 
syncedFlushResult = listener.result; assertNotNull(syncedFlushResult); assertEquals(0, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); @@ -190,7 +187,6 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { assertNotNull(syncedFlushResult.shardResponses().get(activeShards.get(0))); assertFalse(syncedFlushResult.shardResponses().get(activeShards.get(0)).success()); assertEquals("commit has changed", syncedFlushResult.shardResponses().get(activeShards.get(0)).failureReason()); - ElasticsearchAssertions.assertVersionSerializable(syncedFlushResult); } public void testFailWhenCommitIsMissing() throws InterruptedException { @@ -202,18 +198,18 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); final ClusterState state = getInstanceFromNode(ClusterService.class).state(); - final IndexShardRoutingTable shardRoutingTable = flushService.getActiveShardRoutings(shardId, state); + final IndexShardRoutingTable shardRoutingTable = flushService.getShardRoutingTable(shardId, state); final List activeShards = shardRoutingTable.activeShards(); assertEquals("exactly one active shard", 1, activeShards.size()); Map commitIds = SyncedFlushUtil.sendPreSyncRequests(flushService, activeShards, state, shardId); assertEquals("exactly one commit id", 1, commitIds.size()); commitIds.clear(); // wipe it... 
String syncId = Strings.base64UUID(); - SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); - flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, listener); + SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); + flushService.sendSyncRequests(syncId, activeShards, state, commitIds, shardId, shardRoutingTable.size(), listener); listener.latch.await(); assertNull(listener.error); - SyncedFlushService.SyncedFlushResult syncedFlushResult = listener.result; + ShardsSyncedFlushResult syncedFlushResult = listener.result; assertNotNull(syncedFlushResult); assertEquals(0, syncedFlushResult.successfulShards()); assertEquals(1, syncedFlushResult.totalShards()); @@ -221,7 +217,6 @@ public class SycnedFlushSingleNodeTest extends ElasticsearchSingleNodeTest { assertNotNull(syncedFlushResult.shardResponses().get(activeShards.get(0))); assertFalse(syncedFlushResult.shardResponses().get(activeShards.get(0)).success()); assertEquals("no commit id from pre-sync flush", syncedFlushResult.shardResponses().get(activeShards.get(0)).failureReason()); - ElasticsearchAssertions.assertVersionSerializable(syncedFlushResult); } diff --git a/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java new file mode 100644 index 00000000000..fcf80c19d67 --- /dev/null +++ b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.indices.flush; + +import com.carrotsearch.hppc.ObjectIntHashMap; +import com.carrotsearch.hppc.ObjectIntMap; +import org.elasticsearch.cluster.routing.ImmutableShardRouting; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.flush.IndicesSyncedFlushResult.ShardCounts; +import org.elasticsearch.indices.flush.SyncedFlushService.SyncedFlushResponse; +import org.elasticsearch.test.ElasticsearchTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.XContentTestUtils.convertToMap; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class SyncedFlushUnitTests extends ElasticsearchTestCase { + + + private static class TestPlan { + public ShardCounts totalCounts; + public Map countsPerIndex = new HashMap<>(); + public ObjectIntMap expectedFailuresPerIndex = new ObjectIntHashMap<>(); + + public IndicesSyncedFlushResult result; + + } + + public void testIndicesSyncedFlushResult() throws IOException { + final TestPlan testPlan = createTestPlan(); + assertThat(testPlan.result.totalShards(), equalTo(testPlan.totalCounts.total)); + assertThat(testPlan.result.successfulShards(), equalTo(testPlan.totalCounts.successful)); + assertThat(testPlan.result.failedShards(), equalTo(testPlan.totalCounts.failed)); + 
Map asMap = convertToMap(testPlan.result); + assertShardCount("_shards header", (Map) asMap.get("_shards"), testPlan.totalCounts); + + assertThat("unexpected number of indices", asMap.size(), equalTo(1 + testPlan.countsPerIndex.size())); // +1 for the shards header + for (String index : testPlan.countsPerIndex.keySet()) { + Map indexMap = (Map) asMap.get(index); + assertShardCount(index, indexMap, testPlan.countsPerIndex.get(index)); + List> failureList = (List>) indexMap.get("failures"); + final int expectedFailures = testPlan.expectedFailuresPerIndex.get(index); + if (expectedFailures == 0) { + assertNull(index + " has unexpected failures", failureList); + } else { + assertNotNull(index + " should have failures", failureList); + assertThat(failureList, hasSize(expectedFailures)); + } + } + } + + private void assertShardCount(String name, Map header, ShardCounts expectedCounts) { + assertThat(name + " has unexpected total count", (Integer) header.get("total"), equalTo(expectedCounts.total)); + assertThat(name + " has unexpected successful count", (Integer) header.get("successful"), equalTo(expectedCounts.successful)); + assertThat(name + " has unexpected failed count", (Integer) header.get("failed"), equalTo(expectedCounts.failed)); + } + + protected TestPlan createTestPlan() { + final TestPlan testPlan = new TestPlan(); + final Map> indicesResults = new HashMap<>(); + final int indexCount = randomIntBetween(1, 10); + int totalShards = 0; + int totalSuccesful = 0; + int totalFailed = 0; + for (int i = 0; i < indexCount; i++) { + final String index = "index_" + i; + int shards = randomIntBetween(1, 4); + int replicas = randomIntBetween(0, 2); + int successful = 0; + int failed = 0; + int failures = 0; + List shardsResults = new ArrayList<>(); + for (int shard = 0; shard < shards; shard++) { + final ShardId shardId = new ShardId(index, shard); + if (randomInt(5) < 2) { + // total shard failure + failed += replicas + 1; + failures++; + shardsResults.add(new 
ShardsSyncedFlushResult(shardId, replicas + 1, "simulated total failure")); + } else { + Map shardResponses = new HashMap<>(); + for (int copy = 0; copy < replicas + 1; copy++) { + final ShardRouting shardRouting = new ImmutableShardRouting(index, shard, "node_" + shardId + "_" + copy, null, + copy == 0, ShardRoutingState.STARTED, 0); + if (randomInt(5) < 2) { + // shard copy failure + failed++; + failures++; + shardResponses.put(shardRouting, new SyncedFlushResponse("copy failure " + shardId)); + } else { + successful++; + shardResponses.put(shardRouting, new SyncedFlushResponse()); + } + } + shardsResults.add(new ShardsSyncedFlushResult(shardId, "_sync_id_" + shard, replicas + 1, shardResponses)); + } + } + indicesResults.put(index, shardsResults); + testPlan.countsPerIndex.put(index, new ShardCounts(shards * (replicas + 1), successful, failed)); + testPlan.expectedFailuresPerIndex.put(index, failures); + totalFailed += failed; + totalShards += shards * (replicas + 1); + totalSuccesful += successful; + } + testPlan.result = new IndicesSyncedFlushResult(indicesResults); + testPlan.totalCounts = new ShardCounts(totalShards, totalSuccesful, totalFailed); + return testPlan; + } +} diff --git a/src/test/java/org/elasticsearch/indices/SyncedFlushUtil.java b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java similarity index 70% rename from src/test/java/org/elasticsearch/indices/SyncedFlushUtil.java rename to src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java index e16c85b4b7e..fef6c23231e 100644 --- a/src/test/java/org/elasticsearch/indices/SyncedFlushUtil.java +++ b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java @@ -16,16 +16,16 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.indices; +package org.elasticsearch.indices.flush; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.SyncedFlushService; +import org.elasticsearch.test.InternalTestCluster; import java.util.List; import java.util.Map; @@ -38,11 +38,31 @@ public class SyncedFlushUtil { } + /** + * Blocking single index version of {@link SyncedFlushService#attemptSyncedFlush(String[], IndicesOptions, ActionListener)} + */ + public static IndicesSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, String index) { + SyncedFlushService service = cluster.getInstance(SyncedFlushService.class); + LatchedListener listener = new LatchedListener(); + service.attemptSyncedFlush(new String[]{index}, IndicesOptions.lenientExpandOpen(), listener); + try { + listener.latch.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + if (listener.error != null) { + throw ExceptionsHelper.convertToElastic(listener.error); + } + return listener.result; + } + + /** * Blocking version of {@link SyncedFlushService#attemptSyncedFlush(ShardId, ActionListener)} */ - public static SyncedFlushService.SyncedFlushResult attemptSyncedFlush(SyncedFlushService service, ShardId shardId) { - LatchedListener listener = new LatchedListener(); + public static ShardsSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, ShardId shardId) { + SyncedFlushService service = cluster.getInstance(SyncedFlushService.class); + LatchedListener listener = new LatchedListener(); service.attemptSyncedFlush(shardId, listener); try { listener.latch.await(); diff --git 
a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index df786df3d6a..bf0a504e140 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -18,14 +18,15 @@ */ package org.elasticsearch.test; -import com.carrotsearch.randomizedtesting.*; +import com.carrotsearch.randomizedtesting.RandomizedContext; +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.Randomness; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.collect.Lists; - import org.apache.commons.lang3.StringUtils; import org.apache.http.impl.client.HttpClients; import org.apache.lucene.store.StoreRateLimiting; @@ -49,7 +50,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; -import org.elasticsearch.action.admin.indices.seal.SealIndicesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -102,21 +102,18 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapper.Loading; import org.elasticsearch.index.mapper.internal.SizeFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import 
org.elasticsearch.index.merge.policy.AbstractMergePolicyProvider; -import org.elasticsearch.index.merge.policy.LogByteSizeMergePolicyProvider; -import org.elasticsearch.index.merge.policy.LogDocMergePolicyProvider; -import org.elasticsearch.index.merge.policy.MergePolicyModule; -import org.elasticsearch.index.merge.policy.MergePolicyProvider; -import org.elasticsearch.index.merge.policy.TieredMergePolicyProvider; +import org.elasticsearch.index.merge.policy.*; import org.elasticsearch.index.merge.scheduler.ConcurrentMergeSchedulerProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogService; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; +import org.elasticsearch.indices.flush.IndicesSyncedFlushResult; +import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.Node; @@ -129,53 +126,28 @@ import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.transport.netty.NettyTransport; import org.hamcrest.Matchers; import org.joda.time.DateTimeZone; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Ignore; +import org.junit.*; import java.io.IOException; import java.io.InputStream; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; +import 
java.lang.annotation.*; import java.net.InetSocketAddress; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; +import java.util.*; +import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.XContentTestUtils.convertToMap; import static org.elasticsearch.test.XContentTestUtils.mapsEqualIgnoringArrayOrder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.emptyIterable; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * {@link ElasticsearchIntegrationTest} is an abstract base class to run integration @@ -253,7 
+225,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase /** * Annotation for third-party integration tests. - *

+ *

* These are tests the require a third-party service in order to run. They * may require the user to manually configure an external process (such as rabbitmq), * or may additionally require some external configuration (e.g. AWS credentials) @@ -417,56 +389,56 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase } mappings.startArray("dynamic_templates") .startObject() - .startObject("template-strings") - .field("match_mapping_type", "string") + .startObject("template-strings") + .field("match_mapping_type", "string") .startObject("mapping") - .startObject("fielddata") - .field(FieldDataType.FORMAT_KEY, randomFrom("paged_bytes", "fst")) - .field(Loading.KEY, randomLoadingValues()) + .startObject("fielddata") + .field(FieldDataType.FORMAT_KEY, randomFrom("paged_bytes", "fst")) + .field(Loading.KEY, randomLoadingValues()) .endObject() .endObject() .endObject() .endObject() .startObject() - .startObject("template-longs") - .field("match_mapping_type", "long") - .startObject("mapping") - .field("doc_values", randomBoolean()) - .startObject("fielddata") - .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) + .startObject("template-longs") + .field("match_mapping_type", "long") + .startObject("mapping") + .field("doc_values", randomBoolean()) + .startObject("fielddata") + .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) .endObject() .endObject() .endObject() .endObject() .startObject() - .startObject("template-doubles") - .field("match_mapping_type", "double") - .startObject("mapping") - .field("doc_values", randomBoolean()) - .startObject("fielddata") - .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) + .startObject("template-doubles") + .field("match_mapping_type", "double") + .startObject("mapping") + .field("doc_values", randomBoolean()) + .startObject("fielddata") + .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) .endObject() .endObject() .endObject() .endObject() .startObject() - 
.startObject("template-geo_points") - .field("match_mapping_type", "geo_point") - .startObject("mapping") - .field("doc_values", randomBoolean()) - .startObject("fielddata") - .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) + .startObject("template-geo_points") + .field("match_mapping_type", "geo_point") + .startObject("mapping") + .field("doc_values", randomBoolean()) + .startObject("fielddata") + .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) .endObject() .endObject() .endObject() .endObject() .startObject() - .startObject("template-booleans") - .field("match_mapping_type", "boolean") - .startObject("mapping") - .startObject("fielddata") - .field(FieldDataType.FORMAT_KEY, randomFrom("array", "doc_values")) - .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) + .startObject("template-booleans") + .field("match_mapping_type", "boolean") + .startObject("mapping") + .startObject("fielddata") + .field(FieldDataType.FORMAT_KEY, randomFrom("array", "doc_values")) + .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER)) .endObject() .endObject() .endObject() @@ -521,7 +493,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase } if (random.nextBoolean()) { - builder.put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, RandomPicks.randomFrom(random, TranslogWriter.Type.values()).name()); + builder.put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, RandomPicks.randomFrom(random, TranslogWriter.Type.values()).name()); } if (random.nextBoolean()) { @@ -661,9 +633,9 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase if (currentClusterScope != Scope.TEST) { MetaData metaData = client().admin().cluster().prepareState().execute().actionGet().getState().getMetaData(); assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().getAsMap(), metaData - .persistentSettings().getAsMap().size(), equalTo(0)); + .persistentSettings().getAsMap().size(), equalTo(0)); 
assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().getAsMap(), metaData - .transientSettings().getAsMap().size(), equalTo(0)); + .transientSettings().getAsMap().size(), equalTo(0)); } ensureClusterSizeConsistency(); ensureClusterStateConsistency(); @@ -879,11 +851,11 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase public void run() { for (Client client : clients()) { ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get(); - assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0)); + assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0)); PendingClusterTasksResponse pendingTasks = client.admin().cluster().preparePendingClusterTasks().setLocal(true).get(); assertThat("client " + client + " still has pending tasks " + pendingTasks.prettyPrint(), pendingTasks, Matchers.emptyIterable()); clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get(); - assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0)); + assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0)); } } }); @@ -970,7 +942,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase * It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating * are now allocated and started. */ - public ClusterHealthStatus ensureGreen(String... indices) { + public ClusterHealthStatus ensureGreen(String... indices) { return ensureGreen(TimeValue.timeValueSeconds(30), indices); } @@ -1252,11 +1224,11 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase /** * Syntactic sugar for: - * + *

*

      *   return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
      * 
- * + *

* where source is a String. */ protected final IndexResponse index(String index, String type, String id, String source) { @@ -1379,7 +1351,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase * @param forceRefresh if true all involved indices are refreshed once the documents are indexed. * @param dummyDocuments if true some empty dummy documents may be randomly inserted into the document list and deleted once * all documents are indexed. This is useful to produce deleted documents on the server side. - * @param maybeFlush if true this method may randomly execute full flushes after index operations. + * @param maybeFlush if true this method may randomly execute full flushes after index operations. * @param builders the documents to index. */ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean maybeFlush, List builders) throws InterruptedException, ExecutionException { @@ -1513,8 +1485,8 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase client().admin().indices().prepareFlush(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute( new LatchedActionListener(newLatch(inFlightAsyncOperations))); } else { - client().admin().indices().prepareSealIndices(indices).execute( - new LatchedActionListener(newLatch(inFlightAsyncOperations))); + internalCluster().getInstance(SyncedFlushService.class).attemptSyncedFlush(indices, IndicesOptions.lenientExpandOpen(), + new LatchedActionListener(newLatch(inFlightAsyncOperations))); } } else if (rarely()) { client().admin().indices().prepareOptimize(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).setMaxNumSegments(between(1, 10)).setFlush(maybeFlush && randomBoolean()).execute( @@ -1673,7 +1645,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase private int getMinNumDataNodes() { ClusterScope annotation = getAnnotation(this.getClass()); - return annotation == null || 
annotation.minNumDataNodes() == -1 ? InternalTestCluster.DEFAULT_MIN_NUM_DATA_NODES : annotation.minNumDataNodes(); + return annotation == null || annotation.minNumDataNodes() == -1 ? InternalTestCluster.DEFAULT_MIN_NUM_DATA_NODES : annotation.minNumDataNodes(); } private int getMaxNumDataNodes() { @@ -1706,7 +1678,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "1b") .put("script.indexed", "on") .put("script.inline", "on") - // wait short time for other active shards before actually deleting, default 30s not needed in tests + // wait short time for other active shards before actually deleting, default 30s not needed in tests .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(1, TimeUnit.SECONDS)) .build(); } @@ -1885,7 +1857,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { + if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).name(); nodes.add(name); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); diff --git a/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 0ca7ae60ff6..fd179fd8201 100644 --- a/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -79,10 +79,8 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineClosedException; import 
org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardModule; -import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.IndexStoreModule; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.indices.IndicesService; @@ -90,7 +88,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.monitor.sigar.SigarService; import org.elasticsearch.node.Node; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.PluginsService; @@ -978,7 +975,7 @@ public final class InternalTestCluster extends TestCluster { @Override public void beforeIndexDeletion() { - // Check that the operations counter on index shard has reached 1. + // Check that the operations counter on index shard has reached 0. // The assumption here is that after a test there are no ongoing write operations. 
// test that have ongoing write operations after the test (for example because ttl is used // and not all docs have been purged after the test) and inherit from @@ -1021,10 +1018,7 @@ public final class InternalTestCluster extends TestCluster { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { - assertThat(indexShard.getOperationsCount(), anyOf(equalTo(1), equalTo(0))); - if (indexShard.getOperationsCount() == 0) { - assertThat(indexShard.state(), equalTo(IndexShardState.CLOSED)); - } + assertThat(indexShard.getOperationsCount(), equalTo(0)); } } } From 8958096754f1a75cd86bf8c1e9da1ea9e371e523 Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Tue, 26 May 2015 04:06:21 -0400 Subject: [PATCH 006/123] don't truncate TopDocs after rescoring --- .../search/rescore/RescorePhase.java | 5 ---- .../search/rescore/QueryRescorerTests.java | 24 ++++++++++++++++++- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index 88d2b0aae60..48d8407facd 100644 --- a/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -60,11 +60,6 @@ public class RescorePhase extends AbstractComponent implements SearchPhase { for (RescoreSearchContext ctx : context.rescore()) { topDocs = ctx.rescorer().rescore(topDocs, context, ctx); } - if (context.size() < topDocs.scoreDocs.length) { - ScoreDoc[] hits = new ScoreDoc[context.size()]; - System.arraycopy(topDocs.scoreDocs, 0, hits, 0, hits.length); - topDocs = new TopDocs(topDocs.totalHits, hits, topDocs.getMaxScore()); - } context.queryResult().topDocs(topDocs); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); diff --git 
a/src/test/java/org/elasticsearch/search/rescore/QueryRescorerTests.java b/src/test/java/org/elasticsearch/search/rescore/QueryRescorerTests.java index afeb93f9be4..d1cbdc86525 100644 --- a/src/test/java/org/elasticsearch/search/rescore/QueryRescorerTests.java +++ b/src/test/java/org/elasticsearch/search/rescore/QueryRescorerTests.java @@ -45,6 +45,7 @@ import java.util.Arrays; import java.util.Comparator; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; @@ -206,7 +207,7 @@ public class QueryRescorerTests extends ElasticsearchIntegrationTest { RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)) .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f)).setRescoreWindow(20).execute().actionGet(); - assertThat(searchResponse.getHits().hits().length, equalTo(3)); + assertThat(searchResponse.getHits().hits().length, equalTo(5)); assertHitCount(searchResponse, 9); assertFirstHit(searchResponse, hasId("3")); } @@ -719,4 +720,25 @@ public class QueryRescorerTests extends ElasticsearchIntegrationTest { ensureGreen(); return numDocs; } + + // #11277 + public void testFromSize() throws Exception { + Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_SHARDS, 1); + settings.put(SETTING_NUMBER_OF_REPLICAS, 0); + assertAcked(prepareCreate("test").setSettings(settings)); + for(int i=0;i<5;i++) { + client().prepareIndex("test", "type", ""+i).setSource("text", "hello world").get(); + } + refresh(); + + SearchRequestBuilder request = client().prepareSearch(); + request.setQuery(QueryBuilders.termQuery("text", "hello")); + request.setFrom(1); + request.setSize(4); + 
request.addRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery())); + request.setRescoreWindow(50); + + assertEquals(4, request.get().getHits().hits().length); + } } From 37782c17453e00c9bacea096c18f98840942f7bb Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 22 May 2015 16:59:56 +0200 Subject: [PATCH 007/123] analyzers: custom analyzers names and aliases must not start with _ closes #9596 --- .../analyzers/custom-analyzer.asciidoc | 1 + .../index/analysis/AnalysisService.java | 5 +++ .../index/analysis/AnalysisModuleTests.java | 35 ++++++++++++++++++- 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index a7cf7136a83..012045153b3 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -5,6 +5,7 @@ An analyzer of type `custom` that allows to combine a `Tokenizer` with zero or more `Token Filters`, and zero or more `Char Filters`. The custom analyzer accepts a logical/registered name of the tokenizer to use, and a list of logical/registered names of token filters. +The name of the custom analyzer must not start mit "_". The following are settings that can be set for a `custom` analyzer type: diff --git a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index e1c23ceb2f8..53cdbfde4ae 100644 --- a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -251,6 +251,11 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : analyzers.get("default"); defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? 
analyzers.get("default_search_quote") : defaultSearchAnalyzer; + for (Map.Entry analyzer : analyzers.entrySet()) { + if (analyzer.getKey().startsWith("_")) { + throw new IllegalArgumentException("analyzer name must not start with _. got \"" + analyzer.getKey() + "\""); + } + } this.analyzers = ImmutableMap.copyOf(analyzers); } diff --git a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index a1db68ab039..886c3b289a6 100644 --- a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; +import org.elasticsearch.common.inject.ProvisionException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; @@ -94,7 +95,7 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml"); testSimpleConfiguration(settings); } - + @Test public void testDefaultFactoryTokenFilters() throws IOException { assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class); @@ -238,4 +239,36 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { return wordListFile; } + @Test + public void testUnderscoreInAnalyzerName() { + Settings settings = Settings.builder() + .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, "1") + .build(); + try { + getAnalysisService(settings); + fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); + } catch 
(ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with _. got \"_invalid_name\"")); + } + } + + @Test + public void testUnderscoreInAnalyzerNameAlias() { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.valid_name.tokenizer", "keyword") + .put("index.analysis.analyzer.valid_name.alias", "_invalid_name") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, "1") + .build(); + try { + getAnalysisService(settings); + fail("This should fail with IllegalArgumentException because the analyzers alias starts with _"); + } catch (ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with _. got \"_invalid_name\"")); + } + } } From 60519911b4e50bcc958c924a768dca2ae618101b Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sat, 23 May 2015 18:51:22 +0200 Subject: [PATCH 008/123] [maven] fix paths for final artifact We need to define an absolute path (based on `${project.basedir}`) instead of using related paths. 
--- pom.xml | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/pom.xml b/pom.xml index 8a5b11f1bee..f644ff91bbf 100644 --- a/pom.xml +++ b/pom.xml @@ -2,12 +2,12 @@ - elasticsearch 4.0.0 org.elasticsearch elasticsearch 2.0.0-SNAPSHOT jar + Elasticsearch core Elasticsearch - Open Source, Distributed, RESTful Search Engine 2009 @@ -291,7 +291,7 @@ sigar sigar system - ${basedir}/lib/sigar/sigar-1.6.4.jar + ${project.basedir}/lib/sigar/sigar-1.6.4.jar true --> @@ -302,19 +302,19 @@ - src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/common/packaging.properties - ${basedir}/src/main/java + ${project.basedir}/src/main/java **/*.json **/*.yml - ${basedir}/src/main/resources + ${project.basedir}/src/main/resources **/*.* @@ -324,7 +324,7 @@ - ${basedir}/src/test/java + ${project.basedir}/src/test/java **/*.json **/*.yml @@ -334,19 +334,19 @@ true - ${basedir}/src/test/java + ${project.basedir}/src/test/java **/*.gz - ${basedir}/src/test/resources + ${project.basedir}/src/test/resources **/*.* - ${basedir}/rest-api-spec + ${project.basedir}/rest-api-spec rest-api-spec api/*.json @@ -543,14 +543,14 @@ ${project.build.directory}/bin - ${basedir}/bin + ${project.basedir}/bin true *.exe - ${basedir}/bin + ${project.basedir}/bin false *.exe @@ -569,12 +569,12 @@ ${project.build.directory}/generated-packaging/deb/ - src/packaging/common/packaging.properties - src/packaging/deb/packaging.properties + ${project.basedir}/src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/deb/packaging.properties - src/packaging/common/ + ${project.basedir}/src/packaging/common/ true **/* @@ -584,7 +584,7 @@ - src/packaging/deb/ + ${project.basedir}/src/packaging/deb/ true **/* @@ -615,8 +615,8 @@ ${project.build.directory}/generated-packaging/rpm/ - src/packaging/common/packaging.properties - src/packaging/rpm/packaging.properties + 
${project.basedir}/src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/rpm/packaging.properties @@ -660,8 +660,8 @@ false ${project.build.directory}/releases/ - ${basedir}/src/main/assemblies/targz-bin.xml - ${basedir}/src/main/assemblies/zip-bin.xml + ${project.basedir}/src/main/assemblies/targz-bin.xml + ${project.basedir}/src/main/assemblies/zip-bin.xml From eeeb29f9006790460270f4d32a928e7645c0ea21 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Tue, 26 May 2015 11:40:19 +0200 Subject: [PATCH 009/123] spell correct and add single quotes --- docs/reference/analysis/analyzers/custom-analyzer.asciidoc | 2 +- .../java/org/elasticsearch/index/analysis/AnalysisService.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index 012045153b3..bdc03a0998b 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -5,7 +5,7 @@ An analyzer of type `custom` that allows to combine a `Tokenizer` with zero or more `Token Filters`, and zero or more `Char Filters`. The custom analyzer accepts a logical/registered name of the tokenizer to use, and a list of logical/registered names of token filters. -The name of the custom analyzer must not start mit "_". +The name of the custom analyzer must not start with "_". 
The following are settings that can be set for a `custom` analyzer type: diff --git a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index 53cdbfde4ae..1cc37b8cda8 100644 --- a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -253,7 +253,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable for (Map.Entry analyzer : analyzers.entrySet()) { if (analyzer.getKey().startsWith("_")) { - throw new IllegalArgumentException("analyzer name must not start with _. got \"" + analyzer.getKey() + "\""); + throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\""); } } this.analyzers = ImmutableMap.copyOf(analyzers); From 802b7b88fa55e94975f9c9bb451eb6642861d1cc Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Tue, 26 May 2015 11:49:33 +0200 Subject: [PATCH 010/123] [TEST] fix epected error message --- .../org/elasticsearch/index/analysis/AnalysisModuleTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 886c3b289a6..c1d260392ff 100644 --- a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -251,7 +251,7 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); } catch (ProvisionException e) { assertTrue(e.getCause() instanceof IllegalArgumentException); - assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with _. 
got \"_invalid_name\"")); + assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); } } @@ -268,7 +268,7 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { fail("This should fail with IllegalArgumentException because the analyzers alias starts with _"); } catch (ProvisionException e) { assertTrue(e.getCause() instanceof IllegalArgumentException); - assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with _. got \"_invalid_name\"")); + assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); } } } From 543f572d80b0494c4f620eb2e6ad3fb8d1fdd89b Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 23 May 2015 11:15:27 +0200 Subject: [PATCH 011/123] Highlighting: keep track of the original query only in HighlighterContext We used to keep track of the rewritten query in the highlighter context to support custom rewriting done by our own postings highlighter fork. Now that we rely on lucene implementation, no rewrite happens, we can simply keep track of the original query and simplify code around it. 
Closes #11317 --- .../highlight/FastVectorHighlighter.java | 4 +-- .../search/highlight/HighlightPhase.java | 8 ++--- .../search/highlight/HighlighterContext.java | 29 ++----------------- .../search/highlight/PlainHighlighter.java | 4 +-- .../search/highlight/PostingsHighlighter.java | 2 +- 5 files changed, 8 insertions(+), 39 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index a61fabb0034..715dacae39d 100644 --- a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -79,13 +79,13 @@ public class FastVectorHighlighter implements Highlighter { if (field.fieldOptions().requireFieldMatch()) { if (cache.fieldMatchFieldQuery == null) { // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) - cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query.originalQuery(), hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.fieldMatchFieldQuery; } else { if (cache.noFieldMatchFieldQuery == null) { // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) 
- cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query.originalQuery(), hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.noFieldMatchFieldQuery; } diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index a730b6612d0..10afac729ba 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.highlight; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.Query; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; @@ -113,12 +114,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + "] for the field [" + fieldName + "]"); } - HighlighterContext.HighlightQuery highlightQuery; - if (field.fieldOptions().highlightQuery() == null) { - highlightQuery = new HighlighterContext.HighlightQuery(context.parsedQuery().query(), context.query(), context.queryRewritten()); - } else { - highlightQuery = new HighlighterContext.HighlightQuery(field.fieldOptions().highlightQuery(), field.fieldOptions().highlightQuery(), false); - } + Query highlightQuery = field.fieldOptions().highlightQuery() == null ? 
context.parsedQuery().query() : field.fieldOptions().highlightQuery(); HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery); HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java b/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java index f3dd9ff1ba8..e791aad4310 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.highlight; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -34,10 +33,10 @@ public class HighlighterContext { public final FieldMapper mapper; public final SearchContext context; public final FetchSubPhase.HitContext hitContext; - public final HighlightQuery query; + public final Query query; public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper mapper, SearchContext context, - FetchSubPhase.HitContext hitContext, HighlightQuery query) { + FetchSubPhase.HitContext hitContext, Query query) { this.fieldName = fieldName; this.field = field; this.mapper = mapper; @@ -45,28 +44,4 @@ public class HighlighterContext { this.hitContext = hitContext; this.query = query; } - - public static class HighlightQuery { - private final Query originalQuery; - private final Query query; - private final boolean queryRewritten; - - protected HighlightQuery(Query originalQuery, Query query, boolean queryRewritten) { - this.originalQuery = originalQuery; - this.query = query; - this.queryRewritten = queryRewritten; - } - - public boolean queryRewritten() { - return queryRewritten; - } - - public Query 
originalQuery() { - return originalQuery; - } - - public Query query() { - return query; - } - } } diff --git a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 3e0eca6c468..12d7d08fa8f 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; -import org.apache.lucene.search.Query; import org.apache.lucene.search.highlight.*; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.text.StringText; @@ -69,8 +68,7 @@ public class PlainHighlighter implements Highlighter { org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - Query query = highlighterContext.query.originalQuery(); - QueryScorer queryScorer = new CustomQueryScorer(query, field.fieldOptions().requireFieldMatch() ? mapper.names().indexName() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? 
mapper.names().indexName() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { diff --git a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index 1614f294ff2..0375ff204f7 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -91,7 +91,7 @@ public class PostingsHighlighter implements Highlighter { } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query.originalQuery(), searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); for (Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); From ce63590bd67f3fa419b388f7d3d13eaee1089b29 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 5 May 2015 14:11:05 +0200 Subject: [PATCH 012/123] API: Add response filtering with filter_path parameter This change adds a new "filter_path" parameter that can be used to filter and reduce the responses returned by the REST API of elasticsearch. For example, returning only the shards that failed to be optimized: ``` curl -XPOST 'localhost:9200/beer/_optimize?filter_path=_shards.failed' {"_shards":{"failed":0}}% ``` It supports multiple filters (separated by a comma): ``` curl -XGET 'localhost:9200/_mapping?pretty&filter_path=*.mappings.*.properties.name,*.mappings.*.properties.title' ``` It also supports the YAML response format. 
Here it returns only the `_id` field of a newly indexed document: ``` curl -XPOST 'localhost:9200/library/book?filter_path=_id' -d '---hello:\n world: 1\n' --- _id: "AU0j64-b-stVfkvus5-A" ``` It also supports wildcards. Here it returns only the host name of every nodes in the cluster: ``` curl -XGET 'http://localhost:9200/_nodes/stats?filter_path=nodes.*.host*' {"nodes":{"lvJHed8uQQu4brS-SXKsNA":{"host":"portable"}}} ``` And "**" can be used to include sub fields without knowing the exact path. Here it returns only the Lucene version of every segment: ``` curl 'http://localhost:9200/_segments?pretty&filter_path=indices.**.version' { "indices" : { "beer" : { "shards" : { "0" : [ { "segments" : { "_0" : { "version" : "5.2.0" }, "_1" : { "version" : "5.2.0" } } } ] } } } } ``` Note that elasticsearch sometimes returns directly the raw value of a field, like the _source field. If you want to filter _source fields, you should consider combining the already existing _source parameter (see Get API for more details) with the filter_path parameter like this: ``` curl -XGET 'localhost:9200/_search?pretty&filter_path=hits.hits._source&_source=title' { "hits" : { "hits" : [ { "_source":{"title":"Book #2"} }, { "_source":{"title":"Book #1"} }, { "_source":{"title":"Book #3"} } ] } } ``` --- docs/reference/api-conventions.asciidoc | 107 ++++ rest-api-spec/api/nodes.stats.json | 4 + rest-api-spec/api/search.json | 4 + .../nodes.stats/20_response_filtering.yaml | 154 +++++ .../test/search/70_response_filtering.yaml | 87 +++ .../common/xcontent/XContent.java | 5 + .../common/xcontent/XContentBuilder.java | 15 +- .../common/xcontent/cbor/CborXContent.java | 21 +- .../xcontent/cbor/CborXContentGenerator.java | 4 +- .../xcontent/json/BaseJsonGenerator.java | 80 +++ .../common/xcontent/json/JsonXContent.java | 19 +- .../xcontent/json/JsonXContentGenerator.java | 53 +- .../common/xcontent/smile/SmileXContent.java | 23 +- .../smile/SmileXContentGenerator.java | 4 +- 
.../support/filtering/FilterContext.java | 225 ++++++++ .../filtering/FilteringJsonGenerator.java | 423 ++++++++++++++ .../common/xcontent/yaml/YamlXContent.java | 21 +- .../xcontent/yaml/YamlXContentGenerator.java | 4 +- .../elasticsearch/rest/BytesRestResponse.java | 2 +- .../org/elasticsearch/rest/RestChannel.java | 13 +- .../elasticsearch/rest/RestController.java | 2 +- .../rest/action/get/RestGetSourceAction.java | 2 +- .../rest/action/index/RestIndexAction.java | 2 +- .../script/RestPutIndexedScriptAction.java | 2 +- .../AbstractFilteringJsonGeneratorTests.java | 524 ++++++++++++++++++ .../CborFilteringGeneratorTests.java | 36 ++ .../FilteringJsonGeneratorBenchmark.java | 99 ++++ .../JsonFilteringGeneratorTests.java | 36 ++ .../SmileFilteringGeneratorTests.java | 36 ++ .../YamlFilteringGeneratorTests.java | 36 ++ .../rest/RestFilterChainTests.java | 9 +- 31 files changed, 1986 insertions(+), 66 deletions(-) create mode 100644 rest-api-spec/test/nodes.stats/20_response_filtering.yaml create mode 100644 rest-api-spec/test/search/70_response_filtering.yaml create mode 100644 src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java create mode 100644 src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java create mode 100644 src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java create mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java create mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java create mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java create mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java create mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java create 
mode 100644 src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 25b9ac4fcea..7dfb3936e35 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -81,6 +81,113 @@ being consumed by a monitoring tool, rather than intended for human consumption. The default for the `human` flag is `false`. +[float] +=== Response Filtering + +All REST APIs accept a `filter_path` parameter that can be used to reduce +the response returned by elasticsearch. This parameter takes a comma +separated list of filters expressed with the dot notation: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_search?pretty&filter_path=took,hits.hits._id,hits.hits._score' +{ + "took" : 3, + "hits" : { + "hits" : [ + { + "_id" : "3640", + "_score" : 1.0 + }, + { + "_id" : "3642", + "_score" : 1.0 + } + ] + } +} +-------------------------------------------------- + +It also supports the `*` wildcard character to match any field or part +of a field's name: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_nodes/stats?filter_path=nodes.*.ho*' +{ + "nodes" : { + "lvJHed8uQQu4brS-SXKsNA" : { + "host" : "portable" + } + } +} +-------------------------------------------------- + +And the `**` wildcard can be used to include fields without knowing the +exact path of the field. 
For example, we can return the Lucene version +of every segment with this request: + +[source,sh] +-------------------------------------------------- +curl 'localhost:9200/_segments?pretty&filter_path=indices.**.version' +{ + "indices" : { + "movies" : { + "shards" : { + "0" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ], + "2" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ] + } + }, + "books" : { + "shards" : { + "0" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ] + } + } + } +} +-------------------------------------------------- + +Note that elasticsearch sometimes returns directly the raw value of a field, +like the `_source` field. If you want to filter _source fields, you should +consider combining the already existing `_source` parameter (see +<> for more details) with the `filter_path` + parameter like this: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_search?pretty&filter_path=hits.hits._source&_source=title' +{ + "hits" : { + "hits" : [ { + "_source":{"title":"Book #2"} + }, { + "_source":{"title":"Book #1"} + }, { + "_source":{"title":"Book #3"} + } ] + } +} +-------------------------------------------------- + + [float] === Flat Settings diff --git a/rest-api-spec/api/nodes.stats.json b/rest-api-spec/api/nodes.stats.json index 0a0870020b3..478dc7c8465 100644 --- a/rest-api-spec/api/nodes.stats.json +++ b/rest-api-spec/api/nodes.stats.json @@ -56,6 +56,10 @@ "options" : ["node", "indices", "shards"], "default" : "node" }, + "filter_path": { + "type" : "list", + "description" : "A comma-separated list of fields to include in the returned response" + }, "types" : { "type" : "list", "description" : "A comma-separated list of document types for the `indexing` index metric" diff --git a/rest-api-spec/api/search.json b/rest-api-spec/api/search.json index e3c286c842c..2d37ce4432f 100644 --- a/rest-api-spec/api/search.json +++ 
b/rest-api-spec/api/search.json @@ -72,6 +72,10 @@ "type" : "boolean", "description" : "Specify whether query terms should be lowercased" }, + "filter_path": { + "type" : "list", + "description" : "A comma-separated list of fields to include in the returned response" + }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/rest-api-spec/test/nodes.stats/20_response_filtering.yaml b/rest-api-spec/test/nodes.stats/20_response_filtering.yaml new file mode 100644 index 00000000000..4031f405259 --- /dev/null +++ b/rest-api-spec/test/nodes.stats/20_response_filtering.yaml @@ -0,0 +1,154 @@ +--- +"Nodes Stats with response filtering": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + # Nodes Stats with no filtering + - do: + nodes.stats: {} + + - is_true: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - gte: { nodes.$master.jvm.buffer_pools.direct.used_in_bytes: 0 } + + # Nodes Stats with only "cluster_name" field + - do: + nodes.stats: + filter_path: cluster_name + + - is_true: cluster_name + - is_false: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.jvm + + # Nodes Stats with "nodes" field and sub-fields + - do: + nodes.stats: + filter_path: nodes.* + + - is_false: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { 
nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - gte: { nodes.$master.jvm.buffer_pools.direct.used_in_bytes: 0 } + + # Nodes Stats with "nodes.*.indices" field and sub-fields + - do: + nodes.stats: + filter_path: nodes.*.indices + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_false: nodes.$master.jvm + + # Nodes Stats with "nodes.*.name" and "nodes.*.indices.docs.count" fields + - do: + nodes.stats: + filter_path: [ "nodes.*.name", "nodes.*.indices.docs.count" ] + + - is_false: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_false: nodes.$master.indices.segments + - is_false: nodes.$master.jvm + + # Nodes Stats with all "count" fields + - do: + nodes.stats: + filter_path: "nodes.**.count" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - is_false: 
nodes.$master.jvm.buffer_pools.direct.used_in_bytes + + # Nodes Stats with all "count" fields in sub-fields of "jvm" field + - do: + nodes.stats: + filter_path: "nodes.**.jvm.**.count" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.indices.docs.count + - is_false: nodes.$master.indices.segments.count + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - is_false: nodes.$master.jvm.buffer_pools.direct.used_in_bytes + + # Nodes Stats with "nodes.*.fs.data" fields + - do: + nodes.stats: + filter_path: "nodes.*.fs.data" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.jvm + - is_true: nodes.$master.fs.data + - is_true: nodes.$master.fs.data.0.path + - is_true: nodes.$master.fs.data.0.type + - is_true: nodes.$master.fs.data.0.total_in_bytes + + # Nodes Stats with "nodes.*.fs.data.t*" fields + - do: + nodes.stats: + filter_path: "nodes.*.fs.data.t*" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.jvm + - is_true: nodes.$master.fs.data + - is_false: nodes.$master.fs.data.0.path + - is_true: nodes.$master.fs.data.0.type + - is_true: nodes.$master.fs.data.0.total_in_bytes diff --git a/rest-api-spec/test/search/70_response_filtering.yaml b/rest-api-spec/test/search/70_response_filtering.yaml new file mode 100644 index 00000000000..ade3b68e256 --- /dev/null +++ b/rest-api-spec/test/search/70_response_filtering.yaml @@ -0,0 +1,87 @@ +--- +"Search with response filtering": + - do: + indices.create: + index: test + - do: + index: + index: test + type: test + id: 1 + body: { foo: bar } + + - do: + index: + index: test 
+ type: test + id: 2 + body: { foo: bar } + + - do: + indices.refresh: + index: [test] + + - do: + search: + index: test + filter_path: "*" + body: "{ query: { match_all: {} } }" + + - is_true: took + - is_true: _shards.total + - is_true: hits.total + - is_true: hits.hits.0._index + - is_true: hits.hits.0._type + - is_true: hits.hits.0._id + - is_true: hits.hits.1._index + - is_true: hits.hits.1._type + - is_true: hits.hits.1._id + + - do: + search: + index: test + filter_path: "took" + body: "{ query: { match_all: {} } }" + + - is_true: took + - is_false: _shards.total + - is_false: hits.total + - is_false: hits.hits.0._index + - is_false: hits.hits.0._type + - is_false: hits.hits.0._id + - is_false: hits.hits.1._index + - is_false: hits.hits.1._type + - is_false: hits.hits.1._id + + - do: + search: + index: test + filter_path: "_shards.*" + body: "{ query: { match_all: {} } }" + + - is_false: took + - is_true: _shards.total + - is_false: hits.total + - is_false: hits.hits.0._index + - is_false: hits.hits.0._type + - is_false: hits.hits.0._id + - is_false: hits.hits.1._index + - is_false: hits.hits.1._type + - is_false: hits.hits.1._id + + - do: + search: + index: test + filter_path: [ "hits.**._i*", "**.total" ] + body: "{ query: { match_all: {} } }" + + - is_false: took + - is_true: _shards.total + - is_true: hits.total + - is_true: hits.hits.0._index + - is_false: hits.hits.0._type + - is_true: hits.hits.0._id + - is_true: hits.hits.1._index + - is_false: hits.hits.1._type + - is_true: hits.hits.1._id + diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/src/main/java/org/elasticsearch/common/xcontent/XContent.java index d9cf704725c..101098d67a4 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContent.java @@ -40,6 +40,11 @@ public interface XContent { */ XContentGenerator createGenerator(OutputStream os) throws IOException; + /** + * Creates a new 
generator using the provided output stream and some filters. + */ + XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException; + /** * Creates a new generator using the provided writer. */ diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index b2cf8738fe0..fb31bd89a95 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -77,6 +77,10 @@ public final class XContentBuilder implements BytesStream, Releasable { return new XContentBuilder(xContent, new BytesStreamOutput()); } + public static XContentBuilder builder(XContent xContent, String[] filters) throws IOException { + return new XContentBuilder(xContent, new BytesStreamOutput(), filters); + } + private XContentGenerator generator; private final OutputStream bos; @@ -92,8 +96,17 @@ public final class XContentBuilder implements BytesStream, Releasable { * to call {@link #close()} when the builder is done with. */ public XContentBuilder(XContent xContent, OutputStream bos) throws IOException { + this(xContent, bos, null); + } + + /** + * Constructs a new builder using the provided xcontent, an OutputStream and some filters. The + * filters are used to filter fields that won't be written to the OutputStream. Make sure + * to call {@link #close()} when the builder is done with. 
+ */ + public XContentBuilder(XContent xContent, OutputStream bos, String[] filters) throws IOException { this.bos = bos; - this.generator = xContent.createGenerator(bos); + this.generator = xContent.createGenerator(bos, filters); } public XContentBuilder fieldCaseConversion(FieldCaseConversion fieldCaseConversion) { diff --git a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index 06f8605ec24..5210a82527e 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -20,11 +20,15 @@ package org.elasticsearch.common.xcontent.cbor; import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -59,14 +63,27 @@ public class CborXContent implements XContent { throw new ElasticsearchParseException("cbor does not support stream parsing..."); } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new CborXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new CborXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws 
IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator cborGenerator = new FilteringJsonGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new CborXContentGenerator(cborGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new CborXContentGenerator(cborFactory.createGenerator(writer)); + return newXContentGenerator(cborFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java index c410d777b0d..70b92b0708c 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.cbor; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.cbor.CBORParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class CborXContentGenerator extends JsonXContentGenerator { - public CborXContentGenerator(JsonGenerator generator) { + public CborXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java new file mode 100644 index 00000000000..0b485508c32 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + 
* license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.json; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.base.GeneratorBase; +import com.fasterxml.jackson.core.util.JsonGeneratorDelegate; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +public class BaseJsonGenerator extends JsonGeneratorDelegate { + + protected final GeneratorBase base; + + public BaseJsonGenerator(JsonGenerator generator, JsonGenerator base) { + super(generator, true); + if (base instanceof GeneratorBase) { + this.base = (GeneratorBase) base; + } else { + this.base = null; + } + } + + public BaseJsonGenerator(JsonGenerator generator) { + this(generator, generator); + } + + protected void writeStartRaw(String fieldName) throws IOException { + writeFieldName(fieldName); + writeRaw(':'); + } + + public void writeEndRaw() { + assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + delegate.getClass(); + if (base != null) { + base.getOutputContext().writeValue(); + } + } + + protected void writeRawValue(byte[] content, OutputStream bos) throws IOException { + flush(); + 
bos.write(content); + } + + protected void writeRawValue(byte[] content, int offset, int length, OutputStream bos) throws IOException { + flush(); + bos.write(content, offset, length); + } + + protected void writeRawValue(InputStream content, OutputStream bos) throws IOException { + flush(); + Streams.copy(content, bos); + } + + protected void writeRawValue(BytesReference content, OutputStream bos) throws IOException { + flush(); + content.writeTo(bos); + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index 5b8fd1b0e59..47da7934939 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -25,7 +25,9 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -63,14 +65,27 @@ public class JsonXContent implements XContent { return '\n'; } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new JsonXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator jsonGenerator = new 
FilteringJsonGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new JsonXContentGenerator(jsonGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new JsonXContentGenerator(jsonFactory.createGenerator(writer)); + return newXContentGenerator(jsonFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 71b41919b55..a7946218e21 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -19,11 +19,8 @@ package org.elasticsearch.common.xcontent.json; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.base.GeneratorBase; import com.fasterxml.jackson.core.io.SerializedString; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.*; import java.io.IOException; @@ -35,18 +32,11 @@ import java.io.OutputStream; */ public class JsonXContentGenerator implements XContentGenerator { - protected final JsonGenerator generator; + protected final BaseJsonGenerator generator; private boolean writeLineFeedAtEnd; - private final GeneratorBase base; - public JsonXContentGenerator(JsonGenerator generator) { + public JsonXContentGenerator(BaseJsonGenerator generator) { this.generator = generator; - if (generator instanceof GeneratorBase) { - base = (GeneratorBase) generator; - } else { - base = null; - } - } @Override @@ -261,29 +251,23 @@ public class JsonXContentGenerator implements XContentGenerator { @Override public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - 
bos.write(content); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - bos.write(content, offset, length); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, offset, length, bos); + generator.writeEndRaw(); } @Override public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - Streams.copy(content, bos); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override @@ -308,18 +292,9 @@ public class JsonXContentGenerator implements XContentGenerator { } protected void writeObjectRaw(String fieldName, BytesReference content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - content.writeTo(bos); - finishWriteRaw(); - } - - private void finishWriteRaw() { - assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + generator.getClass(); - if (base != null) { - base.getOutputContext().writeValue(); - } + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index 50fd0442905..8a21ce1d93a 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -20,13 +20,15 @@ package org.elasticsearch.common.xcontent.smile; import 
com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.common.xcontent.json.JsonXContentParser; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -62,14 +64,27 @@ public class SmileXContent implements XContent { return (byte) 0xFF; } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new SmileXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator smileGenerator = new FilteringJsonGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new SmileXContentGenerator(smileGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new SmileXContentGenerator(smileFactory.createGenerator(writer)); + return newXContentGenerator(smileFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java 
b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java index fb0b7d5e4b8..b8c1b3dad65 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.smile; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.smile.SmileParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class SmileXContentGenerator extends JsonXContentGenerator { - public SmileXContentGenerator(JsonGenerator generator) { + public SmileXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java new file mode 100644 index 00000000000..215af370b31 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java @@ -0,0 +1,225 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import com.fasterxml.jackson.core.JsonGenerator; +import org.elasticsearch.common.regex.Regex; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A FilterContext contains the description of a field about to be written by a JsonGenerator. + */ +public class FilterContext { + + /** + * The field/property name to be write + */ + private String property; + + /** + * List of XContentFilter matched by the current filtering context + */ + private List matchings; + + /** + * Flag to indicate if the field/property must be written + */ + private Boolean write = null; + + /** + * Flag to indicate if the field/property match a filter + */ + private boolean match = false; + + /** + * Points to the parent context + */ + private FilterContext parent; + + /** + * Type of the field/property + */ + private Type type = Type.VALUE; + + protected enum Type { + VALUE, + OBJECT, + ARRAY, + ARRAY_OF_OBJECT + } + + public FilterContext(String property, FilterContext parent) { + this.property = property; + this.parent = parent; + } + + public void reset(String property) { + this.property = property; + this.write = null; + if (matchings != null) { + matchings.clear(); + } + this.match = false; + this.type = Type.VALUE; + } + + public void reset(String property, FilterContext parent) { + reset(property); + this.parent = parent; + if (parent.isMatch()) { + match = true; + } + } + + public FilterContext parent() { + return parent; + } + + public List 
matchings() { + return matchings; + } + + public void addMatching(String[] matching) { + if (matchings == null) { + matchings = new ArrayList<>(); + } + matchings.add(matching); + } + + public boolean isRoot() { + return parent == null; + } + + public boolean isArray() { + return Type.ARRAY.equals(type); + } + + public void initArray() { + this.type = Type.ARRAY; + } + + public boolean isObject() { + return Type.OBJECT.equals(type); + } + + public void initObject() { + this.type = Type.OBJECT; + } + + public boolean isArrayOfObject() { + return Type.ARRAY_OF_OBJECT.equals(type); + } + + public void initArrayOfObject() { + this.type = Type.ARRAY_OF_OBJECT; + } + + public boolean isMatch() { + return match; + } + + /** + * This method contains the logic to check if a field/property must be included + * or not. + */ + public boolean include() { + if (write == null) { + if (parent != null) { + // the parent context matches the end of a filter list: + // by default we include all the sub properties so we + // don't need to check if the sub properties also match + if (parent.isMatch()) { + write = true; + match = true; + return write; + } + + if (parent.matchings() != null) { + + // Iterates over the filters matched by the parent context + // and checks if the current context also match + for (String[] matcher : parent.matchings()) { + if (matcher.length > 0) { + String field = matcher[0]; + + if ("**".equals(field)) { + addMatching(matcher); + } + + if ((field != null) && (Regex.simpleMatch(field, property))) { + int remaining = matcher.length - 1; + + // the current context matches the end of a filter list: + // it must be written and it is flagged as a direct match + if (remaining == 0) { + write = true; + match = true; + return write; + } else { + String[] submatching = new String[remaining]; + System.arraycopy(matcher, 1, submatching, 0, remaining); + addMatching(submatching); + } + } + } + } + } + } else { + // Root object is always written + write = true; + } + + 
if (write == null) { + write = false; + } + } + return write; + } + + /** + * Ensure that the full path to the current field is write by the JsonGenerator + * + * @param generator + * @throws IOException + */ + public void writePath(JsonGenerator generator) throws IOException { + if (parent != null) { + parent.writePath(generator); + } + + if ((write == null) || (!write)) { + write = true; + + if (property == null) { + generator.writeStartObject(); + } else { + generator.writeFieldName(property); + if (isArray()) { + generator.writeStartArray(); + } else if (isObject() || isArrayOfObject()) { + generator.writeStartObject(); + } + } + } + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java new file mode 100644 index 00000000000..2748b4b5097 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java @@ -0,0 +1,423 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import com.fasterxml.jackson.core.Base64Variant; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.SerializableString; +import com.google.common.collect.ImmutableList; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Queue; + +/** + * A FilteringJsonGenerator uses antpath-like filters to include/exclude fields when writing XContent streams. + * + * When writing a XContent stream, this class instantiates (or reuses) a FilterContext instance for each + * field (or property) that must be generated. This filter context is used to check if the field/property must be + * written according to the current list of XContentFilter filters. 
+ */ +public class FilteringJsonGenerator extends BaseJsonGenerator { + + /** + * List of previous contexts + * (MAX_CONTEXTS contexts are kept around in order to be reused) + */ + private Queue contexts = new ArrayDeque<>(); + private static final int MAX_CONTEXTS = 10; + + /** + * Current filter context + */ + private FilterContext context; + + public FilteringJsonGenerator(JsonGenerator generator, String[] filters) { + super(generator); + + ImmutableList.Builder builder = ImmutableList.builder(); + if (filters != null) { + for (String filter : filters) { + String[] matcher = Strings.delimitedListToStringArray(filter, "."); + if (matcher != null) { + builder.add(matcher); + } + } + } + + // Creates a root context that matches all filtering rules + this.context = get(null, null, builder.build()); + } + + /** + * Get a new context instance (and reset it if needed) + */ + private FilterContext get(String property, FilterContext parent) { + FilterContext ctx = contexts.poll(); + if (ctx == null) { + ctx = new FilterContext(property, parent); + } else { + ctx.reset(property, parent); + } + return ctx; + } + + /** + * Get a new context instance (and reset it if needed) + */ + private FilterContext get(String property, FilterContext context, List matchings) { + FilterContext ctx = get(property, context); + if (matchings != null) { + for (String[] matching : matchings) { + ctx.addMatching(matching); + } + } + return ctx; + } + + /** + * Adds a context instance to the pool in order to reuse it if needed + */ + private void put(FilterContext ctx) { + if (contexts.size() <= MAX_CONTEXTS) { + contexts.offer(ctx); + } + } + + @Override + public void writeStartArray() throws IOException { + context.initArray(); + if (context.include()) { + super.writeStartArray(); + } + } + + @Override + public void writeStartArray(int size) throws IOException { + context.initArray(); + if (context.include()) { + super.writeStartArray(size); + } + } + + @Override + public void writeEndArray() 
throws IOException { + // Case of array of objects + if (context.isArrayOfObject()) { + // Release current context and go one level up + FilterContext parent = context.parent(); + put(context); + context = parent; + } + + if (context.include()) { + super.writeEndArray(); + } + } + + @Override + public void writeStartObject() throws IOException { + // Case of array of objects + if (context.isArray()) { + // Get a context for the anonymous object + context = get(null, context, context.matchings()); + context.initArrayOfObject(); + } + + if (!context.isArrayOfObject()) { + context.initObject(); + } + + if (context.include()) { + super.writeStartObject(); + } + + context = get(null, context); + } + + @Override + public void writeEndObject() throws IOException { + if (!context.isRoot()) { + // Release current context and go one level up + FilterContext parent = context.parent(); + put(context); + context = parent; + } + + if (context.include()) { + super.writeEndObject(); + } + } + + @Override + public void writeFieldName(String name) throws IOException { + context.reset(name); + + if (context.include()) { + // Ensure that the full path to the field is written + context.writePath(delegate); + super.writeFieldName(name); + } + } + + @Override + public void writeFieldName(SerializableString name) throws IOException { + context.reset(name.getValue()); + + if (context.include()) { + // Ensure that the full path to the field is written + context.writePath(delegate); + super.writeFieldName(name); + } + } + + @Override + public void writeString(String text) throws IOException { + if (context.include()) { + super.writeString(text); + } + } + + @Override + public void writeString(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeString(text, offset, len); + } + } + + @Override + public void writeString(SerializableString text) throws IOException { + if (context.include()) { + super.writeString(text); + } + } + + @Override + public 
void writeRawUTF8String(byte[] text, int offset, int length) throws IOException { + if (context.include()) { + super.writeRawUTF8String(text, offset, length); + } + } + + @Override + public void writeUTF8String(byte[] text, int offset, int length) throws IOException { + if (context.include()) { + super.writeUTF8String(text, offset, length); + } + } + + @Override + public void writeRaw(String text) throws IOException { + if (context.include()) { + super.writeRaw(text); + } + } + + @Override + public void writeRaw(String text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRaw(text, offset, len); + } + } + + @Override + public void writeRaw(SerializableString raw) throws IOException { + if (context.include()) { + super.writeRaw(raw); + } + } + + @Override + public void writeRaw(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRaw(text, offset, len); + } + } + + @Override + public void writeRaw(char c) throws IOException { + if (context.include()) { + super.writeRaw(c); + } + } + + @Override + public void writeRawValue(String text) throws IOException { + if (context.include()) { + super.writeRawValue(text); + } + } + + @Override + public void writeRawValue(String text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRawValue(text, offset, len); + } + } + + @Override + public void writeRawValue(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRawValue(text, offset, len); + } + } + + @Override + public void writeBinary(Base64Variant b64variant, byte[] data, int offset, int len) throws IOException { + if (context.include()) { + super.writeBinary(b64variant, data, offset, len); + } + } + + @Override + public int writeBinary(Base64Variant b64variant, InputStream data, int dataLength) throws IOException { + if (context.include()) { + return super.writeBinary(b64variant, data, dataLength); + } + return 0; + } + + 
@Override + public void writeNumber(short v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(int v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(long v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(BigInteger v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(double v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(float v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(BigDecimal v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(String encodedValue) throws IOException, UnsupportedOperationException { + if (context.include()) { + super.writeNumber(encodedValue); + } + } + + @Override + public void writeBoolean(boolean state) throws IOException { + if (context.include()) { + super.writeBoolean(state); + } + } + + @Override + public void writeNull() throws IOException { + if (context.include()) { + super.writeNull(); + } + } + + @Override + public void copyCurrentEvent(JsonParser jp) throws IOException { + if (context.include()) { + super.copyCurrentEvent(jp); + } + } + + @Override + public void copyCurrentStructure(JsonParser jp) throws IOException { + if (context.include()) { + super.copyCurrentStructure(jp); + } + } + + @Override + protected void writeRawValue(byte[] content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + protected void writeRawValue(byte[] content, int offset, int length, OutputStream bos) throws IOException { + if (context.include()) { + 
super.writeRawValue(content, offset, length, bos); + } + } + + @Override + protected void writeRawValue(InputStream content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + protected void writeRawValue(BytesReference content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + public void close() throws IOException { + contexts.clear(); + super.close(); + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index e7111849f7e..388cd992e2b 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -20,11 +20,15 @@ package org.elasticsearch.common.xcontent.yaml; import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -58,14 +62,27 @@ public class YamlXContent implements XContent { throw new ElasticsearchParseException("yaml does not support stream parsing..."); } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new YamlXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new YamlXContentGenerator(yamlFactory.createGenerator(os, 
JsonEncoding.UTF8)); + return newXContentGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator yamlGenerator = new FilteringJsonGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new YamlXContentGenerator(yamlGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new YamlXContentGenerator(yamlFactory.createGenerator(writer)); + return newXContentGenerator(yamlFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java index 350554ac6f1..62967247a82 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.yaml; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class YamlXContentGenerator extends JsonXContentGenerator { - public YamlXContentGenerator(JsonGenerator generator) { + public YamlXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 693188307e3..02820158a6e 100644 --- 
a/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -115,7 +115,7 @@ public class BytesRestResponse extends RestResponse { } private static XContentBuilder convert(RestChannel channel, RestStatus status, Throwable t) throws IOException { - XContentBuilder builder = channel.newBuilder().startObject(); + XContentBuilder builder = channel.newErrorBuilder().startObject(); if (t == null) { builder.field("error", "unknown"); } else if (channel.detailedErrorsEnabled()) { diff --git a/src/main/java/org/elasticsearch/rest/RestChannel.java b/src/main/java/org/elasticsearch/rest/RestChannel.java index 773d939493f..cd185b07958 100644 --- a/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -44,10 +44,15 @@ public abstract class RestChannel { } public XContentBuilder newBuilder() throws IOException { - return newBuilder(request.hasContent() ? request.content() : null); + return newBuilder(request.hasContent() ? request.content() : null, request.hasParam("filter_path")); } - public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource) throws IOException { + public XContentBuilder newErrorBuilder() throws IOException { + // Disable filtering when building error responses + return newBuilder(request.hasContent() ? request.content() : null, false); + } + + public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering) throws IOException { XContentType contentType = XContentType.fromRestContentType(request.param("format", request.header("Content-Type"))); if (contentType == null) { // try and guess it from the auto detect source @@ -59,7 +64,9 @@ public abstract class RestChannel { // default to JSON contentType = XContentType.JSON; } - XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), bytesOutput()); + + String[] filters = useFiltering ? 
request.paramAsStringArrayOrEmptyIfAll("filter_path") : null; + XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), bytesOutput(), filters); if (request.paramAsBoolean("pretty", false)) { builder.prettyPrint().lfAtEnd(); } diff --git a/src/main/java/org/elasticsearch/rest/RestController.java b/src/main/java/org/elasticsearch/rest/RestController.java index f2e36d2b22c..3e3360337de 100644 --- a/src/main/java/org/elasticsearch/rest/RestController.java +++ b/src/main/java/org/elasticsearch/rest/RestController.java @@ -187,7 +187,7 @@ public class RestController extends AbstractLifecycleComponent { // error_trace cannot be used when we disable detailed errors if (channel.detailedErrorsEnabled() == false && request.paramAsBoolean("error_trace", false)) { try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); builder.startObject().field("error","error traces in responses are disabled.").endObject().string(); RestResponse response = new BytesRestResponse(BAD_REQUEST, builder); response.addHeader("Content-Type", "application/json"); diff --git a/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index db3954ec5e8..1fe07156d99 100644 --- a/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -73,7 +73,7 @@ public class RestGetSourceAction extends BaseRestHandler { client.get(getRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(GetResponse response) throws Exception { - XContentBuilder builder = channel.newBuilder(response.getSourceInternal()); + XContentBuilder builder = channel.newBuilder(response.getSourceInternal(), false); if (!response.isExists()) { return new BytesRestResponse(NOT_FOUND, builder); } else { diff --git 
a/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index a0d5b279e71..3d3ecdfa880 100644 --- a/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -88,7 +88,7 @@ public class RestIndexAction extends BaseRestHandler { indexRequest.opType(IndexRequest.OpType.fromString(sOpType)); } catch (IllegalArgumentException eia){ try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); } catch (IOException e1) { logger.warn("Failed to send response", e1); diff --git a/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java index 35e3f2cc473..33145be6a47 100644 --- a/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java +++ b/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java @@ -85,7 +85,7 @@ public class RestPutIndexedScriptAction extends BaseRestHandler { putRequest.opType(IndexRequest.OpType.fromString(sOpType)); } catch (IllegalArgumentException eia){ try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); return; } catch (IOException e1) { diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java new file mode 100644 index 00000000000..d07bf442887 --- /dev/null +++ 
b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java @@ -0,0 +1,524 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public abstract class AbstractFilteringJsonGeneratorTests extends ElasticsearchTestCase { + + protected abstract XContentType getXContentType(); + + protected abstract void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder); + + protected void assertString(XContentBuilder expected, XContentBuilder builder) { + assertNotNull(builder); + assertNotNull(expected); + + // Verify that the result is equal to the expected string + assertThat(builder.bytes().toUtf8(), is(expected.bytes().toUtf8())); + } + + protected void assertBinary(XContentBuilder expected, XContentBuilder builder) { + 
assertNotNull(builder); + assertNotNull(expected); + + try { + XContent xContent = XContentFactory.xContent(builder.contentType()); + XContentParser jsonParser = xContent.createParser(expected.bytes()); + XContentParser testParser = xContent.createParser(builder.bytes()); + + while (true) { + XContentParser.Token token1 = jsonParser.nextToken(); + XContentParser.Token token2 = testParser.nextToken(); + if (token1 == null) { + assertThat(token2, nullValue()); + return; + } + assertThat(token1, equalTo(token2)); + switch (token1) { + case FIELD_NAME: + assertThat(jsonParser.currentName(), equalTo(testParser.currentName())); + break; + case VALUE_STRING: + assertThat(jsonParser.text(), equalTo(testParser.text())); + break; + case VALUE_NUMBER: + assertThat(jsonParser.numberType(), equalTo(testParser.numberType())); + assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue())); + break; + } + } + } catch (Exception e) { + fail("Fail to verify the result of the XContentBuilder: " + e.getMessage()); + } + } + + private XContentBuilder newXContentBuilder(String... 
filters) throws IOException { + return XContentBuilder.builder(getXContentType().xContent(), filters); + } + + /** + * Build a sample using a given XContentBuilder + */ + private XContentBuilder sample(XContentBuilder builder) throws IOException { + assertNotNull(builder); + builder.startObject() + .field("title", "My awesome book") + .field("pages", 456) + .field("price", 27.99) + .field("timestamp", 1428582942867L) + .nullField("default") + .startArray("tags") + .value("elasticsearch") + .value("java") + .endArray() + .startArray("authors") + .startObject() + .field("name", "John Doe") + .field("lastname", "John") + .field("firstname", "Doe") + .endObject() + .startObject() + .field("name", "William Smith") + .field("lastname", "William") + .field("firstname", "Smith") + .endObject() + .endArray() + .startObject("properties") + .field("weight", 0.8d) + .startObject("language") + .startObject("en") + .field("lang", "English") + .field("available", true) + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Hampton St") + .field("city", "London") + .endObject() + .startObject() + .field("name", "address #2") + .field("street", "Queen St") + .field("city", "Stornoway") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .field("lang", "French") + .field("available", false) + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Rue Mouffetard") + .field("city", "Paris") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + return builder; + } + + /** + * Instanciates a new 
XContentBuilder with the given filters and builds a sample with it. + */ + private XContentBuilder sample(String... filters) throws IOException { + return sample(newXContentBuilder(filters)); + } + + @Test + public void testNoFiltering() throws Exception { + XContentBuilder expected = sample(); + + assertXContentBuilder(expected, sample()); + assertXContentBuilder(expected, sample("*")); + assertXContentBuilder(expected, sample("**")); + } + + @Test + public void testNoMatch() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject().endObject(); + + assertXContentBuilder(expected, sample("xyz")); + } + + @Test + public void testSimpleField() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("title", "My awesome book") + .endObject(); + + assertXContentBuilder(expected, sample("title")); + } + + @Test + public void testSimpleFieldWithWildcard() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("price", 27.99) + .startObject("properties") + .field("weight", 0.8d) + .startObject("language") + .startObject("en") + .field("lang", "English") + .field("available", true) + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Hampton St") + .field("city", "London") + .endObject() + .startObject() + .field("name", "address #2") + .field("street", "Queen St") + .field("city", "Stornoway") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .field("lang", "French") + .field("available", false) + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Rue Mouffetard") + .field("city", "Paris") + .endObject() + 
.endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("pr*")); + } + + @Test + public void testMultipleFields() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("title", "My awesome book") + .field("pages", 456) + .endObject(); + + assertXContentBuilder(expected, sample("title", "pages")); + } + + @Test + public void testSimpleArray() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("tags") + .value("elasticsearch") + .value("java") + .endArray() + .endObject(); + + assertXContentBuilder(expected, sample("tags")); + } + + @Test + public void testSimpleArrayOfObjects() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + .field("name", "John Doe") + .field("lastname", "John") + .field("firstname", "Doe") + .endObject() + .startObject() + .field("name", "William Smith") + .field("lastname", "William") + .field("firstname", "Smith") + .endObject() + .endArray() + .endObject(); + + assertXContentBuilder(expected, sample("authors")); + assertXContentBuilder(expected, sample("authors.*")); + assertXContentBuilder(expected, sample("authors.*name")); + } + + @Test + public void testSimpleArrayOfObjectsProperty() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + .field("lastname", "John") + .endObject() + .startObject() + .field("lastname", "William") + .endObject() + .endArray() + .endObject(); + + assertXContentBuilder(expected, sample("authors.lastname")); + assertXContentBuilder(expected, sample("authors.l*")); + } + + @Test + public void testRecurseField1() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + 
.field("name", "John Doe") + .endObject() + .startObject() + .field("name", "William Smith") + . endObject() + .endArray() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("**.name")); + } + + @Test + public void testRecurseField2() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.**.name")); + } + + @Test + public 
void testRecurseField3() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.*.en.**.name")); + } + + @Test + public void testRecurseField4() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.**.distributors.name")); + } + + @Test + public void testRawField() throws Exception { + + XContentBuilder expectedRawField = newXContentBuilder().startObject().field("foo", 0).startObject("raw").field("content", "hello world!").endObject().endObject(); + XContentBuilder expectedRawFieldFiltered = newXContentBuilder().startObject().field("foo", 0).endObject(); + XContentBuilder expectedRawFieldNotFiltered =newXContentBuilder().startObject().startObject("raw").field("content", "hello world!").endObject().endObject(); + + BytesReference raw = newXContentBuilder().startObject().field("content", "hello 
world!").endObject().bytes(); + + // Test method: rawField(String fieldName, BytesReference content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", raw).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", raw).endObject()); + + // Test method: rawField(String fieldName, byte[] content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + + // Test method: rawField(String fieldName, InputStream content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + } + + @Test + public void testArrays() throws Exception { + // Test: Array of values (no filtering) + XContentBuilder expected = newXContentBuilder().startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject(); + assertXContentBuilder(expected, 
newXContentBuilder("t*").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + assertXContentBuilder(expected, newXContentBuilder("tags").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + + // Test: Array of values (with filtering) + assertXContentBuilder(newXContentBuilder().startObject().endObject(), newXContentBuilder("foo").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + + // Test: Array of objects (no filtering) + expected = newXContentBuilder().startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject(); + assertXContentBuilder(expected, newXContentBuilder("t*").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + assertXContentBuilder(expected, newXContentBuilder("tags").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + // Test: Array of objects (with filtering) + assertXContentBuilder(newXContentBuilder().startObject().endObject(), newXContentBuilder("foo").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + // Test: Array of objects (with partial filtering) + expected = newXContentBuilder().startObject().startArray("tags").startObject().field("firstname", "ipsum").endObject().endArray().endObject(); + assertXContentBuilder(expected, newXContentBuilder("t*.firstname").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + } +} diff --git 
a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java new file mode 100644 index 00000000000..fab77a26be7 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class CborFilteringGeneratorTests extends JsonFilteringGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.CBOR; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertBinary(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java new file mode 100644 index 00000000000..97ce4fcb838 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Locale; + +/** + * Benchmark class to compare filtered and unfiltered XContent generators. + */ +public class FilteringJsonGeneratorBenchmark { + + public static void main(String[] args) throws IOException { + final XContent XCONTENT = JsonXContent.jsonXContent; + + System.out.println("Executing " + FilteringJsonGeneratorBenchmark.class + "..."); + + System.out.println("Warming up..."); + run(XCONTENT, 500_000, 100, 0.5); + System.out.println("Warmed up."); + + System.out.println("nb documents | nb fields | nb fields written | % fields written | time (millis) | rate (docs/sec) | avg size"); + + for (int nbFields : Arrays.asList(10, 25, 50, 100, 250)) { + for (int nbDocs : Arrays.asList(100, 1000, 10_000, 100_000, 500_000)) { + for (double ratio : Arrays.asList(0.0, 1.0, 0.99, 0.95, 0.9, 0.75, 0.5, 0.25, 0.1, 0.05, 0.01)) { + run(XCONTENT, nbDocs, nbFields, ratio); + } + } + } + System.out.println("Done."); + } + + private static void run(XContent xContent, long nbIterations, int nbFields, double ratio) throws IOException { + String[] fields = fields(nbFields); + String[] filters = fields((int) (nbFields * ratio)); + + long size = 0; + BytesStreamOutput os = new BytesStreamOutput(); + + long start = System.nanoTime(); + for (int i = 0; i < nbIterations; i++) { + XContentBuilder builder = new XContentBuilder(xContent, os, filters); + builder.startObject(); + + for (String field : fields) { + builder.field(field, System.nanoTime()); + } + builder.endObject(); + + size += builder.bytes().length(); + os.reset(); + } + double milliseconds = (System.nanoTime() - start) / 1_000_000d; + + 
System.out.printf(Locale.ROOT, "%12d | %9d | %17d | %14.2f %% | %10.3f ms | %15.2f | %8.0f %n", + nbIterations, nbFields, + (int) (nbFields * ratio), + (ratio * 100d), + milliseconds, + ((double) nbIterations) / (milliseconds / 1000d), + size / ((double) nbIterations)); + } + + /** + * Returns a String array of field names starting from "field_0" with a length of n. + * If n=3, the array is ["field_0","field_1","field_2"] + */ + private static String[] fields(int n) { + String[] fields = new String[n]; + for (int i = 0; i < n; i++) { + fields[i] = "field_" + i; + } + return fields; + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java new file mode 100644 index 00000000000..9468746fac6 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class JsonFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.JSON; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertString(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java new file mode 100644 index 00000000000..a12e12be172 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class SmileFilteringGeneratorTests extends JsonFilteringGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.SMILE; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertBinary(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java new file mode 100644 index 00000000000..d7e3a934ec4 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class YamlFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.YAML; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertString(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index dc537468c28..087bc306321 100644 --- a/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -168,8 +168,13 @@ public class RestFilterChainTests extends ElasticsearchTestCase { } @Override - public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource) throws IOException { - return super.newBuilder(autoDetectSource); + public XContentBuilder newErrorBuilder() throws IOException { + return super.newErrorBuilder(); + } + + @Override + public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering) throws IOException { + return super.newBuilder(autoDetectSource, useFiltering); } @Override From b465e19e5f6416239f3c9e875257109c8fb96a6c Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Tue, 26 May 2015 14:42:59 +0200 Subject: [PATCH 013/123] Release script: Always check for valid environment In order to be sure that a release can be executed on the local machine, the build_release script now checks for environment variables and tries to execute a couple of commands. In order to easily check for a correctly setup environment, you can run the following commands, which exits early and does not trigger a release process. 
``` python3 dev-tools/build_release.py --check-only ``` --- dev-tools/build_release.py | 94 +++++++++++++++++++++++++++++--------- 1 file changed, 72 insertions(+), 22 deletions(-) diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py index ca3223c0a92..8ae300d98fd 100644 --- a/dev-tools/build_release.py +++ b/dev-tools/build_release.py @@ -30,6 +30,7 @@ import socket import urllib.request import subprocess +from functools import partial from http.client import HTTPConnection from http.client import HTTPSConnection @@ -72,6 +73,11 @@ PLUGINS = [('license', 'elasticsearch/license/latest'), LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log') +# console colors +COLOR_OK = '\033[92m' +COLOR_END = '\033[0m' +COLOR_FAIL = '\033[91m' + def log(msg): log_plain('\n%s' % msg) @@ -137,9 +143,6 @@ def get_tag_hash(tag): def get_current_branch(): return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip() -verify_java_version('1.7') # we require to build with 1.7 -verify_mvn_java_version('1.7', MVN) - # Utility that returns the name of the release branch for a given version def release_branch(version): return 'release_branch_%s' % version @@ -545,14 +548,6 @@ def print_sonatype_notice(): """) -def check_s3_credentials(): - if not env.get('AWS_ACCESS_KEY_ID', None) or not env.get('AWS_SECRET_ACCESS_KEY', None): - raise RuntimeError('Could not find "AWS_ACCESS_KEY_ID" / "AWS_SECRET_ACCESS_KEY" in the env variables please export in order to upload to S3') - -def check_gpg_credentials(): - if not env.get('GPG_KEY_ID', None) or not env.get('GPG_PASSPHRASE', None): - raise RuntimeError('Could not find "GPG_KEY_ID" / "GPG_PASSPHRASE" in the env variables please export in order to sign the packages (also make sure that GPG_KEYRING is set when not in ~/.gnupg)') - def check_command_exists(name, cmd): try: subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) @@ -562,9 +557,6 @@ def check_command_exists(name, cmd): VERSION_FILE = 
'src/main/java/org/elasticsearch/Version.java' POM_FILE = 'pom.xml' -# we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml -print_sonatype_notice() - # finds the highest available bwc version to test against def find_bwc_version(release_version, bwc_dir='backwards'): log(' Lookup bwc version in directory [%s]' % bwc_dir) @@ -618,6 +610,60 @@ def check_norelease(path='src'): if pattern.search(line): raise RuntimeError('Found //norelease comment in %s line %s' % (full_path, line_number)) +def run_and_print(text, run_function): + try: + print(text, end='') + run_function() + print(COLOR_OK + 'OK' + COLOR_END) + return True + except RuntimeError: + print(COLOR_FAIL + 'NOT OK' + COLOR_END) + return False + +def check_env_var(text, env_var): + try: + print(text, end='') + env[env_var] + print(COLOR_OK + 'OK' + COLOR_END) + return True + except KeyError: + print(COLOR_FAIL + 'NOT OK' + COLOR_END) + return False + +def check_environment_and_commandline_tools(check_only): + checks = list() + checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_ACCESS_KEY_ID... ', 'AWS_SECRET_ACCESS_KEY')) + checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY_ID... ', 'AWS_ACCESS_KEY_ID')) + checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_USERNAME... ', 'SONATYPE_USERNAME')) + checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_PASSWORD... ', 'SONATYPE_PASSWORD')) + checks.append(check_env_var('Checking for GPG env configuration GPG_KEY_ID... ', 'GPG_KEY_ID')) + checks.append(check_env_var('Checking for GPG env configuration GPG_PASSPHRASE... ', 'GPG_PASSPHRASE')) + checks.append(check_env_var('Checking for S3 repo upload env configuration S3_BUCKET_SYNC_TO... ', 'S3_BUCKET_SYNC_TO')) + checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_NAME... 
', 'GIT_AUTHOR_NAME')) + checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_EMAIL... ', 'GIT_AUTHOR_EMAIL')) + + checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version'))) + checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version'))) + checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version'))) + checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v'))) + checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version'))) + checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version'))) + checks.append(run_and_print('Checking command: apt-ftparchive... ', partial(check_command_exists, 'apt-ftparchive', 'apt-ftparchive --version'))) + + # boto, check error code being returned + location = os.path.dirname(os.path.realpath(__file__)) + command = 'python %s/upload-s3.py -h' % (location) + checks.append(run_and_print('Testing boto python dependency... ', partial(check_command_exists, 'python-boto', command))) + + checks.append(run_and_print('Checking java version... ', partial(verify_java_version, '1.7'))) + checks.append(run_and_print('Checking java mvn version... 
', partial(verify_mvn_java_version, '1.7', MVN))) + + if check_only: + sys.exit(0) + + if False in checks: + print("Exiting due to failing checks") + sys.exit(0) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release') @@ -636,9 +682,12 @@ if __name__ == '__main__': help='Smoke tests the given release') parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards', help='Backwards compatibility version path to use to run compatibility tests against') + parser.add_argument('--check-only', dest='check_only', action='store_true', + help='Checks and reports for all requirements and then exits') parser.set_defaults(dryrun=True) parser.set_defaults(smoke=None) + parser.set_defaults(check_only=False) args = parser.parse_args() bwc_path = args.bwc src_branch = args.branch @@ -649,18 +698,19 @@ if __name__ == '__main__': build = not args.smoke smoke_test_version = args.smoke + check_environment_and_commandline_tools(args.check_only) + + # we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml + print_sonatype_notice() + + # we require to build with 1.7 + verify_java_version('1.7') + verify_mvn_java_version('1.7', MVN) + if os.path.exists(LOG): raise RuntimeError('please remove old release log %s first' % LOG) - check_gpg_credentials() - check_command_exists('gpg', 'gpg --version') - check_command_exists('expect', 'expect -v') - if not dry_run: - check_s3_credentials() - check_command_exists('createrepo', 'createrepo --version') - check_command_exists('s3cmd', 's3cmd --version') - check_command_exists('apt-ftparchive', 'apt-ftparchive --version') print('WARNING: dryrun is set to "false" - this will push and publish the release') input('Press Enter to continue...') From 820314db4e7e631880fcbad590b6abd822d20f14 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 26 May 2015 13:29:58 +0200 Subject: [PATCH 014/123] Change project.name to project.artifactId in 
bin/elasticsearch script because of 60519911b4e50bcc958c924a768dca2ae618101b --- bin/elasticsearch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/elasticsearch b/bin/elasticsearch index 23591222fb0..bd324a7b5c4 100755 --- a/bin/elasticsearch +++ b/bin/elasticsearch @@ -57,7 +57,7 @@ # Maven will replace the project.name with elasticsearch below. If that # hasn't been done, we assume that this is not a packaged version and the # user has forgotten to run Maven to create a package. -IS_PACKAGED_VERSION='${project.name}' +IS_PACKAGED_VERSION='${project.artifactId}' if [ "$IS_PACKAGED_VERSION" != "elasticsearch" ]; then cat >&2 << EOF Error: You must build the project with Maven or download a pre-built package From 44fe99a3a878ce88a00ba963c53bf4a85a4410dc Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 26 May 2015 15:08:49 +0200 Subject: [PATCH 015/123] [TEST] make filter_path a default parameter in java rest runner Closes #11351 --- rest-api-spec/api/nodes.stats.json | 4 ---- rest-api-spec/api/search.json | 4 ---- .../org/elasticsearch/test/rest/client/RestClient.java | 9 +++++++-- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/rest-api-spec/api/nodes.stats.json b/rest-api-spec/api/nodes.stats.json index 478dc7c8465..0a0870020b3 100644 --- a/rest-api-spec/api/nodes.stats.json +++ b/rest-api-spec/api/nodes.stats.json @@ -56,10 +56,6 @@ "options" : ["node", "indices", "shards"], "default" : "node" }, - "filter_path": { - "type" : "list", - "description" : "A comma-separated list of fields to include in the returned response" - }, "types" : { "type" : "list", "description" : "A comma-separated list of document types for the `indexing` index metric" diff --git a/rest-api-spec/api/search.json b/rest-api-spec/api/search.json index 2d37ce4432f..e3c286c842c 100644 --- a/rest-api-spec/api/search.json +++ b/rest-api-spec/api/search.json @@ -72,10 +72,6 @@ "type" : "boolean", "description" : "Specify whether query terms should be 
lowercased" }, - "filter_path": { - "type" : "list", - "description" : "A comma-separated list of fields to include in the returned response" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/src/test/java/org/elasticsearch/test/rest/client/RestClient.java b/src/test/java/org/elasticsearch/test/rest/client/RestClient.java index 495fabc8e7b..b6222948fdb 100644 --- a/src/test/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/src/test/java/org/elasticsearch/test/rest/client/RestClient.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import com.google.common.collect.Sets; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; @@ -41,6 +42,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; /** @@ -50,6 +52,8 @@ import java.util.concurrent.TimeUnit; public class RestClient implements Closeable { private static final ESLogger logger = Loggers.getLogger(RestClient.class); + //query_string params that don't need to be declared in the spec, thay are supported by default + private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); private final RestSpec restSpec; private final CloseableHttpClient httpClient; @@ -172,10 +176,11 @@ public class RestClient implements Closeable { if (restApi.getPathParts().contains(entry.getKey())) { pathParts.put(entry.getKey(), entry.getValue()); } else { - if (!restApi.getParams().contains(entry.getKey())) { + if (restApi.getParams().contains(entry.getKey()) || 
ALWAYS_ACCEPTED_QUERY_STRING_PARAMS.contains(entry.getKey())) { + httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); + } else { throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api"); } - httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } } } From 9d1f6f7615f329c90b52c48c0e18819abb190e4e Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Tue, 26 May 2015 09:54:44 -0400 Subject: [PATCH 016/123] a few more ImmutableSettings -> Settings --- src/main/java/org/elasticsearch/node/NodeBuilder.java | 2 +- .../{ImmutableSettingsTests.java => SettingsTests.java} | 2 +- .../org/elasticsearch/common/settings/bar/BarTestClass.java | 2 +- .../org/elasticsearch/common/settings/foo/FooTestClass.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) rename src/test/java/org/elasticsearch/common/settings/{ImmutableSettingsTests.java => SettingsTests.java} (99%) diff --git a/src/main/java/org/elasticsearch/node/NodeBuilder.java b/src/main/java/org/elasticsearch/node/NodeBuilder.java index edd89efbee4..9107cf0833e 100644 --- a/src/main/java/org/elasticsearch/node/NodeBuilder.java +++ b/src/main/java/org/elasticsearch/node/NodeBuilder.java @@ -45,7 +45,7 @@ import org.elasticsearch.common.settings.Settings; *

*

  * Node node = NodeBuilder.nodeBuilder()
- *                      .settings(ImmutableSettings.settingsBuilder().put("node.data", false)
+ *                      .settings(Settings.settingsBuilder().put("node.data", false)
  *                      .node();
  * 
*

diff --git a/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java b/src/test/java/org/elasticsearch/common/settings/SettingsTests.java similarity index 99% rename from src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java rename to src/test/java/org/elasticsearch/common/settings/SettingsTests.java index f8a542a9cc6..c8334ba0691 100644 --- a/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java +++ b/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.*; /** */ -public class ImmutableSettingsTests extends ElasticsearchTestCase { +public class SettingsTests extends ElasticsearchTestCase { @Test public void testCamelCaseSupport() { diff --git a/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java index d4d5d14a86d..8c7b0c1f255 100644 --- a/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java +++ b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java @@ -19,6 +19,6 @@ package org.elasticsearch.common.settings.bar; -//used in ImmutableSettingsTest +//used in SettingsTest public class BarTestClass { } diff --git a/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java index 36f152778b4..6d8ca4a7986 100644 --- a/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java +++ b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java @@ -19,6 +19,6 @@ package org.elasticsearch.common.settings.foo; -// used in ImmutableSettingsTest +// used in SettingsTest public class FooTestClass { } From 045f01c085faa32d1ded248d69726197f610bda4 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Tue, 26 May 2015 17:44:52 +0200 Subject: [PATCH 017/123] Infra for deprecation logging Add support for a specific deprecation 
logging that can be used to turn on in order to notify users of a specific feature, flag, setting, parameter, ... being deprecated. The deprecation logger logs with a "deprecation." prefix logge (or "org.elasticsearch.deprecation." if full name is used), and outputs the logging to a dedicated deprecation log file. Deprecation logging are logged under the DEBUG category. The idea is not to enabled them by default (under WARN or ERROR) when running embedded in another application. By default they are turned off (INFO), in order to turn it on, the "deprecation" category need to be set to DEBUG. This can be set in the logging file or using the cluster update settings API, see the documentation Closes #11033 --- config/logging.yml | 13 +++++ docs/reference/setup/configuration.asciidoc | 17 +++++++ .../common/component/AbstractComponent.java | 5 +- .../common/logging/DeprecationLogger.java | 51 +++++++++++++++++++ .../common/logging/ESLoggerFactory.java | 8 +++ .../logging/log4j/Log4jESLoggerFactory.java | 1 - .../index/AbstractIndexComponent.java | 5 +- .../shard/AbstractIndexShardComponent.java | 5 +- .../common/logging/jdk/JDKESLoggerTests.java | 1 - .../logging/log4j/Log4jESLoggerTests.java | 23 ++++++++- 10 files changed, 121 insertions(+), 8 deletions(-) create mode 100644 src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java diff --git a/config/logging.yml b/config/logging.yml index a76eb175fc4..035106eeec6 100644 --- a/config/logging.yml +++ b/config/logging.yml @@ -4,6 +4,10 @@ rootLogger: ${es.logger.level}, console, file logger: # log action execution errors for easier debugging action: DEBUG + + # deprecation logging, turn to DEBUG to see them + deprecation: INFO, deprecation_log_file + # reduce the logging for aws, too much is logged under the default INFO com.amazonaws: WARN org.apache.http: INFO @@ -24,6 +28,7 @@ logger: additivity: index.search.slowlog: false index.indexing.slowlog: false + deprecation: false appender: console: @@ -51,6 
+56,14 @@ appender: #type: pattern #conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" + deprecation_log_file: + type: dailyRollingFile + file: ${path.logs}/${cluster.name}_deprecation.log + datePattern: "'.'yyyy-MM-dd" + layout: + type: pattern + conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" + index_search_slow_log_file: type: dailyRollingFile file: ${path.logs}/${cluster.name}_index_search_slowlog.log diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index eed595e25ba..c6ae144e78e 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -337,3 +337,20 @@ the http://logging.apache.org/log4j/1.2/manual.html[log4j documentation]. Additional Appenders and other logging classes provided by http://logging.apache.org/log4j/extras/[log4j-extras] are also available, out of the box. + +==== Deprecation logging + +In addition to regular logging, Elasticsearch allows you to enable logging +of deprecated actions. For example this allows you to determine early, if +you need to migrate certain functionality in the future. By default, +deprecation logging is disabled. You can enable it in the `config/logging.yml` +file by setting the deprecation log level to `DEBUG`. + +[source,yaml] +-------------------------------------------------- +deprecation: DEBUG, deprecation_log_file +-------------------------------------------------- + +This will create a daily rolling deprecation log file in your log directory. +Check this file regularly, especially when you intend to upgrade to a new +major version. 
diff --git a/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 353d27747cd..a31bf119402 100644 --- a/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.component; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -29,16 +30,18 @@ import org.elasticsearch.common.settings.Settings; public abstract class AbstractComponent { protected final ESLogger logger; - + protected final DeprecationLogger deprecationLogger; protected final Settings settings; public AbstractComponent(Settings settings) { this.logger = Loggers.getLogger(getClass(), settings); + this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; } public AbstractComponent(Settings settings, Class customClass) { this.logger = Loggers.getLogger(customClass, settings); + this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; } diff --git a/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java new file mode 100644 index 00000000000..870b5f61466 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +/** + * A logger that logs deprecation notices. + */ +public class DeprecationLogger { + + private final ESLogger logger; + + /** + * Creates a new deprecation logger based on the parent logger. Automatically + * prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.", + * it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain + * the "org.elasticsearch" namespace. + */ + public DeprecationLogger(ESLogger parentLogger) { + String name = parentLogger.getName(); + if (name.startsWith("org.elasticsearch")) { + name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation."); + } else { + name = "deprecation." + name; + } + this.logger = ESLoggerFactory.getLogger(parentLogger.getPrefix(), name); + } + + /** + * Logs a deprecated message. + */ + public void deprecated(String msg, Object... 
params) { + logger.debug(msg, params); + } +} diff --git a/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 67434293b6e..a0645c6a4d3 100644 --- a/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -66,6 +66,14 @@ public abstract class ESLoggerFactory { return defaultFactory.newInstance(name.intern()); } + public static DeprecationLogger getDeprecationLogger(String name) { + return new DeprecationLogger(getLogger(name)); + } + + public static DeprecationLogger getDeprecationLogger(String prefix, String name) { + return new DeprecationLogger(getLogger(prefix, name)); + } + public static ESLogger getRootLogger() { return defaultFactory.rootLogger(); } diff --git a/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java b/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java index 1bfb2d83d6a..b95e0987c90 100644 --- a/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java +++ b/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.logging.log4j; -import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; diff --git a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 6b2cd15a18d..3cfecc4cf07 100644 --- a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -30,9 +31,8 @@ import org.elasticsearch.index.settings.IndexSettings; public abstract class AbstractIndexComponent implements IndexComponent { protected final ESLogger logger; - + protected final DeprecationLogger deprecationLogger; protected final Index index; - protected final Settings indexSettings; /** @@ -45,6 +45,7 @@ public abstract class AbstractIndexComponent implements IndexComponent { this.index = index; this.indexSettings = indexSettings; this.logger = Loggers.getLogger(getClass(), indexSettings, index); + this.deprecationLogger = new DeprecationLogger(logger); } @Override diff --git a/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java b/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java index 9f9bdd5bde2..bb19dd49080 100644 --- a/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java +++ b/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.shard; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -30,15 +31,15 @@ import org.elasticsearch.index.settings.IndexSettings; public abstract class AbstractIndexShardComponent implements IndexShardComponent { protected final ESLogger logger; - + protected final DeprecationLogger deprecationLogger; protected final ShardId shardId; - protected final Settings indexSettings; protected AbstractIndexShardComponent(ShardId shardId, @IndexSettings Settings indexSettings) { this.shardId = shardId; this.indexSettings = indexSettings; this.logger = Loggers.getLogger(getClass(), indexSettings, shardId); + this.deprecationLogger = new DeprecationLogger(logger); } @Override diff --git 
a/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java b/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java index 2588912467b..d236ad5ecf8 100644 --- a/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java +++ b/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java @@ -91,7 +91,6 @@ public class JDKESLoggerTests extends ElasticsearchTestCase { assertThat(record.getMessage(), equalTo("This is a trace")); assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); assertThat(record.getSourceMethodName(), equalTo("locationInfoTest")); - } private static class TestHandler extends Handler { diff --git a/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java index d854b2bd33d..f0d0c076eba 100644 --- a/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java +++ b/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java @@ -24,6 +24,7 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.spi.LocationInfo; import org.apache.log4j.spi.LoggingEvent; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ElasticsearchTestCase; @@ -42,6 +43,8 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { private ESLogger esTestLogger; private TestAppender testAppender; private String testLevel; + private DeprecationLogger deprecationLogger; + private TestAppender deprecationAppender; @Override public void setUp() throws Exception { @@ -61,6 +64,13 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { assertThat(testLogger.getLevel(), equalTo(Level.TRACE)); testAppender = new TestAppender(); testLogger.addAppender(testAppender); + + // 
deprecation setup, needs to be set to debug to log + deprecationLogger = Log4jESLoggerFactory.getDeprecationLogger("test"); + deprecationAppender = new TestAppender(); + ESLogger logger = Log4jESLoggerFactory.getLogger("deprecation.test"); + logger.setLevel("DEBUG"); + (((Log4jESLogger) logger).logger()).addAppender(deprecationAppender); } @Override @@ -70,6 +80,8 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { esTestLogger.setLevel(testLevel); Logger testLogger = ((Log4jESLogger) esTestLogger).logger(); testLogger.removeAppender(testAppender); + Logger deprecationLogger = ((Log4jESLogger) Log4jESLoggerFactory.getLogger("deprecation.test")).logger(); + deprecationLogger.removeAppender(deprecationAppender); } @Test @@ -122,7 +134,16 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { assertThat(locationInfo, notNullValue()); assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("locationInfoTest")); - + } + + @Test + public void testDeprecationLogger() { + deprecationLogger.deprecated("This is a deprecation message"); + List deprecationEvents = deprecationAppender.getEvents(); + LoggingEvent event = deprecationEvents.get(0); + assertThat(event, notNullValue()); + assertThat(event.getLevel(), equalTo(Level.DEBUG)); + assertThat(event.getRenderedMessage(), equalTo("This is a deprecation message")); } private static class TestAppender extends AppenderSkeleton { From 1fa21a76cf6d11635133200c02b173376696aee5 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Tue, 26 May 2015 18:16:12 +0200 Subject: [PATCH 018/123] Documentation: Fix elasticsearch documentation build The commit for closing #11033 was not building the asciidoc documentation. 
--- docs/reference/setup/configuration.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index c6ae144e78e..b822b223ce5 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -338,6 +338,8 @@ Additional Appenders and other logging classes provided by http://logging.apache.org/log4j/extras/[log4j-extras] are also available, out of the box. +[float] +[[deprecation-logging]] ==== Deprecation logging In addition to regular logging, Elasticsearch allows you to enable logging From 6c81a8daf390ac97dbeefddbaea4493ee4061fe9 Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 16 May 2015 12:58:27 +0200 Subject: [PATCH 019/123] Internal: count api to become a shortcut to the search api The count api used to have its own execution path, although it would do the same (up to bugs!) of the search api. This commit makes it a shortcut to the search api with size set to 0. The change is made in a backwards compatible manner, by leaving all of the java api code around too, given that you may not want to get back a whole SearchResponse when asking only for number of hits matching a query, also cause migrating from countResponse.getCount() to searchResponse.getHits().totalHits() doesn't look great from a user perspective. We can always decide to drop more code around the count api if we want to break backwards compatibility on the java api, making it a shortcut on the rest layer only. 
Closes #9117 Closes #11198 --- docs/reference/migration/migrate_2_0.asciidoc | 6 + .../elasticsearch/action/ActionModule.java | 3 - .../action/ActionRequestBuilder.java | 8 +- .../action/count/CountAction.java | 4 +- .../action/count/CountRequest.java | 49 +- .../action/count/CountRequestBuilder.java | 15 + .../action/count/CountResponse.java | 31 +- .../action/count/ShardCountRequest.java | 137 --- .../action/count/ShardCountResponse.java | 71 -- .../action/count/TransportCountAction.java | 193 ---- .../broadcast/BroadcastOperationResponse.java | 12 +- .../client/support/AbstractClient.java | 19 +- .../search/builder/SearchSourceBuilder.java | 20 +- .../action/IndicesRequestTests.java | 14 - .../action/count/CountRequestTests.java | 110 +++ .../action/count/CountResponseTests.java | 51 ++ .../broadcast/BroadcastActionsTests.java | 15 +- .../count/query/CountQueryTests.java | 843 ------------------ .../document/DocumentActionsTests.java | 15 +- 19 files changed, 263 insertions(+), 1353 deletions(-) delete mode 100644 src/main/java/org/elasticsearch/action/count/ShardCountRequest.java delete mode 100644 src/main/java/org/elasticsearch/action/count/ShardCountResponse.java delete mode 100644 src/main/java/org/elasticsearch/action/count/TransportCountAction.java create mode 100644 src/test/java/org/elasticsearch/action/count/CountRequestTests.java create mode 100644 src/test/java/org/elasticsearch/action/count/CountResponseTests.java delete mode 100644 src/test/java/org/elasticsearch/count/query/CountQueryTests.java diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc index 2e7fcfe828e..34b022b95d2 100644 --- a/docs/reference/migration/migrate_2_0.asciidoc +++ b/docs/reference/migration/migrate_2_0.asciidoc @@ -404,6 +404,12 @@ The `count` search type has been deprecated. All benefits from this search type now be achieved by using the `query_then_fetch` search type (which is the default) and setting `size` to `0`. 
+=== The count api internally uses the search api + +The count api is now a shortcut to the search api with `size` set to 0. As a +result, a total failure will result in an exception being returned rather +than a normal response with `count` set to `0` and shard failures. + === JSONP support JSONP callback support has now been removed. CORS should be used to access Elasticsearch diff --git a/src/main/java/org/elasticsearch/action/ActionModule.java b/src/main/java/org/elasticsearch/action/ActionModule.java index 1c273f67b5b..0decb393405 100644 --- a/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/src/main/java/org/elasticsearch/action/ActionModule.java @@ -122,8 +122,6 @@ import org.elasticsearch.action.admin.indices.warmer.put.TransportPutWarmerActio import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; -import org.elasticsearch.action.count.CountAction; -import org.elasticsearch.action.count.TransportCountAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.exists.ExistsAction; @@ -273,7 +271,6 @@ public class ActionModule extends AbstractModule { registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class, TransportShardMultiTermsVectorAction.class); registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class); - registerAction(CountAction.INSTANCE, TransportCountAction.class); registerAction(ExistsAction.INSTANCE, TransportExistsAction.class); registerAction(SuggestAction.INSTANCE, TransportSuggestAction.class); registerAction(UpdateAction.INSTANCE, TransportUpdateAction.class); diff --git a/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index e675e8befed..aaf5c9e48fd 100644 --- 
a/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -20,15 +20,9 @@ package org.elasticsearch.action; import com.google.common.base.Preconditions; - -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.PlainListenableActionFuture; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.threadpool.ThreadPool; /** @@ -87,7 +81,7 @@ public abstract class ActionRequestBuilder listener) { + public void execute(ActionListener listener) { client.execute(action, beforeExecute(request), listener); } diff --git a/src/main/java/org/elasticsearch/action/count/CountAction.java b/src/main/java/org/elasticsearch/action/count/CountAction.java index 4c7c8a2fcc2..4cc6210b60c 100644 --- a/src/main/java/org/elasticsearch/action/count/CountAction.java +++ b/src/main/java/org/elasticsearch/action/count/CountAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; /** + * Action that shortcuts to the search api with size set to 0. It doesn't have a corresponding + * transport action, it just runs the search api internally. */ public class CountAction extends Action { @@ -35,7 +37,7 @@ public class CountAction extends Action { private String[] types = Strings.EMPTY_ARRAY; - long nowInMillis; private int terminateAfter = DEFAULT_TERMINATE_AFTER; - CountRequest() { - } - /** * Constructs a new count request against the provided indices. No indices provided means it will * run against all indices. 
@@ -81,12 +78,6 @@ public class CountRequest extends BroadcastOperationRequest { super(indices); } - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = super.validate(); - return validationException; - } - /** * The minimum score of the documents to include in the count. */ @@ -121,6 +112,7 @@ public class CountRequest extends BroadcastOperationRequest { /** * The source to execute in the form of a map. */ + @SuppressWarnings("unchecked") public CountRequest source(Map querySource) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -228,24 +220,12 @@ public class CountRequest extends BroadcastOperationRequest { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - minScore = in.readFloat(); - routing = in.readOptionalString(); - preference = in.readOptionalString(); - source = in.readBytesReference(); - types = in.readStringArray(); - terminateAfter = in.readVInt(); + throw new UnsupportedOperationException("CountRequest doesn't support being sent over the wire, just a shortcut to the search api"); } @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeFloat(minScore); - out.writeOptionalString(routing); - out.writeOptionalString(preference); - out.writeBytesReference(source); - out.writeStringArray(types); - out.writeVInt(terminateAfter); + throw new UnsupportedOperationException("CountRequest doesn't support being sent over the wire, just a shortcut to the search api"); } @Override @@ -258,4 +238,23 @@ public class CountRequest extends BroadcastOperationRequest { } return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", source[" + sSource + "]"; } + + public SearchRequest toSearchRequest() { + SearchRequest searchRequest = new SearchRequest(indices()); + searchRequest.indicesOptions(indicesOptions()); + searchRequest.types(types()); + 
searchRequest.routing(routing()); + searchRequest.preference(preference()); + searchRequest.source(source()); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(0); + if (minScore() != DEFAULT_MIN_SCORE) { + searchSourceBuilder.minScore(minScore()); + } + if (terminateAfter() != DEFAULT_TERMINATE_AFTER) { + searchSourceBuilder.terminateAfter(terminateAfter()); + } + searchRequest.extraSource(searchSourceBuilder); + return searchRequest; + } } diff --git a/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java b/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java index fd30a2d3fc7..3716bf21a68 100644 --- a/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java @@ -20,6 +20,10 @@ package org.elasticsearch.action.count; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.DelegatingActionListener; import org.elasticsearch.action.support.QuerySourceBuilder; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; @@ -145,6 +149,17 @@ public class CountRequestBuilder extends BroadcastOperationRequestBuilder listener) { + CountRequest countRequest = beforeExecute(request); + client.execute(SearchAction.INSTANCE, countRequest.toSearchRequest(), new DelegatingActionListener(listener) { + @Override + protected CountResponse getDelegatedFromInstigator(SearchResponse response) { + return new CountResponse(response); + } + }); + } + @Override public String toString() { if (sourceBuilder != null) { diff --git a/src/main/java/org/elasticsearch/action/count/CountResponse.java b/src/main/java/org/elasticsearch/action/count/CountResponse.java index 
394d266eedc..91fd77e596b 100644 --- a/src/main/java/org/elasticsearch/action/count/CountResponse.java +++ b/src/main/java/org/elasticsearch/action/count/CountResponse.java @@ -19,30 +19,27 @@ package org.elasticsearch.action.count; -import java.io.IOException; -import java.util.List; -import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; +import java.io.IOException; +import java.util.Arrays; + /** * The response of the count action. */ public class CountResponse extends BroadcastOperationResponse { - private boolean terminatedEarly; - private long count; + private final boolean terminatedEarly; + private final long count; - CountResponse() { - - } - - CountResponse(long count, boolean hasTerminatedEarly, int totalShards, int successfulShards, int failedShards, List shardFailures) { - super(totalShards, successfulShards, failedShards, shardFailures); - this.count = count; - this.terminatedEarly = hasTerminatedEarly; + public CountResponse(SearchResponse searchResponse) { + super(searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), Arrays.asList(searchResponse.getShardFailures())); + this.count = searchResponse.getHits().totalHits(); + this.terminatedEarly = searchResponse.isTerminatedEarly() != null && searchResponse.isTerminatedEarly(); } /** @@ -65,15 +62,11 @@ public class CountResponse extends BroadcastOperationResponse { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - count = in.readVLong(); - terminatedEarly = in.readBoolean(); + throw new UnsupportedOperationException("CountResponse doesn't support being sent over the wire, just a shortcut to the search api"); } @Override 
public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVLong(count); - out.writeBoolean(terminatedEarly); + throw new UnsupportedOperationException("CountResponse doesn't support being sent over the wire, just a shortcut to the search api"); } } diff --git a/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java b/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java deleted file mode 100644 index 14a8b0026f9..00000000000 --- a/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; - -/** - * Internal count request executed directly against a specific index shard. - */ -class ShardCountRequest extends BroadcastShardOperationRequest { - - private float minScore; - private int terminateAfter; - - private BytesReference querySource; - - private String[] types = Strings.EMPTY_ARRAY; - - private long nowInMillis; - - @Nullable - private String[] filteringAliases; - - ShardCountRequest() { - - } - - ShardCountRequest(ShardId shardId, @Nullable String[] filteringAliases, CountRequest request) { - super(shardId, request); - this.minScore = request.minScore(); - this.querySource = request.source(); - this.types = request.types(); - this.filteringAliases = filteringAliases; - this.nowInMillis = request.nowInMillis; - this.terminateAfter = request.terminateAfter(); - } - - public float minScore() { - return minScore; - } - - public BytesReference querySource() { - return querySource; - } - - public String[] types() { - return this.types; - } - - public String[] filteringAliases() { - return filteringAliases; - } - - public long nowInMillis() { - return this.nowInMillis; - } - - public int terminateAfter() { - return this.terminateAfter; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - minScore = in.readFloat(); - - querySource = in.readBytesReference(); - - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new 
String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); - } - } - int aliasesSize = in.readVInt(); - if (aliasesSize > 0) { - filteringAliases = new String[aliasesSize]; - for (int i = 0; i < aliasesSize; i++) { - filteringAliases[i] = in.readString(); - } - } - nowInMillis = in.readVLong(); - terminateAfter = in.readVInt(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeFloat(minScore); - - out.writeBytesReference(querySource); - - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); - } - if (filteringAliases != null) { - out.writeVInt(filteringAliases.length); - for (String alias : filteringAliases) { - out.writeString(alias); - } - } else { - out.writeVInt(0); - } - out.writeVLong(nowInMillis); - out.writeVInt(terminateAfter); - } -} diff --git a/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java b/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java deleted file mode 100644 index 1847ba0cb02..00000000000 --- a/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * Internal count response of a shard count request executed directly against a specific shard. - * - * - */ -class ShardCountResponse extends BroadcastShardOperationResponse { - - private long count; - private boolean terminatedEarly; - - ShardCountResponse() { - - } - - ShardCountResponse(ShardId shardId, long count, boolean terminatedEarly) { - super(shardId); - this.count = count; - this.terminatedEarly = terminatedEarly; - } - - public long getCount() { - return this.count; - } - - public boolean terminatedEarly() { - return this.terminatedEarly; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - count = in.readVLong(); - terminatedEarly = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVLong(count); - out.writeBoolean(terminatedEarly); - } -} diff --git a/src/main/java/org/elasticsearch/action/count/TransportCountAction.java b/src/main/java/org/elasticsearch/action/count/TransportCountAction.java deleted file mode 100644 index 93e0a378ed6..00000000000 --- a/src/main/java/org/elasticsearch/action/count/TransportCountAction.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; -import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.DefaultSearchContext; -import org.elasticsearch.search.internal.SearchContext; -import 
org.elasticsearch.search.internal.ShardSearchLocalRequest; -import org.elasticsearch.search.query.QueryPhaseExecutionException; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReferenceArray; - -import static com.google.common.collect.Lists.newArrayList; -import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; - -/** - * - */ -public class TransportCountAction extends TransportBroadcastOperationAction { - - private final IndicesService indicesService; - private final ScriptService scriptService; - private final PageCacheRecycler pageCacheRecycler; - private final BigArrays bigArrays; - - @Inject - public TransportCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - IndicesService indicesService, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, - BigArrays bigArrays, ActionFilters actionFilters) { - super(settings, CountAction.NAME, threadPool, clusterService, transportService, actionFilters, - CountRequest.class, ShardCountRequest.class, ThreadPool.Names.SEARCH); - this.indicesService = indicesService; - this.scriptService = scriptService; - this.pageCacheRecycler = pageCacheRecycler; - this.bigArrays = bigArrays; - } - - @Override - protected void doExecute(CountRequest request, ActionListener listener) { - request.nowInMillis = System.currentTimeMillis(); - super.doExecute(request, listener); - } - - @Override - protected ShardCountRequest newShardRequest(int numShards, ShardRouting shard, CountRequest request) { - String[] filteringAliases = clusterService.state().metaData().filteringAliases(shard.index(), request.indices()); - return new ShardCountRequest(shard.shardId(), filteringAliases, request); - } - - @Override - protected ShardCountResponse newShardResponse() { - return new 
ShardCountResponse(); - } - - @Override - protected GroupShardsIterator shards(ClusterState clusterState, CountRequest request, String[] concreteIndices) { - Map> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices()); - return clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference()); - } - - @Override - protected ClusterBlockException checkGlobalBlock(ClusterState state, CountRequest request) { - return state.blocks().globalBlockedException(ClusterBlockLevel.READ); - } - - @Override - protected ClusterBlockException checkRequestBlock(ClusterState state, CountRequest countRequest, String[] concreteIndices) { - return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices); - } - - @Override - protected CountResponse newResponse(CountRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) { - int successfulShards = 0; - int failedShards = 0; - long count = 0; - boolean terminatedEarly = false; - List shardFailures = null; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - // simply ignore non active shards - } else if (shardResponse instanceof BroadcastShardOperationFailedException) { - failedShards++; - if (shardFailures == null) { - shardFailures = newArrayList(); - } - shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); - } else { - count += ((ShardCountResponse) shardResponse).getCount(); - if (((ShardCountResponse) shardResponse).terminatedEarly()) { - terminatedEarly = true; - } - successfulShards++; - } - } - return new CountResponse(count, terminatedEarly, shardsResponses.length(), successfulShards, failedShards, shardFailures); - } - - @Override - protected ShardCountResponse shardOperation(ShardCountRequest request) { - IndexService indexService 
= indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard indexShard = indexService.shardSafe(request.shardId().id()); - - SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id()); - SearchContext context = new DefaultSearchContext(0, - new ShardSearchLocalRequest(request.types(), request.nowInMillis(), request.filteringAliases()), - shardTarget, indexShard.acquireSearcher("count"), indexService, indexShard, - scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter()); - SearchContext.setCurrent(context); - - try { - // TODO: min score should move to be "null" as a value that is not initialized... - if (request.minScore() != -1) { - context.minimumScore(request.minScore()); - } - BytesReference source = request.querySource(); - if (source != null && source.length() > 0) { - try { - QueryParseContext.setTypes(request.types()); - context.parsedQuery(indexService.queryParserService().parseQuery(source)); - } finally { - QueryParseContext.removeTypes(); - } - } - final boolean hasTerminateAfterCount = request.terminateAfter() != DEFAULT_TERMINATE_AFTER; - boolean terminatedEarly = false; - context.preProcess(); - try { - long count; - if (hasTerminateAfterCount) { - final Lucene.EarlyTerminatingCollector countCollector = - Lucene.createCountBasedEarlyTerminatingCollector(request.terminateAfter()); - terminatedEarly = Lucene.countWithEarlyTermination(context.searcher(), context.query(), countCollector); - count = countCollector.count(); - } else { - count = Lucene.count(context.searcher(), context.query()); - } - return new ShardCountResponse(request.shardId(), count, terminatedEarly); - } catch (Exception e) { - throw new QueryPhaseExecutionException(context, "failed to execute count", e); - } - } finally { - // this will also release the index searcher - context.close(); - SearchContext.removeCurrent(); - } - } -} diff --git 
a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java index 2fd4f97ca29..e8e2a2aa0ce 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java @@ -19,16 +19,16 @@ package org.elasticsearch.action.support.broadcast; -import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; - -import java.io.IOException; -import java.util.List; - import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; + /** * Base class for all broadcast operation based responses. 
*/ @@ -42,7 +42,7 @@ public abstract class BroadcastOperationResponse extends ActionResponse { protected BroadcastOperationResponse() { } - protected BroadcastOperationResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { + protected BroadcastOperationResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { this.totalShards = totalShards; this.successfulShards = successfulShards; this.failedShards = failedShards; diff --git a/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 18b0a377ac0..75e8ada560b 100644 --- a/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -259,6 +259,8 @@ import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; +import org.elasticsearch.action.support.AdapterActionFuture; +import org.elasticsearch.action.support.DelegatingActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.termvectors.*; @@ -335,7 +337,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client protected abstract > void doExecute(final Action action, final Request request, ActionListener listener); - @Override public ActionFuture index(final IndexRequest request) { return execute(IndexAction.INSTANCE, request); @@ -594,12 +595,24 @@ public abstract class AbstractClient extends AbstractComponent implements Client @Override public ActionFuture count(final CountRequest request) { - return execute(CountAction.INSTANCE, request); + AdapterActionFuture actionFuture = new AdapterActionFuture() { + @Override + protected 
CountResponse convert(SearchResponse listenerResponse) { + return new CountResponse(listenerResponse); + } + }; + execute(SearchAction.INSTANCE, request.toSearchRequest(), actionFuture); + return actionFuture; } @Override public void count(final CountRequest request, final ActionListener listener) { - execute(CountAction.INSTANCE, request, listener); + execute(SearchAction.INSTANCE, request.toSearchRequest(), new DelegatingActionListener(listener) { + @Override + protected CountResponse getDelegatedFromInstigator(SearchResponse response) { + return new CountResponse(response); + } + }); } @Override diff --git a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 57dfe4a362b..55e74d9303d 100644 --- a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -47,10 +47,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; /** * A search source builder allowing to easily build search source. Simple @@ -188,6 +185,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Constructs a new search source builder with a query from a map. */ + @SuppressWarnings("unchecked") public SearchSourceBuilder query(Map query) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -251,6 +249,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Constructs a new search source builder with a query from a map. 
*/ + @SuppressWarnings("unchecked") public SearchSourceBuilder postFilter(Map postFilter) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -415,9 +414,6 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Set the rescore window size for rescores that don't specify their window. - * - * @param defaultRescoreWindowSize - * @return */ public SearchSourceBuilder defaultRescoreWindowSize(int defaultRescoreWindowSize) { this.defaultRescoreWindowSize = defaultRescoreWindowSize; @@ -427,6 +423,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Sets a raw (xcontent / json) addAggregation. */ + @SuppressWarnings("unchecked") public SearchSourceBuilder aggregations(Map aggregations) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -482,9 +479,6 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Indicates whether the response should contain the stored _source for * every hit - * - * @param fetch - * @return */ public SearchSourceBuilder fetchSource(boolean fetch) { if (this.fetchSourceContext == null) { @@ -563,9 +557,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { if (fieldNames == null) { fieldNames = new ArrayList<>(); } - for (String field : fields) { - fieldNames.add(field); - } + Collections.addAll(fieldNames, fields); return this; } @@ -777,7 +769,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { } if (trackScores) { - builder.field("track_scores", trackScores); + builder.field("track_scores", true); } if (indexBoost != null) { diff --git a/src/test/java/org/elasticsearch/action/IndicesRequestTests.java b/src/test/java/org/elasticsearch/action/IndicesRequestTests.java index 62df5e8fd54..7825d62fe81 100644 --- a/src/test/java/org/elasticsearch/action/IndicesRequestTests.java +++ b/src/test/java/org/elasticsearch/action/IndicesRequestTests.java @@ -57,8 +57,6 @@ import 
org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.count.CountAction; -import org.elasticsearch.action.count.CountRequest; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.exists.ExistsAction; @@ -356,18 +354,6 @@ public class IndicesRequestTests extends ElasticsearchIntegrationTest { assertIndicesSubset(indices, multiGetShardAction); } - @Test - public void testCount() { - String countShardAction = CountAction.NAME + "[s]"; - interceptTransportActions(countShardAction); - - CountRequest countRequest = new CountRequest(randomIndicesOrAliases()); - internalCluster().clientNodeClient().count(countRequest).actionGet(); - - clearInterceptedActions(); - assertSameIndices(countRequest, countShardAction); - } - @Test public void testExists() { String existsShardAction = ExistsAction.NAME + "[s]"; diff --git a/src/test/java/org/elasticsearch/action/count/CountRequestTests.java b/src/test/java/org/elasticsearch/action/count/CountRequestTests.java new file mode 100644 index 00000000000..a972ff56d12 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/count/CountRequestTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.count; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.QuerySourceBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; + +public class CountRequestTests extends ElasticsearchTestCase { + + @Test + public void testToSearchRequest() { + CountRequest countRequest; + if (randomBoolean()) { + countRequest = new CountRequest(randomStringArray()); + } else { + countRequest = new CountRequest(); + } + if (randomBoolean()) { + countRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + } + if (randomBoolean()) { + countRequest.types(randomStringArray()); + } + if (randomBoolean()) { + countRequest.routing(randomStringArray()); + } + if (randomBoolean()) { + countRequest.preference(randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + countRequest.source(new QuerySourceBuilder().setQuery(QueryBuilders.termQuery("field", "value"))); + } + if (randomBoolean()) { + countRequest.minScore(randomFloat()); + } + if (randomBoolean()) { + 
countRequest.terminateAfter(randomIntBetween(1, 1000)); + } + + SearchRequest searchRequest = countRequest.toSearchRequest(); + assertThat(searchRequest.indices(), equalTo(countRequest.indices())); + assertThat(searchRequest.indicesOptions(), equalTo(countRequest.indicesOptions())); + assertThat(searchRequest.types(), equalTo(countRequest.types())); + assertThat(searchRequest.routing(), equalTo(countRequest.routing())); + assertThat(searchRequest.preference(), equalTo(countRequest.preference())); + + if (countRequest.source() == null) { + assertThat(searchRequest.source(), nullValue()); + } else { + Map sourceMap = XContentHelper.convertToMap(searchRequest.source(), false).v2(); + assertThat(sourceMap.size(), equalTo(1)); + assertThat(sourceMap.get("query"), notNullValue()); + } + + Map extraSourceMap = XContentHelper.convertToMap(searchRequest.extraSource(), false).v2(); + int count = 1; + assertThat((Integer)extraSourceMap.get("size"), equalTo(0)); + if (countRequest.minScore() == CountRequest.DEFAULT_MIN_SCORE) { + assertThat(extraSourceMap.get("min_score"), nullValue()); + } else { + assertThat(((Number)extraSourceMap.get("min_score")).floatValue(), equalTo(countRequest.minScore())); + count++; + } + if (countRequest.terminateAfter() == SearchContext.DEFAULT_TERMINATE_AFTER) { + assertThat(extraSourceMap.get("terminate_after"), nullValue()); + } else { + assertThat((Integer)extraSourceMap.get("terminate_after"), equalTo(countRequest.terminateAfter())); + count++; + } + assertThat(extraSourceMap.size(), equalTo(count)); + } + + private static String[] randomStringArray() { + int count = randomIntBetween(1, 5); + String[] indices = new String[count]; + for (int i = 0; i < count; i++) { + indices[i] = randomAsciiOfLengthBetween(1, 10); + } + return indices; + } +} diff --git a/src/test/java/org/elasticsearch/action/count/CountResponseTests.java b/src/test/java/org/elasticsearch/action/count/CountResponseTests.java new file mode 100644 index 
00000000000..bbe6c64edf3 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/count/CountResponseTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.count; + +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.search.internal.InternalSearchHits; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class CountResponseTests extends ElasticsearchTestCase { + + @Test + public void testFromSearchResponse() { + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(new InternalSearchHits(null, randomLong(), randomFloat()), null, null, randomBoolean(), randomBoolean()); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[randomIntBetween(0, 5)]; + for (int i = 0; i < shardSearchFailures.length; i++) { + shardSearchFailures[i] = new ShardSearchFailure(new IllegalArgumentException()); + } + SearchResponse searchResponse = new 
SearchResponse(internalSearchResponse, null, randomIntBetween(0, 100), randomIntBetween(0, 100), randomIntBetween(0, 100), shardSearchFailures); + + CountResponse countResponse = new CountResponse(searchResponse); + assertThat(countResponse.getTotalShards(), equalTo(searchResponse.getTotalShards())); + assertThat(countResponse.getSuccessfulShards(), equalTo(searchResponse.getSuccessfulShards())); + assertThat(countResponse.getFailedShards(), equalTo(searchResponse.getFailedShards())); + assertThat(countResponse.getShardFailures(), equalTo((ShardOperationFailedException[])searchResponse.getShardFailures())); + assertThat(countResponse.getCount(), equalTo(searchResponse.getHits().totalHits())); + assertThat(countResponse.terminatedEarly(), equalTo(searchResponse.isTerminatedEarly())); + } +} diff --git a/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java b/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java index 09b4b601ee3..baaa5045be7 100644 --- a/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java +++ b/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.broadcast; import com.google.common.base.Charsets; -import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.count.CountResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -33,7 +33,6 @@ import static org.elasticsearch.client.Requests.countRequest; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public 
class BroadcastActionsTests extends ElasticsearchIntegrationTest { @@ -72,14 +71,10 @@ public class BroadcastActionsTests extends ElasticsearchIntegrationTest { for (int i = 0; i < 5; i++) { // test failed (simply query that can't be parsed) - CountResponse countResponse = client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); - - assertThat(countResponse.getCount(), equalTo(0l)); - assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(numShards.numPrimaries)); - for (ShardOperationFailedException exp : countResponse.getShardFailures()) { - assertThat(exp.reason(), containsString("QueryParsingException")); + try { + client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); + } catch(SearchPhaseExecutionException e) { + assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries)); } } } diff --git a/src/test/java/org/elasticsearch/count/query/CountQueryTests.java b/src/test/java/org/elasticsearch/count/query/CountQueryTests.java deleted file mode 100644 index e693be7c8bb..00000000000 --- a/src/test/java/org/elasticsearch/count/query/CountQueryTests.java +++ /dev/null @@ -1,843 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.count.query; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.count.CountResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.CommonTermsQueryBuilder.Operator; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.MatchQueryBuilder.Type; -import org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WrapperQueryBuilder; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ElasticsearchIntegrationTest; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; -import org.junit.Test; - -import java.io.IOException; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.existsQuery; 
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.limitQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.missingQuery; -import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsQuery; -import static org.elasticsearch.index.query.QueryBuilders.typeQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class CountQueryTests extends ElasticsearchIntegrationTest { - - @Test - public void passQueryAsStringTest() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefresh(true).get(); - - CountResponse countResponse = client().prepareCount().setSource(new BytesArray("{ \"query\" : { \"term\" : { \"field1\" : \"value1_1\" }}}").array()).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testIndexOptions() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,index_options=docs")); - - 
client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get(); - assertHitCount(countResponse, 0l); - assertThat(countResponse.getFailedShards(), anyOf(equalTo(1), equalTo(2))); - assertThat(countResponse.getFailedShards(), equalTo(countResponse.getShardFailures().length)); - for (ShardOperationFailedException shardFailure : countResponse.getShardFailures()) { - assertThat(shardFailure.status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(shardFailure.reason(), containsString("[field \"field1\" was indexed without position data; cannot run PhraseQuery")); - } - } - - @Test - public void testCommonTermsQuery() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=whitespace") - .setSettings(SETTING_NUMBER_OF_SHARDS, 1)); - - indexRandom(true, - client().prepareIndex("test", "type1", "3").setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"), - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") ); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).get(); - assertHitCount(countResponse, 3l); - - countResponse = 
client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).get(); - assertHitCount(countResponse, 2l); - - // Default - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3)).get(); - assertHitCount(countResponse, 3l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the huge fox").lowFreqMinimumShouldMatch("2")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("3")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("4")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setSource(new BytesArray("{ \"query\" : { \"common\" : { \"field1\" : { \"query\" : \"the lazy fox brown\", \"cutoff_frequency\" : 1, \"minimum_should_match\" : { \"high_freq\" : 4 } } } } }").array()).get(); - assertHitCount(countResponse, 1l); - - // Default - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).analyzer("standard")).get(); - assertHitCount(countResponse, 3l); - // standard drops "the" since its a stopword - - // try the same with match query - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); - assertHitCount(countResponse, 2l); - - countResponse = 
client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.OR)).get(); - assertHitCount(countResponse, 3l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND).analyzer("stop")).get(); - assertHitCount(countResponse, 3l); - // standard drops "the" since its a stopword - - // try the same with multi match query - countResponse = client().prepareCount().setQuery(QueryBuilders.multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); - assertHitCount(countResponse, 3l); - } - - @Test - public void queryStringAnalyzedWildcard() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("value*").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("*ue*").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("*ue_1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("val*e_1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("v?l*e?1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testLowercaseExpandedTerms() { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); - refresh(); - - CountResponse countResponse = 
client().prepareCount().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(true)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(false)).get(); - assertHitCount(countResponse, 0l); - countResponse = client().prepareCount().setQuery(queryStringQuery("ValUE_*").lowercaseExpandedTerms(true)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("vAl*E_1")).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]")).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]").lowercaseExpandedTerms(false)).get(); - assertHitCount(countResponse, 0l); - } - - @Test //https://github.com/elasticsearch/elasticsearch/issues/3540 - public void testDateRangeInQueryString() { - //the mapping needs to be provided upfront otherwise we are not sure how many failures we get back - //as with dynamic mappings some shards might be lacking behind and parse a different query - assertAcked(prepareCreate("test").addMapping( - "type", "past", "type=date", "future", "type=date" - )); - ensureGreen(); - - NumShards test = getNumShards("test"); - - String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); - String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); - - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get(); - 
assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test").setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get(); - //D is an unsupported unit in date math - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(test.numPrimaries)); - assertThat(countResponse.getShardFailures().length, equalTo(test.numPrimaries)); - for (ShardOperationFailedException shardFailure : countResponse.getShardFailures()) { - assertThat(shardFailure.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(shardFailure.reason(), allOf(containsString("Failed to parse"), containsString("unit [D] not supported for date math"))); - } - } - - @Test - public void typeFilterTypeIndexedTests() throws Exception { - typeFilterTests("not_analyzed"); - } - - @Test - public void typeFilterTypeNotIndexedTests() throws Exception { - typeFilterTests("no"); - } - - private void typeFilterTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .addMapping("type2", jsonBuilder().startObject().startObject("type2") - .startObject("_type").field("index", index).endObject() - .endObject().endObject())); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "2").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "3").setSource("field1", "value1")); - - 
assertHitCount(client().prepareCount().setQuery(filteredQuery(matchAllQuery(), typeQuery("type1"))).get(), 2l); - assertHitCount(client().prepareCount().setQuery(filteredQuery(matchAllQuery(), typeQuery("type2"))).get(), 3l); - - assertHitCount(client().prepareCount().setTypes("type1").setQuery(matchAllQuery()).get(), 2l); - assertHitCount(client().prepareCount().setTypes("type2").setQuery(matchAllQuery()).get(), 3l); - - assertHitCount(client().prepareCount().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 5l); - } - - @Test - public void idsQueryTestsIdIndexed() throws Exception { - idsQueryTests("not_analyzed"); - } - - @Test - public void idsQueryTestsIdNotIndexed() throws Exception { - idsQueryTests("no"); - } - - private void idsQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_id").field("index", index).endObject() - .endObject().endObject())); - - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3")); - - CountResponse countResponse = client().prepareCount().setQuery(constantScoreQuery(idsQuery("type1").ids("1", "3"))).get(); - assertHitCount(countResponse, 2l); - - // no type - countResponse = client().prepareCount().setQuery(constantScoreQuery(idsQuery().ids("1", "3"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(idsQuery("type1").ids("1", "3")).get(); - assertHitCount(countResponse, 2l); - - // no type - countResponse = client().prepareCount().setQuery(idsQuery().ids("1", "3")).get(); - assertHitCount(countResponse, 2l); - - countResponse = 
client().prepareCount().setQuery(idsQuery("type1").ids("7", "10")).get(); - assertHitCount(countResponse, 0l); - - // repeat..., with terms - countResponse = client().prepareCount().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testLimitFilter() throws Exception { - assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1)); - - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value1_2"), - client().prepareIndex("test", "type1", "3").setSource("field2", "value2_3"), - client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4")); - - CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), limitQuery(2))).get(); - assertHitCount(countResponse, 4l); // limit is a no-op - } - - @Test - public void filterExistsMissingTests() throws Exception { - createIndex("test"); - - indexRandom(true, - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x1", "x_1").field("field1", "value1_1").field("field2", "value2_1").endObject()), - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x2", "x_2").field("field1", "value1_2").endObject()), - client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y1", "y_1").field("field2", "value2_3").endObject()), - client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y2", "y_2").field("field3", "value3_4").endObject())); - - CountResponse countResponse = 
client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(constantScoreQuery(existsQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("_exists_:field1")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field2"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field3"))).get(); - assertHitCount(countResponse, 1l); - - // wildcard check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("x*"))).get(); - assertHitCount(countResponse, 2l); - - // object check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("obj1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(constantScoreQuery(missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("_missing_:field1")).get(); - assertHitCount(countResponse, 2l); - - // wildcard check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("x*"))).get(); - assertHitCount(countResponse, 2l); - - // object check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("obj1"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void 
passQueryAsJSONStringTest() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefresh(true).get(); - - WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }"); - assertHitCount(client().prepareCount().setQuery(wrapper).get(), 1l); - - BoolQueryBuilder bool = boolQuery().must(wrapper).must(new TermQueryBuilder("field2", "value2_1")); - assertHitCount(client().prepareCount().setQuery(bool).get(), 1l); - } - - @Test - public void testMatchQueryNumeric() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("long", 1l, "double", 1.0d).get(); - client().prepareIndex("test", "type1", "2").setSource("long", 2l, "double", 2.0d).get(); - client().prepareIndex("test", "type1", "3").setSource("long", 3l, "double", 3.0d).get(); - refresh(); - CountResponse countResponse = client().prepareCount().setQuery(matchQuery("long", "1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(matchQuery("double", "2")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testMultiMatchQuery() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value4", "field3", "value3").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2", "field2", "value5", "field3", "value2").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "value3", "field2", "value6", "field3", "value1").get(); - refresh(); - - MultiMatchQueryBuilder builder = QueryBuilders.multiMatchQuery("value1 value2 value4", "field1", "field2"); - CountResponse countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field2") - 
.operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 1l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field3^1.5") - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1").field("field1").field("field3", 1.5f) - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - // Test lenient - client().prepareIndex("test", "type1", "3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); - refresh(); - - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field2", "field4"); - builder.lenient(true); - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testMatchQueryZeroTermsQuery() { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=classic", "field2", "type=string,analyzer=classic")); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); - refresh(); - - BoolQueryBuilder boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); - CountResponse countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 0l); - - 
boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 1l); - - boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testMultiMatchQueryZeroTermsQuery() { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=classic", "field2", "type=string,analyzer=classic")); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value3", "field2", "value4").get(); - refresh(); - - BoolQueryBuilder boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); // Fields are ORed together - CountResponse countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 0l); - - boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 1l); - - boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void 
testMultiMatchQueryMinShouldMatch() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[]{"value1", "value2", "value3"}).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); - refresh(); - - MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); - - multiMatchQuery.useDisMax(true); - multiMatchQuery.minimumShouldMatch("70%"); - CountResponse countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - - multiMatchQuery.minimumShouldMatch("30%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 2l); - - multiMatchQuery.useDisMax(false); - multiMatchQuery.minimumShouldMatch("70%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - - multiMatchQuery.minimumShouldMatch("30%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 2l); - - multiMatchQuery = multiMatchQuery("value1 value2 bar", "field1"); - multiMatchQuery.minimumShouldMatch("100%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 0l); - - multiMatchQuery.minimumShouldMatch("70%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testFuzzyQueryString() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "kimchy", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("str:kimcy~1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = 
client().prepareCount().setQuery(queryStringQuery("num:11~1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("date:2012-02-02~1d")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testSpecialRangeSyntaxInQueryString() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "kimchy", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("num:>19")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>20")).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>=20")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>11")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:<20")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:<=20")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("+num:>11 +num:<20")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testEmptyTermsFilter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "terms", "type=string")); - ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("term", "1").get(); - client().prepareIndex("test", "type", "2").setSource("term", "2").get(); - client().prepareIndex("test", "type", "3").setSource("term", "3").get(); - client().prepareIndex("test", "type", "4").setSource("term", "4").get(); - refresh(); - CountResponse countResponse = 
client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsQuery("term", new String[0]))).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), idsQuery())).get(); - assertHitCount(countResponse, 0l); - } - - @Test - public void testTermsLookupFilter() throws Exception { - assertAcked(prepareCreate("lookup").addMapping("type", "terms", "type=string", "other", "type=string")); - assertAcked(prepareCreate("lookup2").addMapping("type", - jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("arr").startObject("properties").startObject("term").field("type", "string") - .endObject().endObject().endObject().endObject().endObject().endObject())); - assertAcked(prepareCreate("test").addMapping("type", "term", "type=string")); - ensureGreen(); - - indexRandom(true, client().prepareIndex("lookup", "type", "1").setSource("terms", new String[]{"1", "3"}), - client().prepareIndex("lookup", "type", "2").setSource("terms", new String[]{"2"}), - client().prepareIndex("lookup", "type", "3").setSource("terms", new String[]{"2", "4"}), - client().prepareIndex("lookup", "type", "4").setSource("other", "value"), - client().prepareIndex("lookup2", "type", "1").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "1").endObject() - .startObject().field("term", "3").endObject() - .endArray() - .endObject()), - client().prepareIndex("lookup2", "type", "2").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "2").endObject() - .endArray() - .endObject()), - client().prepareIndex("lookup2", "type", "3").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "2").endObject() - .startObject().field("term", "4").endObject() - .endArray() - .endObject()), - client().prepareIndex("test", "type", 
"1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4")); - - CountResponse countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - // same as above, just on the _id... - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("_id").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - // another search with same parameters... - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("2").lookupPath("terms"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("3").lookupPath("terms")) - ).get(); - assertNoFailures(countResponse); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("4").lookupPath("terms"))).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), 
termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("1").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("2").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("not_exists").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 0l); - } - - @Test - public void testBasicFilterById() throws Exception { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1", "type2").ids("1", "2"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().ids("1", "2"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1").ids("1", "2"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().ids("1"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = 
client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery(null).ids("1"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testBasicQueryById() throws Exception { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1", "type2").ids("1", "2")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1", "2")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1").ids("1", "2")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery(null).ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4")).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testNumericTermsAndRanges() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", - "num_byte", "type=byte", "num_short", "type=short", - "num_integer", "type=integer", "num_long", "type=long", - "num_float", "type=float", "num_double", "type=double")); - ensureGreen(); - - 
client().prepareIndex("test", "type1", "1").setSource("num_byte", 1, "num_short", 1, "num_integer", 1, - "num_long", 1, "num_float", 1, "num_double", 1).get(); - - client().prepareIndex("test", "type1", "2").setSource("num_byte", 2, "num_short", 2, "num_integer", 2, - "num_long", 2, "num_float", 2, "num_double", 2).get(); - - client().prepareIndex("test", "type1", "17").setSource("num_byte", 17, "num_short", 17, "num_integer", 17, - "num_long", 17, "num_float", 17, "num_double", 17).get(); - refresh(); - - CountResponse countResponse; - logger.info("--> term query on 1"); - countResponse = client().prepareCount("test").setQuery(termQuery("num_byte", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_short", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_integer", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_long", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_float", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_double", 1)).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> terms query on 1"); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_byte", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_short", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_integer", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_long", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = 
client().prepareCount("test").setQuery(termsQuery("num_float", new double[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_double", new double[]{1})).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> term filter on 1"); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_byte", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_short", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_integer", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_long", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_float", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_double", 1))).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> terms filter on 1"); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_byte", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_short", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_integer", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_long", new int[]{1}))).get(); - assertHitCount(countResponse, 
1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_float", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_double", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - } - - @Test // see #2994 - public void testSimpleSpan() throws IOException { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar").get(); - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything").get(); - client().prepareIndex("test", "test", "3").setSource("description", "foo other").get(); - client().prepareIndex("test", "test", "4").setSource("description", "foo").get(); - refresh(); - - CountResponse response = client().prepareCount("test") - .setQuery(QueryBuilders.spanOrQuery().clause(QueryBuilders.spanTermQuery("description", "bar"))).get(); - assertHitCount(response, 1l); - - response = client().prepareCount("test").setQuery( - QueryBuilders.spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "foo")) - .clause(QueryBuilders.spanTermQuery("description", "other")) - .slop(3)).get(); - assertHitCount(response, 3l); - } -} diff --git a/src/test/java/org/elasticsearch/document/DocumentActionsTests.java b/src/test/java/org/elasticsearch/document/DocumentActionsTests.java index 8673a93dc3e..0b869d72e36 100644 --- a/src/test/java/org/elasticsearch/document/DocumentActionsTests.java +++ b/src/test/java/org/elasticsearch/document/DocumentActionsTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.document; -import com.google.common.base.Charsets; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -30,6 +29,7 @@ 
import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -40,7 +40,8 @@ import java.io.IOException; import static org.elasticsearch.client.Requests.*; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; /** * @@ -163,11 +164,11 @@ public class DocumentActionsTests extends ElasticsearchIntegrationTest { assertThat(countResponse.getFailedShards(), equalTo(0)); // test failed (simply query that can't be parsed) - countResponse = client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); - - assertThat(countResponse.getCount(), equalTo(0l)); - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(numShards.numPrimaries)); + try { + client().count(countRequest("test").source("{ term : { _type : \"type1 } }")).actionGet(); + } catch(SearchPhaseExecutionException e) { + assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries)); + } // count with no query is a match all one countResponse = client().prepareCount("test").execute().actionGet(); From fcccd45601bdeb6b1bf54da4a5bde317297cb3bd Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 26 May 2015 21:39:20 +0200 Subject: [PATCH 020/123] Be more lenient in EIT#waitForDocs The count request now acts like search and barfs if all shards fail this behavior 
changed and some tests like RecoveryWhileUnderLoadTests relied on the lenient behavior of the old count API. This might be a temporary solution to stop current test failures. Relates to #11198 --- .../test/ElasticsearchIntegrationTest.java | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index ea05c58ae35..d046a2f05ad 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -1067,12 +1067,17 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase lastKnownCount.set(indexer.totalIndexedDocs()); } if (lastKnownCount.get() >= numDocs) { - long count = client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(); - if (count == lastKnownCount.get()) { - // no progress - try to refresh for the next time - client().admin().indices().prepareRefresh().get(); + try { + long count = client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(); + if (count == lastKnownCount.get()) { + // no progress - try to refresh for the next time + client().admin().indices().prepareRefresh().get(); + } + lastKnownCount.set(count); + } catch (Throwable e) { // count now acts like search and barfs if all shards failed... + logger.debug("failed to executed count", e); + return false; } - lastKnownCount.set(count); logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs); } else { logger.debug("[{}] docs indexed. 
waiting for [{}]", lastKnownCount.get(), numDocs); From 6646881bb6176d7761aaaf9bc83788e3055670f3 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 26 May 2015 14:33:24 -0600 Subject: [PATCH 021/123] [CORE] Read segment info from latest commit whenever possible Instead of listing the directory to file the latest segments_N file, we should re-use the generation/filename from the last commit. This allows us to avoid potential race conditions on the filesystem as well as reduce the number of directory listings performed. --- .../elasticsearch/common/lucene/Lucene.java | 7 ++++-- .../elasticsearch/index/engine/Engine.java | 23 +++++++++++-------- .../index/engine/InternalEngine.java | 2 +- .../index/engine/ShadowEngine.java | 4 ++-- .../org/elasticsearch/index/store/Store.java | 3 ++- 5 files changed, 23 insertions(+), 16 deletions(-) diff --git a/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/src/main/java/org/elasticsearch/common/lucene/Lucene.java index e38af307a8a..385607d89ba 100644 --- a/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -159,8 +159,11 @@ public class Lucene { /** * Reads the segments infos from the given commit, failing if it fails to load */ - public static SegmentInfos readSegmentInfos(IndexCommit commit, Directory directory) throws IOException { - return SegmentInfos.readCommit(directory, commit.getSegmentsFileName()); + public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOException { + // Using commit.getSegmentsFileName() does NOT work here, have to + // manually create the segment filename + String filename = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", commit.getGeneration()); + return SegmentInfos.readCommit(commit.getDirectory(), filename); } /** diff --git a/src/main/java/org/elasticsearch/index/engine/Engine.java b/src/main/java/org/elasticsearch/index/engine/Engine.java index 3a2f1eba93d..e2811bb0698 
100644 --- a/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -21,15 +21,7 @@ package org.elasticsearch.index.engine; import com.google.common.base.Preconditions; -import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SegmentCommitInfo; -import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.index.SegmentReader; -import org.apache.lucene.index.Term; +import org.apache.lucene.index.*; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; @@ -324,7 +316,18 @@ public abstract class Engine implements Closeable { return new CommitStats(getLastCommittedSegmentInfos()); } - + /** + * Read the last segments info from the commit pointed to by the searcher manager + */ + protected static SegmentInfos readLastCommittedSegmentInfos(SearcherManager sm) throws IOException { + IndexSearcher searcher = sm.acquire(); + try { + IndexCommit latestCommit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit(); + return Lucene.readSegmentInfos(latestCommit); + } finally { + sm.release(searcher); + } + } /** * Global stats on segments. 
diff --git a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 151298acd07..393dff33907 100644 --- a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -275,7 +275,7 @@ public class InternalEngine extends Engine { try { final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId); searcherManager = new SearcherManager(directoryReader, searcherFactory); - lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); success = true; return searcherManager; } catch (IOException e) { diff --git a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java index e09acaa0e28..301f6176a00 100644 --- a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java @@ -79,7 +79,7 @@ public class ShadowEngine extends Engine { if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) { reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId); this.searcherManager = new SearcherManager(reader, searcherFactory); - this.lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); success = true; } else { throw new IndexShardException(shardId, "failed to open a shadow engine after" + @@ -148,7 +148,7 @@ public class ShadowEngine extends Engine { store.incRef(); try (ReleasableLock lock = readLock.acquire()) { // reread the last committed segment infos - lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); } 
catch (Throwable e) { if (isClosed.get() == false) { logger.warn("failed to read latest segment infos on flush", e); diff --git a/src/main/java/org/elasticsearch/index/store/Store.java b/src/main/java/org/elasticsearch/index/store/Store.java index 722c00c506d..c889dd16c20 100644 --- a/src/main/java/org/elasticsearch/index/store/Store.java +++ b/src/main/java/org/elasticsearch/index/store/Store.java @@ -151,8 +151,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * @throws IOException if the index is corrupted or the segments file is not present */ private static SegmentInfos readSegmentsInfo(IndexCommit commit, Directory directory) throws IOException { + assert commit == null || commit.getDirectory() == directory; try { - return commit == null ? Lucene.readSegmentInfos(directory) : Lucene.readSegmentInfos(commit, directory); + return commit == null ? Lucene.readSegmentInfos(directory) : Lucene.readSegmentInfos(commit); } catch (EOFException eof) { // TODO this should be caught by lucene - EOF is almost certainly an index corruption throw new CorruptIndexException("Read past EOF while reading segment infos", "commit(" + commit + ")", eof); From 7451b4708ec912d4ed40fe11d7bcf5b584f05bc4 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 May 2015 11:58:27 +0300 Subject: [PATCH 022/123] Simplify Transport*OperationAction names As a follow up to #11332, this commit simplifies more class names by remove the superfluous Operation: TransportBroadcastOperationAction -> TransportBroadcastAction TransportMasterNodeOperationAction -> TransportMasterNodeAction TransportMasterNodeReadOperationAction -> TransportMasterNodeReadAction TransportShardSingleOperationAction -> TransportSingleShardAction Closes #11349 --- .../cluster/health/ClusterHealthRequest.java | 4 ++-- .../health/TransportClusterHealthAction.java | 4 ++-- .../node/hotthreads/NodeHotThreads.java | 4 ++-- .../hotthreads/NodesHotThreadsRequest.java | 4 ++-- 
.../hotthreads/NodesHotThreadsResponse.java | 4 ++-- .../TransportNodesHotThreadsAction.java | 8 +++---- .../admin/cluster/node/info/NodeInfo.java | 4 ++-- .../cluster/node/info/NodesInfoRequest.java | 4 ++-- .../cluster/node/info/NodesInfoResponse.java | 4 ++-- .../node/info/TransportNodesInfoAction.java | 8 +++---- .../admin/cluster/node/stats/NodeStats.java | 7 ++---- .../cluster/node/stats/NodesStatsRequest.java | 4 ++-- .../node/stats/NodesStatsResponse.java | 4 ++-- .../node/stats/TransportNodesStatsAction.java | 8 +++---- .../TransportDeleteRepositoryAction.java | 4 ++-- .../get/GetRepositoriesRequest.java | 5 ++--- .../get/TransportGetRepositoriesAction.java | 4 ++-- .../put/TransportPutRepositoryAction.java | 4 ++-- .../TransportVerifyRepositoryAction.java | 4 ++-- .../TransportClusterRerouteAction.java | 4 ++-- .../TransportClusterUpdateSettingsAction.java | 4 ++-- .../shards/ClusterSearchShardsRequest.java | 4 ++-- .../TransportClusterSearchShardsAction.java | 4 ++-- .../create/CreateSnapshotRequest.java | 4 ++-- .../create/TransportCreateSnapshotAction.java | 4 ++-- .../delete/DeleteSnapshotRequest.java | 4 ++-- .../delete/TransportDeleteSnapshotAction.java | 4 ++-- .../snapshots/get/GetSnapshotsRequest.java | 4 ++-- .../get/TransportGetSnapshotsAction.java | 4 ++-- .../restore/RestoreSnapshotRequest.java | 4 ++-- .../TransportRestoreSnapshotAction.java | 4 ++-- .../status/SnapshotIndexShardStatus.java | 4 ++-- .../status/SnapshotsStatusRequest.java | 4 ++-- .../status/TransportNodesSnapshotsStatus.java | 10 ++++----- .../TransportSnapshotsStatusAction.java | 4 ++-- .../cluster/state/ClusterStateRequest.java | 5 ++--- .../state/TransportClusterStateAction.java | 4 ++-- .../stats/ClusterStatsNodeResponse.java | 4 ++-- .../cluster/stats/ClusterStatsRequest.java | 4 ++-- .../cluster/stats/ClusterStatsResponse.java | 4 ++-- .../stats/TransportClusterStatsAction.java | 8 +++---- .../tasks/PendingClusterTasksRequest.java | 9 ++------ 
.../TransportPendingClusterTasksAction.java | 4 ++-- .../alias/TransportIndicesAliasesAction.java | 4 ++-- .../exists/TransportAliasesExistAction.java | 4 ++-- .../indices/alias/get/GetAliasesRequest.java | 5 ++--- .../alias/get/TransportGetAliasesAction.java | 4 ++-- .../cache/clear/ClearIndicesCacheRequest.java | 5 ++--- .../clear/ClearIndicesCacheResponse.java | 4 ++-- .../clear/ShardClearIndicesCacheRequest.java | 5 ++--- .../clear/ShardClearIndicesCacheResponse.java | 4 ++-- .../TransportClearIndicesCacheAction.java | 4 ++-- .../close/TransportCloseIndexAction.java | 4 ++-- .../create/TransportCreateIndexAction.java | 4 ++-- .../indices/delete/DeleteIndexRequest.java | 4 ++-- .../delete/TransportDeleteIndexAction.java | 4 ++-- .../exists/indices/IndicesExistsRequest.java | 5 ++--- .../indices/TransportIndicesExistsAction.java | 4 ++-- .../types/TransportTypesExistsAction.java | 4 ++-- .../exists/types/TypesExistsRequest.java | 5 ++--- .../admin/indices/flush/FlushRequest.java | 5 ++--- .../admin/indices/flush/FlushResponse.java | 4 ++-- .../indices/flush/ShardFlushRequest.java | 5 ++--- .../indices/flush/ShardFlushResponse.java | 4 ++-- .../indices/flush/TransportFlushAction.java | 4 ++-- .../put/TransportPutMappingAction.java | 4 ++-- .../open/TransportOpenIndexAction.java | 4 ++-- .../indices/optimize/OptimizeRequest.java | 5 ++--- .../indices/optimize/OptimizeResponse.java | 4 ++-- .../optimize/ShardOptimizeRequest.java | 5 ++--- .../optimize/ShardOptimizeResponse.java | 4 ++-- .../optimize/TransportOptimizeAction.java | 4 ++-- .../indices/recovery/RecoveryRequest.java | 8 +++---- .../indices/recovery/RecoveryResponse.java | 4 ++-- .../recovery/ShardRecoveryResponse.java | 4 ++-- .../recovery/TransportRecoveryAction.java | 8 +++---- .../admin/indices/refresh/RefreshRequest.java | 8 ++----- .../indices/refresh/RefreshResponse.java | 4 ++-- .../indices/refresh/ShardRefreshRequest.java | 8 ++----- .../indices/refresh/ShardRefreshResponse.java | 4 ++-- 
.../refresh/TransportRefreshAction.java | 4 ++-- .../indices/seal/SealIndicesRequest.java | 4 ++-- .../segments/IndicesSegmentResponse.java | 5 ++--- .../segments/IndicesSegmentsRequest.java | 5 ++--- .../admin/indices/segments/ShardSegments.java | 4 ++-- .../TransportIndicesSegmentsAction.java | 10 ++++----- .../settings/get/GetSettingsRequest.java | 5 ++--- .../get/TransportGetSettingsAction.java | 4 ++-- .../put/TransportUpdateSettingsAction.java | 4 ++-- .../indices/stats/IndicesStatsRequest.java | 4 ++-- .../indices/stats/IndicesStatsResponse.java | 4 ++-- .../admin/indices/stats/ShardStats.java | 4 ++-- .../stats/TransportIndicesStatsAction.java | 8 +++---- .../delete/DeleteIndexTemplateRequest.java | 4 ++-- .../TransportDeleteIndexTemplateAction.java | 4 ++-- .../get/GetIndexTemplatesRequest.java | 5 ++--- .../get/TransportGetIndexTemplatesAction.java | 4 ++-- .../template/put/PutIndexTemplateRequest.java | 4 ++-- .../put/TransportPutIndexTemplateAction.java | 4 ++-- .../query/ShardValidateQueryRequest.java | 4 ++-- .../query/ShardValidateQueryResponse.java | 4 ++-- .../query/TransportValidateQueryAction.java | 4 ++-- .../validate/query/ValidateQueryRequest.java | 4 ++-- .../validate/query/ValidateQueryResponse.java | 4 ++-- .../delete/TransportDeleteWarmerAction.java | 4 ++-- .../warmer/put/TransportPutWarmerAction.java | 4 ++-- .../action/count/CountRequest.java | 4 ++-- .../action/count/CountResponse.java | 4 ++-- .../action/exists/ExistsRequest.java | 4 ++-- .../action/exists/ExistsResponse.java | 4 ++-- .../action/exists/ShardExistsRequest.java | 4 ++-- .../action/exists/ShardExistsResponse.java | 4 ++-- .../action/exists/TransportExistsAction.java | 6 ++--- .../action/explain/ExplainRequest.java | 4 ++-- .../explain/TransportExplainAction.java | 8 +++---- .../action/fieldstats/FieldStatsRequest.java | 4 ++-- .../action/fieldstats/FieldStatsResponse.java | 4 ++-- .../fieldstats/FieldStatsShardRequest.java | 4 ++-- 
.../fieldstats/FieldStatsShardResponse.java | 4 ++-- .../TransportFieldStatsTransportAction.java | 9 +++++--- .../elasticsearch/action/get/GetRequest.java | 4 ++-- .../action/get/MultiGetShardRequest.java | 8 ++----- .../action/get/TransportGetAction.java | 4 ++-- .../get/TransportShardMultiGetAction.java | 8 +++---- .../action/percolate/PercolateRequest.java | 6 ++--- .../action/percolate/PercolateResponse.java | 4 ++-- .../percolate/PercolateShardRequest.java | 5 ++--- .../percolate/PercolateShardResponse.java | 5 ++--- .../percolate/TransportPercolateAction.java | 4 ++-- .../TransportShardMultiPercolateAction.java | 8 +++---- .../action/suggest/ShardSuggestRequest.java | 4 ++-- .../action/suggest/ShardSuggestResponse.java | 4 ++-- .../action/suggest/SuggestRequest.java | 4 ++-- .../action/suggest/SuggestResponse.java | 4 ++-- .../suggest/TransportSuggestAction.java | 6 ++--- .../BroadcastOperationRequestBuilder.java | 2 +- ...tionRequest.java => BroadcastRequest.java} | 8 +++---- ...onResponse.java => BroadcastResponse.java} | 8 +++---- ...equest.java => BroadcastShardRequest.java} | 8 +++---- ...ponse.java => BroadcastShardResponse.java} | 6 ++--- ...ion.java => TransportBroadcastAction.java} | 6 ++--- .../support/master/AcknowledgedRequest.java | 2 +- .../MasterNodeOperationRequestBuilder.java | 4 +--- ...MasterNodeReadOperationRequestBuilder.java | 4 +--- ...equest.java => MasterNodeReadRequest.java} | 3 +-- ...ionRequest.java => MasterNodeRequest.java} | 6 ++--- ...on.java => TransportMasterNodeAction.java} | 11 ++++++---- ...ava => TransportMasterNodeReadAction.java} | 4 ++-- .../master/info/ClusterInfoRequest.java | 4 ++-- .../info/TransportClusterInfoAction.java | 4 ++-- ...ationRequest.java => BaseNodeRequest.java} | 6 ++--- ...ionResponse.java => BaseNodeResponse.java} | 6 ++--- ...tionRequest.java => BaseNodesRequest.java} | 8 +++---- ...onResponse.java => BaseNodesResponse.java} | 20 ++++++++--------- .../nodes/NodesOperationRequestBuilder.java | 2 +- 
...nAction.java => TransportNodesAction.java} | 22 +++++++++---------- .../SingleShardOperationRequestBuilder.java | 3 +-- ...onRequest.java => SingleShardRequest.java} | 10 ++++----- ...n.java => TransportSingleShardAction.java} | 6 ++--- .../MultiTermVectorsShardRequest.java | 4 ++-- .../termvectors/TermVectorsRequest.java | 4 ++-- .../TransportShardMultiTermsVectorAction.java | 4 ++-- .../TransportTermVectorsAction.java | 6 ++--- .../termvectors/dfs/DfsOnlyRequest.java | 4 ++-- .../termvectors/dfs/DfsOnlyResponse.java | 4 ++-- .../termvectors/dfs/ShardDfsOnlyRequest.java | 4 ++-- .../termvectors/dfs/ShardDfsOnlyResponse.java | 4 ++-- .../dfs/TransportDfsOnlyAction.java | 4 ++-- .../metadata/MetaDataDeleteIndexService.java | 4 ++-- .../MetaDataIndexTemplateService.java | 6 ++--- .../gateway/AsyncShardFetch.java | 20 ++++++++--------- .../gateway/GatewayAllocator.java | 8 +++---- .../TransportNodesListGatewayMetaState.java | 10 ++++----- ...ransportNodesListGatewayStartedShards.java | 10 ++++----- .../TransportNodesListShardStoreMetaData.java | 10 ++++----- .../rest/action/support/RestActions.java | 4 ++-- .../org/elasticsearch/tribe/TribeService.java | 4 ++-- .../action/admin/HotThreadsTest.java | 11 +++------- .../gateway/AsyncShardFetchTests.java | 4 ++-- .../hamcrest/ElasticsearchAssertions.java | 8 +++---- 180 files changed, 449 insertions(+), 491 deletions(-) rename src/main/java/org/elasticsearch/action/support/broadcast/{BroadcastOperationRequest.java => BroadcastRequest.java} (88%) rename src/main/java/org/elasticsearch/action/support/broadcast/{BroadcastOperationResponse.java => BroadcastResponse.java} (90%) rename src/main/java/org/elasticsearch/action/support/broadcast/{BroadcastShardOperationRequest.java => BroadcastShardRequest.java} (86%) rename src/main/java/org/elasticsearch/action/support/broadcast/{BroadcastShardOperationResponse.java => BroadcastShardResponse.java} (89%) rename 
src/main/java/org/elasticsearch/action/support/broadcast/{TransportBroadcastOperationAction.java => TransportBroadcastAction.java} (95%) rename src/main/java/org/elasticsearch/action/support/master/{MasterNodeReadOperationRequest.java => MasterNodeReadRequest.java} (90%) rename src/main/java/org/elasticsearch/action/support/master/{MasterNodeOperationRequest.java => MasterNodeRequest.java} (90%) rename src/main/java/org/elasticsearch/action/support/master/{TransportMasterNodeOperationAction.java => TransportMasterNodeAction.java} (94%) rename src/main/java/org/elasticsearch/action/support/master/{TransportMasterNodeReadOperationAction.java => TransportMasterNodeReadAction.java} (80%) rename src/main/java/org/elasticsearch/action/support/nodes/{NodeOperationRequest.java => BaseNodeRequest.java} (88%) rename src/main/java/org/elasticsearch/action/support/nodes/{NodeOperationResponse.java => BaseNodeResponse.java} (90%) rename src/main/java/org/elasticsearch/action/support/nodes/{NodesOperationRequest.java => BaseNodesRequest.java} (90%) rename src/main/java/org/elasticsearch/action/support/nodes/{NodesOperationResponse.java => BaseNodesResponse.java} (79%) rename src/main/java/org/elasticsearch/action/support/nodes/{TransportNodesOperationAction.java => TransportNodesAction.java} (87%) rename src/main/java/org/elasticsearch/action/support/single/shard/{SingleShardOperationRequest.java => SingleShardRequest.java} (89%) rename src/main/java/org/elasticsearch/action/support/single/shard/{TransportShardSingleOperationAction.java => TransportSingleShardAction.java} (96%) diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index f373689566f..aff58a7b327 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java 
@@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.health; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -37,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.readTimeValue; /** * */ -public class ClusterHealthRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterHealthRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; private TimeValue timeout = new TimeValue(30, TimeUnit.SECONDS); diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index d183ddd865c..c4cb95c501b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.health; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.common.Strings; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * */ 
-public class TransportClusterHealthAction extends TransportMasterNodeReadOperationAction { +public class TransportClusterHealthAction extends TransportMasterNodeReadAction { private final ClusterName clusterName; private final GatewayAllocator gatewayAllocator; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 9485395057e..635be28a646 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class NodeHotThreads extends NodeOperationResponse { +public class NodeHotThreads extends BaseNodeResponse { private String hotThreads; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java index fe092d7dc81..f7ab360c59f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; import org.elasticsearch.Version; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -30,7 +30,7 @@ import java.util.concurrent.TimeUnit; /** */ -public class NodesHotThreadsRequest extends NodesOperationRequest { +public class NodesHotThreadsRequest extends BaseNodesRequest { int threads = 3; String type = "cpu"; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java index 2b04435eeb4..22d4795fc95 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class NodesHotThreadsResponse extends NodesOperationResponse { +public class NodesHotThreadsResponse extends BaseNodesResponse { NodesHotThreadsResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 8b509ef2d94..a73982e5c0c 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -22,8 +22,8 @@ package 
org.elasticsearch.action.admin.cluster.node.hotthreads; import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -41,7 +41,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesHotThreadsAction extends TransportNodesOperationAction { +public class TransportNodesHotThreadsAction extends TransportNodesAction { @Inject public TransportNodesHotThreadsAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, @@ -92,7 +92,7 @@ public class TransportNodesHotThreadsAction extends TransportNodesOperationActio return false; } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { NodesHotThreadsRequest request; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 66904933db4..f3c5eb7e1fc 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.node.info; import com.google.common.collect.ImmutableMap; import org.elasticsearch.Build; import org.elasticsearch.Version; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,7 +42,7 @@ import java.util.Map; /** * Node information (static, does not change over time). */ -public class NodeInfo extends NodeOperationResponse { +public class NodeInfo extends BaseNodeResponse { @Nullable private ImmutableMap serviceAttributes; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 589471af36d..d01167ceeca 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** * A request to get node (cluster) level information. 
*/ -public class NodesInfoRequest extends NodesOperationRequest { +public class NodesInfoRequest extends BaseNodesRequest { private boolean settings = true; private boolean os = true; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index b7ce109484d..e872be90142 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,7 +34,7 @@ import java.util.Map; /** * */ -public class NodesInfoResponse extends NodesOperationResponse implements ToXContent { +public class NodesInfoResponse extends BaseNodesResponse implements ToXContent { public NodesInfoResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 29a904fa551..74221fc79ed 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.cluster.node.info; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import 
org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesInfoAction extends TransportNodesOperationAction { +public class TransportNodesInfoAction extends TransportNodesAction { private final NodeService nodeService; @@ -87,7 +87,7 @@ public class TransportNodesInfoAction extends TransportNodesOperationAction { +public class NodesStatsRequest extends BaseNodesRequest { private CommonStatsFlags indices = new CommonStatsFlags(); private boolean os; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index 27affe2b898..e57fd552d7c 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.io.IOException; /** * */ -public class NodesStatsResponse extends NodesOperationResponse implements ToXContent { +public class NodesStatsResponse extends BaseNodesResponse implements ToXContent { NodesStatsResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 1f8dea37aae..d808f859037 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -21,8 +21,8 @@ package org.elasticsearch.action.admin.cluster.node.stats; import com.google.common.collect.Lists; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesStatsAction extends TransportNodesOperationAction { +public class TransportNodesStatsAction extends TransportNodesAction { private final NodeService nodeService; @@ -87,7 +87,7 @@ public class TransportNodesStatsAction extends TransportNodesOperationAction { +public class TransportDeleteRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java index 86c999982d1..4f5f99b2072 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.admin.cluster.repositories.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import 
org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get repository request */ -public class GetRepositoriesRequest extends MasterNodeReadOperationRequest { +public class GetRepositoriesRequest extends MasterNodeReadRequest { private String[] repositories = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java index 45026f08786..5c58628dc35 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.repositories.get; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for get repositories operation */ -public class TransportGetRepositoriesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetRepositoriesAction extends TransportMasterNodeReadAction { @Inject public 
TransportGetRepositoriesAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index bde461ccf8a..a395d9b9ef2 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.repositories.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for register repository operation */ -public class TransportPutRepositoryAction extends TransportMasterNodeOperationAction { +public class TransportPutRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java index 978738d9f8c..ed687ae2bfd 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java @@ -21,7 +21,7 @@ package 
org.elasticsearch.action.admin.cluster.repositories.verify; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for verifying repository operation */ -public class TransportVerifyRepositoryAction extends TransportMasterNodeOperationAction { +public class TransportVerifyRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index 1d4f0a6185e..d9829d1e078 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.reroute; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportClusterRerouteAction extends TransportMasterNodeOperationAction { +public class TransportClusterRerouteAction extends 
TransportMasterNodeAction { private final AllocationService allocationService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index db872f868fd..31375a8912d 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.settings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -49,7 +49,7 @@ import static org.elasticsearch.cluster.ClusterState.builder; /** * */ -public class TransportClusterUpdateSettingsAction extends TransportMasterNodeOperationAction { +public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAction { private final AllocationService allocationService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index de8e1fcdfab..21ecf8a4c4f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionRequestValidationException; 
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,7 +32,7 @@ import java.io.IOException; /** */ -public class ClusterSearchShardsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterSearchShardsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; @Nullable private String routing; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 1575f3c61a8..10ef0348e46 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -42,7 +42,7 @@ import static com.google.common.collect.Sets.newHashSet; /** */ -public class TransportClusterSearchShardsAction extends TransportMasterNodeReadOperationAction { +public class TransportClusterSearchShardsAction extends TransportMasterNodeReadAction { @Inject public 
TransportClusterSearchShardsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 9e8c87bbbae..1373eed1507 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -60,7 +60,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo *

  • must not contain invalid file name characters {@link org.elasticsearch.common.Strings#INVALID_FILENAME_CHARS}
  • * */ -public class CreateSnapshotRequest extends MasterNodeOperationRequest implements IndicesRequest.Replaceable { +public class CreateSnapshotRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { private String snapshot; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 533d0778925..e97633932d1 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for create snapshot operation */ -public class TransportCreateSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportCreateSnapshotAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index 5d6c5544184..d997786d5fc 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,7 +35,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * files that are associated with this particular snapshot. All files that are shared with * at least one other existing snapshot are left intact. */ -public class DeleteSnapshotRequest extends MasterNodeOperationRequest { +public class DeleteSnapshotRequest extends MasterNodeRequest { private String repository; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index 5b5a31bedde..97f89c9f647 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for 
delete snapshot operation */ -public class TransportDeleteSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportDeleteSnapshotAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 6b6927b5e0d..03edadd755b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get snapshot request */ -public class GetSnapshotsRequest extends MasterNodeOperationRequest { +public class GetSnapshotsRequest extends MasterNodeRequest { public static final String ALL_SNAPSHOTS = "_all"; public static final String CURRENT_SNAPSHOT = "_current"; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 9ead8554943..6f480219857 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -22,7 +22,7 @@ package 
org.elasticsearch.action.admin.cluster.snapshots.get; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport Action for get snapshots operation */ -public class TransportGetSnapshotsAction extends TransportMasterNodeOperationAction { +public class TransportGetSnapshotsAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 63f595747a1..ddb177e273b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo /** * Restore snapshot request */ -public 
class RestoreSnapshotRequest extends MasterNodeOperationRequest { +public class RestoreSnapshotRequest extends MasterNodeRequest { private String snapshot; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 70ab71b6e8f..205a2d1178b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for restore snapshot operation */ -public class TransportRestoreSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportRestoreSnapshotAction extends TransportMasterNodeAction { private final RestoreService restoreService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java index 878ca704345..1f358915662 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -32,7 +32,7 @@ import java.io.IOException; /** */ -public class SnapshotIndexShardStatus extends BroadcastShardOperationResponse implements ToXContent { +public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContent { private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java index c84ad073bd8..b7b2b631b31 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get snapshot status request */ -public class SnapshotsStatusRequest extends MasterNodeOperationRequest { +public class SnapshotsStatusRequest extends MasterNodeRequest { private String repository = "_all"; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index b23010e3ecc..22a1753660e 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -47,7 +47,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * Transport client that collects snapshot shard statuses from data nodes */ -public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction { +public class TransportNodesSnapshotsStatus extends TransportNodesAction { public static final String ACTION_NAME = SnapshotsStatusAction.NAME + "[nodes]"; @@ -128,7 +128,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private SnapshotId[] snapshotIds; @@ -157,7 +157,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } } - public static class NodesSnapshotStatus extends NodesOperationResponse { + public static class NodesSnapshotStatus extends BaseNodesResponse { private FailedNodeException[] failures; @@ -194,7 +194,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private SnapshotId[] snapshotIds; @@ -230,7 +230,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } } - public static class NodeSnapshotStatus extends NodeOperationResponse { + public static class NodeSnapshotStatus extends BaseNodeResponse { private ImmutableMap> status; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index edfc9d5fd32..a38e894e90f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -49,7 +49,7 @@ import static com.google.common.collect.Sets.newHashSet; /** */ -public class TransportSnapshotsStatusAction extends TransportMasterNodeOperationAction { +public class TransportSnapshotsStatusAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java index 574ed0170f3..4edd26812a0 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java @@ -19,11 +19,10 @@ package org.elasticsearch.action.admin.cluster.state; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; 
-import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import java.io.IOException; /** * */ -public class ClusterStateRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterStateRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private boolean routingTable = true; private boolean nodes = true; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index d4d647b99b0..e989b7a5df8 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.state; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * */ -public class TransportClusterStateAction extends TransportMasterNodeReadOperationAction { +public class TransportClusterStateAction extends TransportMasterNodeReadAction { private final ClusterName clusterName; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java index 24222da8b5a..d0f91f7e9b9 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,7 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -public class ClusterStatsNodeResponse extends NodeOperationResponse { +public class ClusterStatsNodeResponse extends BaseNodeResponse { private NodeInfo nodeInfo; private NodeStats nodeStats; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index 3a0c26af10c..d33f9acf296 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
@@ -28,7 +28,7 @@ import java.io.IOException; /** * A request to get cluster level stats. */ -public class ClusterStatsRequest extends NodesOperationRequest { +public class ClusterStatsRequest extends BaseNodesRequest { ClusterStatsRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index 3b84e86ea8e..aebdf6c31c3 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,7 +37,7 @@ import java.util.Map; /** * */ -public class ClusterStatsResponse extends NodesOperationResponse implements ToXContent { +public class ClusterStatsResponse extends BaseNodesResponse implements ToXContent { ClusterStatsNodes nodesStats; ClusterStatsIndices indicesStats; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index c2254ed1294..40c1c8b9e74 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import 
org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportClusterStatsAction extends TransportNodesOperationAction { private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, @@ -142,7 +142,7 @@ public class TransportClusterStatsAction extends TransportNodesOperationAction { +public class PendingClusterTasksRequest extends MasterNodeReadRequest { @Override public ActionRequestValidationException validate() { diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java index 40f634cef2c..5b7f2a881c3 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.tasks; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.block.ClusterBlockException; @@ -33,7 +33,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportPendingClusterTasksAction extends TransportMasterNodeReadOperationAction { +public class TransportPendingClusterTasksAction extends TransportMasterNodeReadAction { private final ClusterService clusterService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 2ba0c606614..4bedc3e3ee8 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -23,7 +23,7 @@ import com.google.common.collect.Sets; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -42,7 +42,7 @@ import java.util.*; /** * Add/remove aliases action */ -public class TransportIndicesAliasesAction extends TransportMasterNodeOperationAction { +public class TransportIndicesAliasesAction extends TransportMasterNodeAction { private final MetaDataIndexAliasesService indexAliasesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java index a85202a308c..29754edeb6f 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.alias.exists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -33,7 +33,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportAliasesExistAction extends TransportMasterNodeReadOperationAction { +public class TransportAliasesExistAction extends TransportMasterNodeReadAction { @Inject public TransportAliasesExistAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java index e22e110c942..182b86fd149 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java @@ -18,11 +18,10 @@ */ package org.elasticsearch.action.admin.indices.alias.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; 
+import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,7 +30,7 @@ import java.io.IOException; /** */ -public class GetAliasesRequest extends MasterNodeReadOperationRequest implements AliasesRequest { +public class GetAliasesRequest extends MasterNodeReadRequest implements AliasesRequest { private String[] indices = Strings.EMPTY_ARRAY; private String[] aliases = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 9c1475136a8..a1088d4fbcd 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.alias.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import java.util.List; /** */ -public class TransportGetAliasesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetAliasesAction extends TransportMasterNodeReadAction { @Inject public TransportGetAliasesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git 
a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java index 323d8869024..cf471ab0c77 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +28,7 @@ import java.io.IOException; /** * */ -public class ClearIndicesCacheRequest extends BroadcastOperationRequest { +public class ClearIndicesCacheRequest extends BroadcastRequest { private boolean filterCache = false; private boolean fieldDataCache = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java index a9f094892d4..cd3355cae87 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class 
ClearIndicesCacheResponse extends BroadcastOperationResponse { +public class ClearIndicesCacheResponse extends BroadcastResponse { ClearIndicesCacheResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java index 46044b7862d..8aefde1eb70 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +29,7 @@ import java.io.IOException; /** * */ -class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest { +class ShardClearIndicesCacheRequest extends BroadcastShardRequest { private boolean filterCache = false; private boolean fieldDataCache = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java index d89bfe088f6..c2931df6003 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import 
org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardClearIndicesCacheResponse extends BroadcastShardOperationResponse { +class ShardClearIndicesCacheResponse extends BroadcastShardResponse { ShardClearIndicesCacheResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 8e356f2fd84..88595c70647 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -47,7 +47,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Indices clear cache action. 
*/ -public class TransportClearIndicesCacheAction extends TransportBroadcastOperationAction { +public class TransportClearIndicesCacheAction extends TransportBroadcastAction { private final IndicesService indicesService; private final IndicesQueryCache indicesQueryCache; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 43343e83894..26a7fa1ffdf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.close; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Close index action */ -public class TransportCloseIndexAction extends TransportMasterNodeOperationAction { +public class TransportCloseIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index eca8894e43d..9c671cc9505 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Create index action. */ -public class TransportCreateIndexAction extends TransportMasterNodeOperationAction { +public class TransportCreateIndexAction extends TransportMasterNodeAction { private final MetaDataCreateIndexService createIndexService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java index c52fc57e653..3720f5fe78b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -37,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.readTimeValue; /** * A request to delete an index. 
Best created with {@link org.elasticsearch.client.Requests#deleteIndexRequest(String)}. */ -public class DeleteIndexRequest extends MasterNodeOperationRequest implements IndicesRequest.Replaceable { +public class DeleteIndexRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { private String[] indices; // Delete index should work by default on both open and closed indices. diff --git a/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index 4c0de6d799b..a25b56eef47 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Delete index action. 
*/ -public class TransportDeleteIndexAction extends TransportMasterNodeOperationAction { +public class TransportDeleteIndexAction extends TransportMasterNodeAction { private final MetaDataDeleteIndexService deleteIndexService; private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java index e104090e962..e822f45e7f6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java @@ -19,11 +19,10 @@ package org.elasticsearch.action.admin.indices.exists.indices; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class IndicesExistsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class IndicesExistsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java 
b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java index 0c360468e98..019e8c2f34b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.exists.indices; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Indices exists action. 
*/ -public class TransportIndicesExistsAction extends TransportMasterNodeReadOperationAction { +public class TransportIndicesExistsAction extends TransportMasterNodeReadAction { @Inject public TransportIndicesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java index fb617f233d3..ef1dc16b190 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.exists.types; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -35,7 +35,7 @@ import org.elasticsearch.transport.TransportService; /** * Types exists transport action. 
*/ -public class TransportTypesExistsAction extends TransportMasterNodeReadOperationAction { +public class TransportTypesExistsAction extends TransportMasterNodeReadAction { @Inject public TransportTypesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java index 868f32a7a78..8eeb7422bf3 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java @@ -18,11 +18,10 @@ */ package org.elasticsearch.action.admin.indices.exists.types; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** */ -public class TypesExistsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class TypesExistsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; private String[] types; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java index e34ac8cfab5..57d9455ff91 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,7 +37,7 @@ import java.io.IOException; * @see org.elasticsearch.client.IndicesAdminClient#flush(FlushRequest) * @see FlushResponse */ -public class FlushRequest extends BroadcastOperationRequest { +public class FlushRequest extends BroadcastRequest { private boolean force = false; private boolean waitIfOngoing = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java index c4c52e7071c..a158b02611b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class FlushResponse extends BroadcastOperationResponse { +public class FlushResponse extends BroadcastResponse { FlushResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java index 55153562c15..0e38181fa61 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +29,7 @@ import java.io.IOException; /** * */ -class ShardFlushRequest extends BroadcastShardOperationRequest { +class ShardFlushRequest extends BroadcastShardRequest { private FlushRequest request = new FlushRequest(); ShardFlushRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java index 3ba91768732..6f2cc6a5522 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardFlushResponse extends BroadcastShardOperationResponse { +class ShardFlushResponse extends BroadcastShardResponse { ShardFlushResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index c9f637e4371..e546d6f616b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Flush Action. */ -public class TransportFlushAction extends TransportBroadcastOperationAction { +public class TransportFlushAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 9772754d330..1f853b6b397 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.mapping.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -36,7 +36,7 @@ import 
org.elasticsearch.transport.TransportService; /** * Put mapping action. */ -public class TransportPutMappingAction extends TransportMasterNodeOperationAction { +public class TransportPutMappingAction extends TransportMasterNodeAction { private final MetaDataMappingService metaDataMappingService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 83063492c07..1df33c56463 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.open; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Open index action */ -public class TransportOpenIndexAction extends TransportMasterNodeOperationAction { +public class TransportOpenIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java index d5b822f58cb..3510a3b7f96 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,7 +36,7 @@ import java.io.IOException; * @see org.elasticsearch.client.IndicesAdminClient#optimize(OptimizeRequest) * @see OptimizeResponse */ -public class OptimizeRequest extends BroadcastOperationRequest { +public class OptimizeRequest extends BroadcastRequest { public static final class Defaults { public static final int MAX_NUM_SEGMENTS = -1; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java index d4a189eb7f4..88341ef2619 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.optimize; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class OptimizeResponse extends BroadcastOperationResponse { +public class OptimizeResponse extends BroadcastResponse { OptimizeResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java 
index 9113581038b..05aeabe21a6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java @@ -20,8 +20,7 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -31,7 +30,7 @@ import java.io.IOException; /** * */ -final class ShardOptimizeRequest extends BroadcastShardOperationRequest { +final class ShardOptimizeRequest extends BroadcastShardRequest { private OptimizeRequest request = new OptimizeRequest(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java index 61adb62f1d6..1c9dc4482d9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardOptimizeResponse extends BroadcastShardOperationResponse { +class ShardOptimizeResponse extends BroadcastShardResponse { ShardOptimizeResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java index 
c4f276126fa..17a18bae971 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Optimize index/indices action. */ -public class TransportOptimizeAction extends TransportBroadcastOperationAction { +public class TransportOptimizeAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java index f2a97ef5fb5..8878713765b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java @@ -19,17 +19,17 @@ package org.elasticsearch.action.admin.indices.recovery; -import java.io.IOException; - -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import 
java.io.IOException; + /** * Request for recovery information */ -public class RecoveryRequest extends BroadcastOperationRequest { +public class RecoveryRequest extends BroadcastRequest { private boolean detailed = false; // Provides extra details in the response private boolean activeOnly = false; // Only reports on active recoveries diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java index 9fd15cd371e..fea33688c14 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.recovery; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -35,7 +35,7 @@ import java.util.Map; /** * Information regarding the recovery state of indices and their associated shards. 
*/ -public class RecoveryResponse extends BroadcastOperationResponse implements ToXContent { +public class RecoveryResponse extends BroadcastResponse implements ToXContent { private boolean detailed = false; private Map> shardResponses = new HashMap<>(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java index 2e12de4f39f..a4104fbc449 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.recovery; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ import java.io.IOException; /** * Information regarding the recovery state of a shard. 
*/ -public class ShardRecoveryResponse extends BroadcastShardOperationResponse implements ToXContent { +public class ShardRecoveryResponse extends BroadcastShardResponse implements ToXContent { RecoveryState recoveryState; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index 2996247963f..2483efbc498 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; * Transport action for shard recovery operation. This transport action does not actually * perform shard recovery, it only reports on recoveries (both active and complete). 
*/ -public class TransportRecoveryAction extends TransportBroadcastOperationAction { +public class TransportRecoveryAction extends TransportBroadcastAction { private final IndicesService indicesService; @@ -149,7 +149,7 @@ public class TransportRecoveryAction extends TransportBroadcastOperationAction { +public class RefreshRequest extends BroadcastRequest { RefreshRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java index 3130b0713da..28295fdd0a0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class RefreshResponse extends BroadcastOperationResponse { +public class RefreshResponse extends BroadcastResponse { RefreshResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java index da3c5fea9eb..37ea2cc46de 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java @@ -19,17 +19,13 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.index.shard.ShardId; -import java.io.IOException; - /** * */ -class ShardRefreshRequest extends BroadcastShardOperationRequest { +class ShardRefreshRequest extends BroadcastShardRequest { ShardRefreshRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java index c2ab17890eb..4de0f5877dd 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardRefreshResponse extends BroadcastShardOperationResponse { +class ShardRefreshResponse extends BroadcastShardResponse { ShardRefreshResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index 001793ecd17..e2fe442f951 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import 
org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Refresh action. */ -public class TransportRefreshAction extends TransportBroadcastOperationAction { +public class TransportRefreshAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java index 42cdc51ed32..2e8e3ac0cf8 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java @@ -19,14 +19,14 @@ package org.elasticsearch.action.admin.indices.seal; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import java.util.Arrays; /** * A request to seal one or more indices. 
*/ -public class SealIndicesRequest extends BroadcastOperationRequest { +public class SealIndicesRequest extends BroadcastRequest { SealIndicesRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index 983e5350a5d..6b0dc8697f3 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -24,7 +24,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,12 +36,11 @@ import org.elasticsearch.index.engine.Segment; import java.io.IOException; import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -public class IndicesSegmentResponse extends BroadcastOperationResponse implements ToXContent { +public class IndicesSegmentResponse extends BroadcastResponse implements ToXContent { private ShardSegments[] shards; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java index cefc2ebc3bf..570fa89e026 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java @@ -19,15 +19,14 @@ package 
org.elasticsearch.action.admin.indices.segments; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -public class IndicesSegmentsRequest extends BroadcastOperationRequest { +public class IndicesSegmentsRequest extends BroadcastRequest { protected boolean verbose = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java index 09d20271776..d33df00fc8c 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.segments; import com.google.common.collect.ImmutableList; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ import java.util.List; import static org.elasticsearch.cluster.routing.ImmutableShardRouting.readShardRoutingEntry; -public class ShardSegments extends BroadcastShardOperationResponse implements Iterable { +public class ShardSegments extends BroadcastShardResponse implements Iterable { private ShardRouting shardRouting; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 
fa9639424b4..f043d8ebdb0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,8 +36,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -51,7 +51,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportIndicesSegmentsAction extends TransportBroadcastOperationAction { +public class TransportIndicesSegmentsAction extends TransportBroadcastAction { private final IndicesService indicesService; @@ -122,7 +122,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastOperationA return new ShardSegments(indexShard.routingEntry(), 
indexShard.engine().segments(request.verbose)); } - static class IndexShardSegmentRequest extends BroadcastShardOperationRequest { + static class IndexShardSegmentRequest extends BroadcastShardRequest { boolean verbose; IndexShardSegmentRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index 2f01e28f969..daf011ece62 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -19,12 +19,11 @@ package org.elasticsearch.action.admin.indices.settings.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import java.io.IOException; /** */ -public class GetSettingsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class GetSettingsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index c3a8948d1bb..f48b2c4853f 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.settings.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -40,7 +40,7 @@ import java.util.Map; /** */ -public class TransportGetSettingsAction extends TransportMasterNodeReadOperationAction { +public class TransportGetSettingsAction extends TransportMasterNodeReadAction { private final SettingsFilter settingsFilter; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 8185badb5af..1278b97934b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.settings.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -37,7 +37,7 @@ import 
org.elasticsearch.transport.TransportService; /** * */ -public class TransportUpdateSettingsAction extends TransportMasterNodeOperationAction { +public class TransportUpdateSettingsAction extends TransportMasterNodeAction { private final MetaDataUpdateSettingsService updateSettingsService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java index b8a94ab4d51..a99c0f52def 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,7 +34,7 @@ import java.io.IOException; *

    All the stats to be returned can be cleared using {@link #clear()}, at which point, specific * stats can be enabled. */ -public class IndicesStatsRequest extends BroadcastOperationRequest { +public class IndicesStatsRequest extends BroadcastRequest { private CommonStatsFlags flags = new CommonStatsFlags(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index f2f10c48f0a..2d9bf1e78f9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -24,7 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.stream.StreamInput; @@ -41,7 +41,7 @@ import java.util.Set; /** */ -public class IndicesStatsResponse extends BroadcastOperationResponse implements ToXContent { +public class IndicesStatsResponse extends BroadcastResponse implements ToXContent { private ShardStats[] shards; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 951c4b95223..fbba68cbee9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; 
+import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -36,7 +36,7 @@ import static org.elasticsearch.cluster.routing.ImmutableShardRouting.readShardR /** */ -public class ShardStats extends BroadcastShardOperationResponse implements ToXContent { +public class ShardStats extends BroadcastShardResponse implements ToXContent { private ShardRouting shardRouting; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 75191bb903f..b4b0d6a4435 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -52,7 +52,7 @@ import static com.google.common.collect.Lists.newArrayList; /** */ -public class TransportIndicesStatsAction extends TransportBroadcastOperationAction { +public class TransportIndicesStatsAction extends TransportBroadcastAction { 
private final IndicesService indicesService; @@ -190,7 +190,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastOperationActi return new ShardStats(indexShard, indexShard.routingEntry(), flags); } - static class IndexShardStatsRequest extends BroadcastShardOperationRequest { + static class IndexShardStatsRequest extends BroadcastShardRequest { // TODO if there are many indices, the request might hold a large indices array..., we don't really need to serialize it IndicesStatsRequest request; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index 08801434868..42907787803 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.template.delete; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,7 +30,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to delete an index template. 
*/ -public class DeleteIndexTemplateRequest extends MasterNodeOperationRequest { +public class DeleteIndexTemplateRequest extends MasterNodeRequest { private String name; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java index c1a063e6662..d66b0bb0a96 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.template.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,7 +34,7 @@ import org.elasticsearch.transport.TransportService; /** * Delete index action. 
*/ -public class TransportDeleteIndexTemplateAction extends TransportMasterNodeOperationAction { +public class TransportDeleteIndexTemplateAction extends TransportMasterNodeAction { private final MetaDataIndexTemplateService indexTemplateService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java index 949944fa61e..aeefc63bfa0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java @@ -18,9 +18,8 @@ */ package org.elasticsearch.action.admin.indices.template.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Request that allows to retrieve index templates */ -public class GetIndexTemplatesRequest extends MasterNodeReadOperationRequest { +public class GetIndexTemplatesRequest extends MasterNodeReadRequest { private String[] names; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java index c2600112542..039ca1a726f 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java @@ -22,7 
+22,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.Lists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import java.util.List; /** * */ -public class TransportGetIndexTemplatesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetIndexTemplatesAction extends TransportMasterNodeReadAction { @Inject public TransportGetIndexTemplatesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 6e21ba4753f..a4b10cb7783 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -50,7 +50,7 @@ import static 
org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * A request to create an index template. */ -public class PutIndexTemplateRequest extends MasterNodeOperationRequest implements IndicesRequest { +public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest { private String name; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index 5c4979e6253..492dbf352c7 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,7 +34,7 @@ import org.elasticsearch.transport.TransportService; /** * Put index template action. 
*/ -public class TransportPutIndexTemplateAction extends TransportMasterNodeOperationAction { +public class TransportPutIndexTemplateAction extends TransportMasterNodeAction { private final MetaDataIndexTemplateService indexTemplateService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 7b771f32091..648ab21afd6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -32,7 +32,7 @@ import java.io.IOException; /** * Internal validate request executed directly against a specific index shard. 
*/ -class ShardValidateQueryRequest extends BroadcastShardOperationRequest { +class ShardValidateQueryRequest extends BroadcastShardRequest { private BytesReference source; private String[] types = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java index d4e75578f1f..43d3ad82305 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -31,7 +31,7 @@ import java.io.IOException; * * */ -class ShardValidateQueryResponse extends BroadcastShardOperationResponse { +class ShardValidateQueryResponse extends BroadcastShardResponse { private boolean valid; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index baa4949d29d..5d8e98beac1 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import 
org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -63,7 +63,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportValidateQueryAction extends TransportBroadcastOperationAction { +public class TransportValidateQueryAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index d5612235995..3499852c515 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -44,7 +44,7 @@ import java.util.Map; *

    The request requires the query source to be set either using {@link #source(QuerySourceBuilder)}, * or {@link #source(byte[])}. */ -public class ValidateQueryRequest extends BroadcastOperationRequest { +public class ValidateQueryRequest extends BroadcastRequest { private BytesReference source; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 6c0a065d3d2..3d1ef78d2bf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.validate.query; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,7 +36,7 @@ import static org.elasticsearch.action.admin.indices.validate.query.QueryExplana * * */ -public class ValidateQueryResponse extends BroadcastOperationResponse { +public class ValidateQueryResponse extends BroadcastResponse { private boolean valid; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java b/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java index bcf3bad07fc..9e135e9a715 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java @@ -21,7 +21,7 @@ package 
org.elasticsearch.action.admin.indices.warmer.delete; import com.google.common.collect.Lists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -47,7 +47,7 @@ import java.util.List; * * Note: this is an internal API and should not be used / called by any client code. */ -public class TransportDeleteWarmerAction extends TransportMasterNodeOperationAction { +public class TransportDeleteWarmerAction extends TransportMasterNodeAction { @Inject public TransportDeleteWarmerAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java b/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java index e92eb3195d7..0b11e0bcf5d 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ 
-51,7 +51,7 @@ import java.util.List; * * Note: this is an internal API and should not be used / called by any client code. */ -public class TransportPutWarmerAction extends TransportMasterNodeOperationAction { +public class TransportPutWarmerAction extends TransportMasterNodeAction { private final TransportSearchAction searchAction; diff --git a/src/main/java/org/elasticsearch/action/count/CountRequest.java b/src/main/java/org/elasticsearch/action/count/CountRequest.java index dfee07b2ee9..1d35af42e53 100644 --- a/src/main/java/org/elasticsearch/action/count/CountRequest.java +++ b/src/main/java/org/elasticsearch/action/count/CountRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.count; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -52,7 +52,7 @@ import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_ * @see org.elasticsearch.client.Client#count(CountRequest) * @see org.elasticsearch.client.Requests#countRequest(String...) 
*/ -public class CountRequest extends BroadcastOperationRequest { +public class CountRequest extends BroadcastRequest { public static final float DEFAULT_MIN_SCORE = -1f; diff --git a/src/main/java/org/elasticsearch/action/count/CountResponse.java b/src/main/java/org/elasticsearch/action/count/CountResponse.java index 91fd77e596b..916c4ef9373 100644 --- a/src/main/java/org/elasticsearch/action/count/CountResponse.java +++ b/src/main/java/org/elasticsearch/action/count/CountResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.count; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; @@ -31,7 +31,7 @@ import java.util.Arrays; /** * The response of the count action. */ -public class CountResponse extends BroadcastOperationResponse { +public class CountResponse extends BroadcastResponse { private final boolean terminatedEarly; private final long count; diff --git a/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java b/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java index 84c5d32aaf5..32ff0b1e014 100644 --- a/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.exists; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.Strings; @@ -38,7 +38,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -public class ExistsRequest extends BroadcastOperationRequest { +public class ExistsRequest extends BroadcastRequest { public static final float DEFAULT_MIN_SCORE = -1f; private float minScore = DEFAULT_MIN_SCORE; diff --git a/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java b/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java index 6b1c58990dd..f271dc65f7f 100644 --- a/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java +++ b/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java @@ -20,14 +20,14 @@ package org.elasticsearch.action.exists; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.List; -public class ExistsResponse extends BroadcastOperationResponse { +public class ExistsResponse extends BroadcastResponse { private boolean exists = false; diff --git a/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java b/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java index a8f8bff91fa..276e6ea117e 100644 --- a/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.exists; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -29,7 +29,7 @@ import 
org.elasticsearch.index.shard.ShardId; import java.io.IOException; -class ShardExistsRequest extends BroadcastShardOperationRequest { +class ShardExistsRequest extends BroadcastShardRequest { private float minScore; diff --git a/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java b/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java index e94330c042f..25f813ee424 100644 --- a/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java +++ b/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java @@ -19,14 +19,14 @@ package org.elasticsearch.action.exists; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -class ShardExistsResponse extends BroadcastShardOperationResponse { +class ShardExistsResponse extends BroadcastShardResponse { private boolean exists; diff --git a/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java b/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java index cf4e41ce965..f9118e8c05a 100644 --- a/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java +++ b/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import 
org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -39,8 +39,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.script.ScriptService; @@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.action.exists.ExistsRequest.DEFAULT_MIN_SCORE; -public class TransportExistsAction extends TransportBroadcastOperationAction { +public class TransportExistsAction extends TransportBroadcastAction { private final IndicesService indicesService; private final ScriptService scriptService; diff --git a/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index f1b7b571833..8a8eaee36cf 100644 --- a/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.explain; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -35,7 +35,7 @@ 
import java.io.IOException; /** * Explain request encapsulating the explain query and document identifier to get an explanation for. */ -public class ExplainRequest extends SingleShardOperationRequest { +public class ExplainRequest extends SingleShardRequest { private String type = "_all"; private String id; diff --git a/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index c2ca4c7558a..8cfba3dd1ce 100644 --- a/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -33,13 +33,13 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.script.ScriptService; import 
org.elasticsearch.search.internal.DefaultSearchContext; @@ -56,7 +56,7 @@ import java.io.IOException; * Explain transport action. Computes the explain on the targeted shard. */ // TODO: AggregatedDfs. Currently the idf can be different then when executing a normal search with explain. -public class TransportExplainAction extends TransportShardSingleOperationAction { +public class TransportExplainAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java index ff61fe88ee9..e157865ecdf 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.fieldstats; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +29,7 @@ import java.io.IOException; /** */ -public class FieldStatsRequest extends BroadcastOperationRequest { +public class FieldStatsRequest extends BroadcastRequest { public final static String DEFAULT_LEVEL = "cluster"; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java index e6f69e9791a..a8f66ca56ea 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.fieldstats; import 
org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.Map; /** */ -public class FieldStatsResponse extends BroadcastOperationResponse { +public class FieldStatsResponse extends BroadcastResponse { private Map> indicesMergedFieldStats; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java index fb46ff66d3b..0ce83d99296 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.fieldstats; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class FieldStatsShardRequest extends BroadcastShardOperationRequest { +public class FieldStatsShardRequest extends BroadcastShardRequest { private String[] fields; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java index ada4552e94c..c1094ce4d3e 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.fieldstats; 
-import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.util.Map; /** */ -public class FieldStatsShardResponse extends BroadcastShardOperationResponse { +public class FieldStatsShardResponse extends BroadcastShardResponse { private Map fieldStats; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java b/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java index 12a6f41e13d..43e78ec5b87 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -47,10 +47,13 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicReferenceArray; -public class TransportFieldStatsTransportAction extends 
TransportBroadcastOperationAction { +public class TransportFieldStatsTransportAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/get/GetRequest.java b/src/main/java/org/elasticsearch/action/get/GetRequest.java index 6a8497ace71..1d2769017bc 100644 --- a/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,7 +43,7 @@ import java.io.IOException; * @see org.elasticsearch.client.Requests#getRequest(String) * @see org.elasticsearch.client.Client#get(GetRequest) */ -public class GetRequest extends SingleShardOperationRequest { +public class GetRequest extends SingleShardRequest { private String type; private String id; diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java index eb3e25c1330..8a6d552807b 100644 --- a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java +++ b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java @@ -20,19 +20,15 @@ package org.elasticsearch.action.get; import com.carrotsearch.hppc.IntArrayList; -import com.carrotsearch.hppc.LongArrayList; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; 
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; import java.io.IOException; import java.util.ArrayList; import java.util.List; -public class MultiGetShardRequest extends SingleShardOperationRequest { +public class MultiGetShardRequest extends SingleShardRequest { private int shardId; private String preference; diff --git a/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 2324835b9dc..08774a2b421 100644 --- a/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,7 +40,7 @@ import org.elasticsearch.transport.TransportService; /** * Performs the get operation. 
*/ -public class TransportGetAction extends TransportShardSingleOperationAction { +public class TransportGetAction extends TransportSingleShardAction { private final IndicesService indicesService; private final boolean realtime; diff --git a/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index fb6bac8cdc8..fb1b751a9eb 100644 --- a/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -23,21 +23,21 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportShardMultiGetAction extends TransportShardSingleOperationAction { +public class TransportShardMultiGetAction extends TransportSingleShardAction { private static final String ACTION_NAME = MultiGetAction.NAME + "[shard]"; diff --git 
a/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java index f68745e0adf..9c9a3859585 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -43,7 +43,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a percolate operation. 
*/ -public class PercolateRequest extends BroadcastOperationRequest implements CompositeIndicesRequest { +public class PercolateRequest extends BroadcastRequest implements CompositeIndicesRequest { private String documentType; private String routing; @@ -55,7 +55,7 @@ public class PercolateRequest extends BroadcastOperationRequest, ToXContent { +public class PercolateResponse extends BroadcastResponse implements Iterable, ToXContent { public static final Match[] EMPTY = new Match[0]; diff --git a/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java b/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java index 8364eb1610d..f0b1a96e1d5 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.percolate; -import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,7 +30,7 @@ import java.io.IOException; /** */ -public class PercolateShardRequest extends BroadcastShardOperationRequest { +public class PercolateShardRequest extends BroadcastShardRequest { private String documentType; private BytesReference source; diff --git a/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java b/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java index b731208def7..c626cda581e 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java @@ -19,9 +19,8 @@ package 
org.elasticsearch.action.percolate; import com.google.common.collect.ImmutableList; - import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -42,7 +41,7 @@ import java.util.Map; /** */ -public class PercolateShardResponse extends BroadcastShardOperationResponse { +public class PercolateShardResponse extends BroadcastShardResponse { private static final BytesRef[] EMPTY_MATCHES = new BytesRef[0]; private static final float[] EMPTY_SCORES = new float[0]; diff --git a/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index d1ee7be3b19..622b541a0ea 100644 --- a/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -26,7 +26,7 @@ import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -52,7 +52,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportPercolateAction extends TransportBroadcastOperationAction { +public class TransportPercolateAction extends 
TransportBroadcastAction { private final PercolatorService percolatorService; private final TransportGetAction getAction; diff --git a/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java index ce38859174f..adca1883470 100644 --- a/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ b/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; @@ -49,7 +49,7 @@ import java.util.List; /** */ -public class TransportShardMultiPercolateAction extends TransportShardSingleOperationAction { +public class TransportShardMultiPercolateAction extends TransportSingleShardAction { private final PercolatorService percolatorService; @@ -108,7 +108,7 @@ public class TransportShardMultiPercolateAction extends TransportShardSingleOper } - public static class Request extends SingleShardOperationRequest implements IndicesRequest { + public static class Request extends SingleShardRequest implements IndicesRequest { private int shardId; private String preference; diff --git a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java 
b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java index d4c48e5034f..794dd9badf7 100644 --- a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java +++ b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.suggest; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,7 +30,7 @@ import java.io.IOException; /** * Internal suggest request executed directly against a specific index shard. */ -final class ShardSuggestRequest extends BroadcastShardOperationRequest { +final class ShardSuggestRequest extends BroadcastShardRequest { private BytesReference suggestSource; diff --git a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java index f6495c60163..bca29800bd1 100644 --- a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java +++ b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.suggest; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.io.IOException; /** * Internal suggest response of a shard suggest request executed directly against a specific shard. 
*/ -class ShardSuggestResponse extends BroadcastShardOperationResponse { +class ShardSuggestResponse extends BroadcastShardResponse { private final Suggest suggest; diff --git a/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java b/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java index fdb5d7a0117..c75e262bac4 100644 --- a/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java +++ b/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -48,7 +48,7 @@ import java.util.Arrays; * @see org.elasticsearch.client.Requests#suggestRequest(String...) 
* @see org.elasticsearch.search.suggest.SuggestBuilders */ -public final class SuggestRequest extends BroadcastOperationRequest { +public final class SuggestRequest extends BroadcastRequest { @Nullable private String routing; diff --git a/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java b/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java index 24a8922c5f9..445e804b5b5 100644 --- a/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java +++ b/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,7 +35,7 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; /** * The response of the suggest action. 
*/ -public final class SuggestResponse extends BroadcastOperationResponse { +public final class SuggestResponse extends BroadcastResponse { private final Suggest suggest; diff --git a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 9874783ae79..ac046e6552d 100644 --- a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,8 +36,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.suggest.stats.ShardSuggestService; import org.elasticsearch.indices.IndicesService; @@ -58,7 +58,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Defines the transport of a suggestion request across the cluster */ -public class TransportSuggestAction extends TransportBroadcastOperationAction { +public class TransportSuggestAction extends TransportBroadcastAction { 
private final IndicesService indicesService; private final SuggestPhase suggestPhase; diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java index 50fb7b097f2..6170d967002 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.client.ElasticsearchClient; /** */ -public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastOperationResponse, RequestBuilder extends BroadcastOperationRequestBuilder> +public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastResponse, RequestBuilder extends BroadcastOperationRequestBuilder> extends ActionRequestBuilder { protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java similarity index 88% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java index 80eeacedbba..19adbdce01a 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java @@ -31,20 +31,20 @@ import java.io.IOException; /** * */ -public abstract class BroadcastOperationRequest extends ActionRequest implements IndicesRequest.Replaceable { +public abstract class BroadcastRequest extends ActionRequest implements IndicesRequest.Replaceable { protected String[] indices; private IndicesOptions indicesOptions 
= IndicesOptions.strictExpandOpenAndForbidClosed(); - protected BroadcastOperationRequest() { + protected BroadcastRequest() { } - protected BroadcastOperationRequest(ActionRequest originalRequest) { + protected BroadcastRequest(ActionRequest originalRequest) { super(originalRequest); } - protected BroadcastOperationRequest(String[] indices) { + protected BroadcastRequest(String[] indices) { this.indices = indices; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java index e8e2a2aa0ce..560c7ec9869 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java @@ -32,17 +32,17 @@ import static org.elasticsearch.action.support.DefaultShardOperationFailedExcept /** * Base class for all broadcast operation based responses. 
*/ -public abstract class BroadcastOperationResponse extends ActionResponse { +public abstract class BroadcastResponse extends ActionResponse { private static final ShardOperationFailedException[] EMPTY = new ShardOperationFailedException[0]; private int totalShards; private int successfulShards; private int failedShards; private ShardOperationFailedException[] shardFailures = EMPTY; - protected BroadcastOperationResponse() { + protected BroadcastResponse() { } - protected BroadcastOperationResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { + protected BroadcastResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { this.totalShards = totalShards; this.successfulShards = successfulShards; this.failedShards = failedShards; @@ -73,7 +73,7 @@ public abstract class BroadcastOperationResponse extends ActionResponse { /** * The list of shard failures exception. */ - public ShardOperationFailedException[] getShardFailures() { + public ShardOperationFailedException[] getShardFailures() { return shardFailures; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java similarity index 86% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java index f6c917fbf9b..e416cd517e1 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java @@ -32,22 +32,22 @@ import java.io.IOException; /** * */ -public abstract class BroadcastShardOperationRequest extends TransportRequest implements IndicesRequest { +public abstract class BroadcastShardRequest extends TransportRequest implements IndicesRequest { private 
ShardId shardId; protected OriginalIndices originalIndices; - protected BroadcastShardOperationRequest() { + protected BroadcastShardRequest() { } - protected BroadcastShardOperationRequest(ShardId shardId, BroadcastOperationRequest request) { + protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) { super(request); this.shardId = shardId; this.originalIndices = new OriginalIndices(request); } - protected BroadcastShardOperationRequest(ShardId shardId, OriginalIndices originalIndices) { + protected BroadcastShardRequest(ShardId shardId, OriginalIndices originalIndices) { this.shardId = shardId; this.originalIndices = originalIndices; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java similarity index 89% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java index cc0c64b532d..bf7d271bb6f 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java @@ -29,15 +29,15 @@ import java.io.IOException; /** * */ -public abstract class BroadcastShardOperationResponse extends TransportResponse { +public abstract class BroadcastShardResponse extends TransportResponse { ShardId shardId; - protected BroadcastShardOperationResponse() { + protected BroadcastShardResponse() { } - protected BroadcastShardOperationResponse(ShardId shardId) { + protected BroadcastShardResponse(ShardId shardId) { this.shardId = shardId; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java b/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java similarity index 95% rename 
from src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java rename to src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 6ff55467f7d..c77f3ec766b 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public abstract class TransportBroadcastOperationAction +public abstract class TransportBroadcastAction extends HandledTransportAction { protected final ThreadPool threadPool; @@ -52,8 +52,8 @@ public abstract class TransportBroadcastOperationAction request, Class shardRequest, String shardExecutor) { + protected TransportBroadcastAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, Class shardRequest, String shardExecutor) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; diff --git a/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index ea3a1e43bff..870b1077456 100644 --- a/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -33,7 +33,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; * Abstract class that allows to mark action requests that support acknowledgements. * Facilitates consistency across different api. 
*/ -public abstract class AcknowledgedRequest extends MasterNodeOperationRequest implements AckedRequest { +public abstract class AcknowledgedRequest extends MasterNodeRequest implements AckedRequest { public static final TimeValue DEFAULT_ACK_TIMEOUT = timeValueSeconds(30); diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java index 5d7af81927b..0b3b5af36d2 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java @@ -22,15 +22,13 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.unit.TimeValue; /** * Base request builder for master node operations */ -public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> +public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> extends ActionRequestBuilder { protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java index 02c83298c25..7955abfbe96 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java +++ 
b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java @@ -21,14 +21,12 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; /** * Base request builder for master node read operations that can be executed on the local node as well */ -public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> +public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> extends MasterNodeOperationRequestBuilder { protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java index df47cae3f1a..b190a6e93c4 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support.master; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +27,7 @@ import java.io.IOException; /** * Base request for master based read operations that allows to read the cluster state from the local node if needed */ 
-public abstract class MasterNodeReadOperationRequest extends MasterNodeOperationRequest { +public abstract class MasterNodeReadRequest extends MasterNodeRequest { protected boolean local = false; diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index e1e90e932ff..d3621cac58c 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -29,17 +29,17 @@ import java.io.IOException; /** * A based request for master based operation. */ -public abstract class MasterNodeOperationRequest extends ActionRequest { +public abstract class MasterNodeRequest extends ActionRequest { public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); protected TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT; - protected MasterNodeOperationRequest() { + protected MasterNodeRequest() { } - protected MasterNodeOperationRequest(ActionRequest request) { + protected MasterNodeRequest(ActionRequest request) { super(request); } diff --git a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java similarity index 94% rename from src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java rename to src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 9e1c662cf60..f8c0c07da58 100644 --- a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java +++ 
b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -36,20 +36,23 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportService; /** * A base class for operations that needs to be performed on the master node. */ -public abstract class TransportMasterNodeOperationAction extends HandledTransportAction { +public abstract class TransportMasterNodeAction extends HandledTransportAction { protected final TransportService transportService; protected final ClusterService clusterService; final String executor; - protected TransportMasterNodeOperationAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - Class request) { + protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, + Class request) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; diff --git a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java similarity index 80% rename from src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java rename to 
src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index 383de7ceb53..c33b9fde774 100644 --- a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -30,13 +30,13 @@ import org.elasticsearch.transport.TransportService; * A base class for read operations that needs to be performed on the master node. * Can also be executed on the local node if needed. */ -public abstract class TransportMasterNodeReadOperationAction extends TransportMasterNodeOperationAction { +public abstract class TransportMasterNodeReadAction extends TransportMasterNodeAction { public static final String FORCE_LOCAL_SETTING = "action.master.force_local"; private Boolean forceLocal; - protected TransportMasterNodeReadOperationAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { + protected TransportMasterNodeReadAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { super(settings, actionName, transportService, clusterService, threadPool, actionFilters,request); this.forceLocal = settings.getAsBoolean(FORCE_LOCAL_SETTING, null); } diff --git a/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 5f0d34675b4..fbd095bb874 100644 --- a/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.support.master.info; import org.elasticsearch.action.IndicesRequest; import 
org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,7 +30,7 @@ import java.io.IOException; /** */ -public abstract class ClusterInfoRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public abstract class ClusterInfoRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private String[] types = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java b/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java index fdf6352ef18..d1bdb86e1bb 100644 --- a/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java +++ b/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.support.master.info; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; @@ -30,7 +30,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public abstract class TransportClusterInfoAction extends TransportMasterNodeReadOperationAction { +public abstract class TransportClusterInfoAction extends TransportMasterNodeReadAction { public 
TransportClusterInfoAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { super(settings, actionName, transportService, clusterService, threadPool, actionFilters, request); diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java similarity index 88% rename from src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java index 4d8a42619ad..e25577e2f70 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java @@ -28,15 +28,15 @@ import java.io.IOException; /** * */ -public abstract class NodeOperationRequest extends TransportRequest { +public abstract class BaseNodeRequest extends TransportRequest { private String nodeId; - protected NodeOperationRequest() { + protected BaseNodeRequest() { } - protected NodeOperationRequest(NodesOperationRequest request, String nodeId) { + protected BaseNodeRequest(BaseNodesRequest request, String nodeId) { super(request); this.nodeId = nodeId; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java index 3415a07c8a2..33e4596e9b2 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java @@ -29,14 +29,14 @@ import java.io.IOException; /** * A base class for node 
level operations. */ -public abstract class NodeOperationResponse extends TransportResponse { +public abstract class BaseNodeResponse extends TransportResponse { private DiscoveryNode node; - protected NodeOperationResponse() { + protected BaseNodeResponse() { } - protected NodeOperationResponse(DiscoveryNode node) { + protected BaseNodeResponse(DiscoveryNode node) { assert node != null; this.node = node; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java index 113e03f1f3c..462c873c633 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java @@ -31,7 +31,7 @@ import java.io.IOException; /** * */ -public abstract class NodesOperationRequest extends ActionRequest { +public abstract class BaseNodesRequest extends ActionRequest { public static String[] ALL_NODES = Strings.EMPTY_ARRAY; @@ -39,16 +39,16 @@ public abstract class NodesOperationRequest ext private TimeValue timeout; - protected NodesOperationRequest() { + protected BaseNodesRequest() { } - protected NodesOperationRequest(ActionRequest request, String... nodesIds) { + protected BaseNodesRequest(ActionRequest request, String... nodesIds) { super(request); this.nodesIds = nodesIds; } - protected NodesOperationRequest(String... nodesIds) { + protected BaseNodesRequest(String... 
nodesIds) { this.nodesIds = nodesIds; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java similarity index 79% rename from src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java index b2a699529bb..db0a2a89aa7 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java @@ -34,16 +34,16 @@ import java.util.Map; /** * */ -public abstract class NodesOperationResponse extends ActionResponse implements Iterable { +public abstract class BaseNodesResponse extends ActionResponse implements Iterable { private ClusterName clusterName; - protected NodeResponse[] nodes; - private Map nodesMap; + protected TNodeResponse[] nodes; + private Map nodesMap; - protected NodesOperationResponse() { + protected BaseNodesResponse() { } - protected NodesOperationResponse(ClusterName clusterName, NodeResponse[] nodes) { + protected BaseNodesResponse(ClusterName clusterName, TNodeResponse[] nodes) { this.clusterName = clusterName; this.nodes = nodes; } @@ -64,23 +64,23 @@ public abstract class NodesOperationResponse iterator() { + public Iterator iterator() { return getNodesMap().values().iterator(); } - public Map getNodesMap() { + public Map getNodesMap() { if (nodesMap == null) { nodesMap = Maps.newHashMap(); - for (NodeResponse nodeResponse : nodes) { + for (TNodeResponse nodeResponse : nodes) { nodesMap.put(nodeResponse.getNode().id(), nodeResponse); } } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java index 303671db595..cf8190f2c32 100644 --- 
a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.unit.TimeValue; /** */ -public abstract class NodesOperationRequestBuilder, Response extends NodesOperationResponse, RequestBuilder extends NodesOperationRequestBuilder> +public abstract class NodesOperationRequestBuilder, Response extends BaseNodesResponse, RequestBuilder extends NodesOperationRequestBuilder> extends ActionRequestBuilder { protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java b/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java similarity index 87% rename from src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java rename to src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 78503930357..065f4ad744a 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -39,7 +39,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public abstract class TransportNodesOperationAction extends HandledTransportAction { +public abstract class TransportNodesAction extends HandledTransportAction { protected final ClusterName clusterName; protected final ClusterService clusterService; @@ -47,9 +47,9 @@ public abstract class TransportNodesOperationAction request, Class nodeRequest, String nodeExecutor) { + protected TransportNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, + ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, Class nodeRequest, String 
nodeExecutor) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterName = clusterName; this.clusterService = clusterService; @@ -61,7 +61,7 @@ public abstract class TransportNodesOperationAction listener) { + protected void doExecute(NodesRequest request, ActionListener listener) { new AsyncAction(request, listener).start(); } @@ -69,9 +69,9 @@ public abstract class TransportNodesOperationAction listener; + private final ActionListener listener; private final ClusterState clusterState; private final AtomicReferenceArray responses; private final AtomicInteger counter = new AtomicInteger(); - private AsyncAction(Request request, ActionListener listener) { + private AsyncAction(NodesRequest request, ActionListener listener) { this.request = request; this.listener = listener; clusterState = clusterService.state(); @@ -179,7 +179,7 @@ public abstract class TransportNodesOperationAction, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> +public abstract class SingleShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> extends ActionRequestBuilder { protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java b/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java similarity index 89% rename from src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java index 74db0435709..180ea877618 100644 --- a/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java @@ -33,7 +33,7 @@ import 
java.io.IOException; /** * */ -public abstract class SingleShardOperationRequest extends ActionRequest implements IndicesRequest { +public abstract class SingleShardRequest extends ActionRequest implements IndicesRequest { ShardId internalShardId; @@ -41,18 +41,18 @@ public abstract class SingleShardOperationRequest extends TransportAction { +public abstract class TransportSingleShardAction extends TransportAction { protected final ClusterService clusterService; @@ -53,8 +53,8 @@ public abstract class TransportShardSingleOperationAction request, String executor) { + protected TransportSingleShardAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, String executor) { super(settings, actionName, threadPool, actionFilters); this.clusterService = clusterService; this.transportService = transportService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java index 96fdd91c84f..c33e32eabb4 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.termvectors; import com.carrotsearch.hppc.IntArrayList; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -public class MultiTermVectorsShardRequest extends SingleShardOperationRequest { +public class MultiTermVectorsShardRequest extends SingleShardRequest { private int shardId; private 
String preference; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 9198013d307..bc1332e4da3 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -50,7 +50,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; * Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are * required. 
*/ -public class TermVectorsRequest extends SingleShardOperationRequest implements DocumentRequest { +public class TermVectorsRequest extends SingleShardRequest implements DocumentRequest { private String type; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index f2e9a16d989..9aad9cb479b 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; @@ -36,7 +36,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportShardMultiTermsVectorAction extends TransportShardSingleOperationAction { +public class TransportShardMultiTermsVectorAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index e03abfee230..623ced5d5ef 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ 
-22,15 +22,15 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Performs the get operation. 
*/ -public class TransportTermVectorsAction extends TransportShardSingleOperationAction { +public class TransportTermVectorsAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java index db4624d7464..0171a90ec95 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; @@ -38,7 +38,7 @@ import java.util.Set; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -public class DfsOnlyRequest extends BroadcastOperationRequest { +public class DfsOnlyRequest extends BroadcastRequest { private SearchRequest searchRequest = new SearchRequest(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java index 150e7e2289b..db1cddff046 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.termvectors.dfs; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -32,7 +32,7 @@ import java.util.List; /** * A response of a dfs only request. */ -public class DfsOnlyResponse extends BroadcastOperationResponse { +public class DfsOnlyResponse extends BroadcastResponse { private AggregatedDfs dfs; private long tookInMillis; diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java index c18892aed67..687910c5922 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.termvectors.dfs; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,7 +29,7 @@ import org.elasticsearch.search.internal.ShardSearchTransportRequest; import java.io.IOException; -class ShardDfsOnlyRequest extends BroadcastShardOperationRequest { +class ShardDfsOnlyRequest extends BroadcastShardRequest { private ShardSearchTransportRequest shardSearchRequest = new ShardSearchTransportRequest(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java index 8f414467972..688a475ea64 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.action.termvectors.dfs; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.io.IOException; /** * */ -class ShardDfsOnlyResponse extends BroadcastShardOperationResponse { +class ShardDfsOnlyResponse extends BroadcastShardResponse { private DfsSearchResult dfsSearchResult = new DfsSearchResult(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java index c1d698725e9..682ff47377c 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -51,7 +51,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Get the dfs only with no fetch phase. This is for internal use only. 
*/ -public class TransportDfsOnlyAction extends TransportBroadcastOperationAction { +public class TransportDfsOnlyAction extends TransportBroadcastAction { public static final String NAME = "internal:index/termvectors/dfs"; diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index 5b66fafec12..3d5d938bde4 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; @@ -223,7 +223,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { final String index; TimeValue timeout = TimeValue.timeValueSeconds(10); - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public Request(String index) { this.index = index; diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 3e945b161eb..840532e8637 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -23,7 +23,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.action.admin.indices.alias.Alias; -import 
org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; @@ -231,7 +231,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { List aliases = Lists.newArrayList(); Map customs = Maps.newHashMap(); - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; @@ -304,7 +304,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { public static class RemoveRequest { final String name; - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; diff --git a/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index 3b7c765d7d1..4573e2dac12 100644 --- a/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -24,8 +24,8 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; 
@@ -48,29 +48,29 @@ import java.util.*; * and once the results are back, it makes sure to schedule a reroute to make sure those results will * be taken into account. */ -public abstract class AsyncShardFetch implements Releasable { +public abstract class AsyncShardFetch implements Releasable { /** * An action that lists the relevant shard data that needs to be fetched. */ - public interface List, NodeResponse extends NodeOperationResponse> { + public interface List, NodeResponse extends BaseNodeResponse> { void list(ShardId shardId, IndexMetaData indexMetaData, String[] nodesIds, ActionListener listener); } protected final ESLogger logger; protected final String type; private final ShardId shardId; - private final List, T> action; + private final List, T> action; private final Map> cache = new HashMap<>(); private final Set nodesToIgnore = new HashSet<>(); private boolean closed; @SuppressWarnings("unchecked") - protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, List, T> action) { + protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, List, T> action) { this.logger = logger; this.type = type; this.shardId = shardId; - this.action = (List, T>) action; + this.action = (List, T>) action; } public synchronized void close() { @@ -253,9 +253,9 @@ public abstract class AsyncShardFetch implement // visible for testing void asyncFetch(final ShardId shardId, final String[] nodesIds, final MetaData metaData) { IndexMetaData indexMetaData = metaData.index(shardId.getIndex()); - action.list(shardId, indexMetaData, nodesIds, new ActionListener>() { + action.list(shardId, indexMetaData, nodesIds, new ActionListener>() { @Override - public void onResponse(NodesOperationResponse response) { + public void onResponse(BaseNodesResponse response) { processAsyncFetch(shardId, response.getNodes(), response.failures()); } @@ -274,7 +274,7 @@ public abstract class AsyncShardFetch implement * The result of a fetch operation. 
Make sure to first check {@link #hasData()} before * fetching the actual data. */ - public static class FetchResult { + public static class FetchResult { private final ShardId shardId; private final Map data; diff --git a/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index a7647f73e50..f4385947dc8 100644 --- a/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -25,8 +25,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -513,12 +513,12 @@ public class GatewayAllocator extends AbstractComponent { return changed; } - static class InternalAsyncFetch extends AsyncShardFetch { + static class InternalAsyncFetch extends AsyncShardFetch { private final ClusterService clusterService; private final AllocationService allocationService; - public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List, T> action, + public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List, T> action, ClusterService clusterService, AllocationService allocationService) { super(logger, type, shardId, action); this.clusterService = clusterService; diff --git a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 900a2e7ffc7..6fa20433283 100644 --- 
a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -45,7 +45,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListGatewayMetaState extends TransportNodesOperationAction { +public class TransportNodesListGatewayMetaState extends TransportNodesAction { public static final String ACTION_NAME = "internal:gateway/local/meta_state"; @@ -113,7 +113,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { public Request() { } @@ -133,7 +133,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } } - public static class NodesGatewayMetaState extends NodesOperationResponse { + public static class NodesGatewayMetaState extends BaseNodesResponse { private FailedNodeException[] failures; @@ -170,7 +170,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { NodeRequest() { } @@ -190,7 +190,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } } - public static class NodeGatewayMetaState extends NodeOperationResponse { + public static class NodeGatewayMetaState extends BaseNodeResponse { private MetaData metaData; diff --git a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 649e0a1c31a..d5692b3a5aa 100644 --- a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -50,7 +50,7 @@ import 
java.util.concurrent.atomic.AtomicReferenceArray; * We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate * shards after node or cluster restarts. */ -public class TransportNodesListGatewayStartedShards extends TransportNodesOperationAction +public class TransportNodesListGatewayStartedShards extends TransportNodesAction implements AsyncShardFetch.List { public static final String ACTION_NAME = "internal:gateway/local/started_shards"; @@ -148,7 +148,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private ShardId shardId; private String indexUUID; @@ -186,7 +186,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } } - public static class NodesGatewayStartedShards extends NodesOperationResponse { + public static class NodesGatewayStartedShards extends BaseNodesResponse { private FailedNodeException[] failures; @@ -221,7 +221,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private ShardId shardId; private String indexUUID; @@ -258,7 +258,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } } - public static class NodeGatewayStartedShards extends NodeOperationResponse { + public static class NodeGatewayStartedShards extends BaseNodeResponse { private long version = -1; diff --git a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 04cfa6bda3d..3490f6d2d8c 100644 --- a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ 
b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -58,7 +58,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListShardStoreMetaData extends TransportNodesOperationAction +public class TransportNodesListShardStoreMetaData extends TransportNodesAction implements AsyncShardFetch.List { public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; @@ -241,7 +241,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private ShardId shardId; @@ -277,7 +277,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } } - public static class NodesStoreFilesMetaData extends NodesOperationResponse { + public static class NodesStoreFilesMetaData extends BaseNodesResponse { private FailedNodeException[] failures; @@ -314,7 +314,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private ShardId shardId; @@ -344,7 +344,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } } - public static class NodeStoreFilesMetaData extends NodeOperationResponse { + public static class NodeStoreFilesMetaData extends BaseNodeResponse { private StoreFilesMetaData storeFilesMetaData; diff --git a/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 67ceb83c33b..bd17c1d5944 100644 --- a/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -22,7 +22,7 @@ package org.elasticsearch.rest.action.support; import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; @@ -62,7 +62,7 @@ public class RestActions { static final XContentBuilderString FAILURES = new XContentBuilderString("failures"); } - public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, BroadcastOperationResponse response) throws IOException { + public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, BroadcastResponse response) throws IOException { buildBroadcastShardsHeader(builder, params, response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), response.getShardFailures()); } diff --git a/src/main/java/org/elasticsearch/tribe/TribeService.java b/src/main/java/org/elasticsearch/tribe/TribeService.java index 540d3254e5c..546977c3aca 100644 --- a/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -99,7 +99,7 @@ public class TribeService extends AbstractLifecycleComponent { if (sb.get("cluster.name") == null) { sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); 
// make sure it won't join other tribe nodes in the same JVM } - sb.put(TransportMasterNodeReadOperationAction.FORCE_LOCAL_SETTING, true); + sb.put(TransportMasterNodeReadAction.FORCE_LOCAL_SETTING, true); return sb.build(); } diff --git a/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java b/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java index 2c27101c5d2..4b3ad487495 100644 --- a/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java +++ b/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java @@ -32,14 +32,9 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.index.query.QueryBuilders.andQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.Matchers.lessThan; /** @@ -90,7 +85,7 @@ public class HotThreadsTest extends ElasticsearchIntegrationTest { boolean success = false; try { assertThat(nodeHotThreads, notNullValue()); - Map nodesMap = nodeHotThreads.getNodesMap(); + Map nodesMap = nodeHotThreads.getNodesMap(); assertThat(nodesMap.size(), equalTo(cluster().size())); for (NodeHotThreads ht : nodeHotThreads) { assertNotNull(ht.getHotThreads()); diff --git a/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index b8e97efe31d..ece5757355b 100644 --- 
a/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -22,7 +22,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableSet; import org.elasticsearch.Version; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -314,7 +314,7 @@ public class AsyncShardFetchTests extends ElasticsearchTestCase { } - static class Response extends NodeOperationResponse { + static class Response extends BaseNodeResponse { public Response(DiscoveryNode node) { super(node); diff --git a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 3cc47378116..819c1d5ab1d 100644 --- a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -50,7 +50,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.block.ClusterBlock; @@ -152,7 +152,7 @@ public class ElasticsearchAssertions { } } - public static String formatShardStatus(BroadcastOperationResponse response) { + public 
static String formatShardStatus(BroadcastResponse response) { String msg = " Total shards: " + response.getTotalShards() + " Successful shards: " + response.getSuccessfulShards() + " & " + response.getFailedShards() + " shard failures:"; for (ShardOperationFailedException failure : response.getShardFailures()) { @@ -321,12 +321,12 @@ public class ElasticsearchAssertions { assertVersionSerializable(percolateResponse); } - public static void assertNoFailures(BroadcastOperationResponse response) { + public static void assertNoFailures(BroadcastResponse response) { assertThat("Unexpected ShardFailures: " + Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); assertVersionSerializable(response); } - public static void assertAllSuccessful(BroadcastOperationResponse response) { + public static void assertAllSuccessful(BroadcastResponse response) { assertNoFailures(response); assertThat("Expected all shards successful but got successful [" + response.getSuccessfulShards() + "] total [" + response.getTotalShards() + "]", response.getTotalShards(), equalTo(response.getSuccessfulShards())); From 6d269cbf4d808f50a4953a0c60e1de08cb911276 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 27 May 2015 10:29:37 +0300 Subject: [PATCH 023/123] feedback --- docs/reference/indices/flush.asciidoc | 19 +++++------ docs/reference/setup/upgrade.asciidoc | 6 ++-- .../flush/IndicesSyncedFlushResult.java | 5 +++ .../flush/ShardsSyncedFlushResult.java | 32 ------------------- .../indices/flush/SyncedFlushService.java | 8 +++++ .../indices/flush/RestSyncedFlushAction.java | 2 +- .../indices/flush/SyncedFlushUnitTests.java | 2 ++ 7 files changed, 29 insertions(+), 45 deletions(-) diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index 99aa589d866..a1ebfd1d866 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -50,7 +50,7 @@ POST /_flush === Synced Flush Elasticsearch 
tracks the indexing activity of each shards. Shards that have not -received any indexing operations for, by default, 30m are automatically marked as inactive. This presents +received any indexing operations for 30 minutes (configurable) are automatically marked as inactive. This presents an opportunity for Elasticsearch to reduce shard resources and also perform a special kind of flush, called `synced flush`. A synced flush performs normal flushing and adds a special uniquely generated marker (`sync_id`) to all shards. @@ -79,18 +79,18 @@ GET /twitter/_stats/commit?level=shards [float] === Synced Flush API -The Synced Flush API allows an administrator to initiate a synced flush manually. This can particularly useful for -a planned (rolling) cluster restart where one can stop indexing and doesn't want to wait for the default 30m to pass +The Synced Flush API allows an administrator to initiate a synced flush manually. This can be particularly useful for +a planned (rolling) cluster restart where one can stop indexing and doesn't want to wait for the default 30 minutes to pass when the synced flush will be performed automatically. While handy, there are a couple of caveats for this API: 1. Synced flush is a best effort operation. Any ongoing indexing operations will cause the synced flush to fail. This means that some shards may be synced flushed while others aren't. See below for more. -2. The `sync_id` marker is removed as soon as the shard is flushed again. Uncommitted -operations in the transaction log do not remove the marker. That is because the marker is store as part -of a low level lucene commit, representing a point in time snapshot of the segments. In practice, one should consider -any indexing operation on an index as removing the marker. +2. The `sync_id` marker is removed as soon as the shard is flushed again. That is because a flush replaces the low level +lucene commit point where the marker is stored. 
Uncommitted operations in the transaction log do not remove the marker. +In practice, one should consider any indexing operation on an index as removing the marker as a flush can be triggered by Elasticsearch +at any time. [source,bash] @@ -99,7 +99,7 @@ POST /twitter/_flush/synced -------------------------------------------------- // AUTOSENSE -The response contains details about how many shards were successfully synced-flushed and information about any failure. +The response contains details about how many shards were successfully sync-flushed and information about any failure. Here is what it looks like when all shards of a two shards and one replica index successfully sync-flushed: @@ -146,7 +146,8 @@ Here is what it looks like when one shard group failed due to pending operations -------------------------------------------------- -Sometimes the failures are specific to a shard copy, in which case they will be reported as follows: +Sometimes the failures are specific to a shard copy. The copies that failed will not be eligible for +fast recovery but those that succeeded still will be. This case is reported as follows: [source,js] -------------------------------------------------- diff --git a/docs/reference/setup/upgrade.asciidoc b/docs/reference/setup/upgrade.asciidoc index 61708755006..9f46fbaf059 100644 --- a/docs/reference/setup/upgrade.asciidoc +++ b/docs/reference/setup/upgrade.asciidoc @@ -92,10 +92,10 @@ curl -XPUT localhost:9200/_cluster/settings -d '{ }' -------------------------------------------------- -* There is no problem to continue indexing while doing the upgrade. However, you can speed the process considerably -by stopping indexing temporarily to non-essential indices and issuing a manual <>. +* There is no problem continuing to index while doing the upgrade. However, you can speed the process considerably +by *temporarily* stopping non-essential indexing and issuing a manual <>. 
A synced flush is special kind of flush which can seriously speed up recovery of shards. Elasticsearch automatically -uses it when an index has been inactive for a while (default is `30m`) but you can manually trigger it using the following command: +uses it when an index has been inactive for a while (default is `30m`) but you can manually trigger it using the following command: [source,sh] -------------------------------------------------- diff --git a/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java b/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java index 7c0a680b383..f625f04484a 100644 --- a/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java +++ b/src/main/java/org/elasticsearch/indices/flush/IndicesSyncedFlushResult.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.List; @@ -58,6 +59,10 @@ public class IndicesSyncedFlushResult implements ToXContent { return shardCounts.successful; } + public RestStatus restStatus() { return failedShards() == 0 ? 
RestStatus.OK : RestStatus.CONFLICT; + } + public Map> getShardsResultPerIndex() { return shardsResultPerIndex; } diff --git a/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java b/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java index cdf8a2495d8..1388373ff36 100644 --- a/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java +++ b/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java @@ -126,38 +126,6 @@ public class ShardsSyncedFlushResult { return shardResponses; } -// @Override -// public void writeTo(StreamOutput out) throws IOException { -// super.writeTo(out); -// out.writeOptionalString(failureReason); -// out.writeOptionalString(syncId); -// out.writeVInt(totalShards); -// out.writeVInt(shardResponses.size()); -// for (Map.Entry result : shardResponses.entrySet()) { -// result.getKey().writeTo(out); -// result.getValue().writeTo(out); -// } -// shardId.writeTo(out); -// } - -// @Override -// public void readFrom(StreamInput in) throws IOException { -// super.readFrom(in); -// failureReason = in.readOptionalString(); -// syncId = in.readOptionalString(); -// totalShards = in.readVInt(); -// int size = in.readVInt(); -// ImmutableMap.Builder builder = ImmutableMap.builder(); -// for (int i = 0; i < size; i++) { -// ImmutableShardRouting shardRouting = ImmutableShardRouting.readShardRoutingEntry(in); -// SyncedFlushService.SyncedFlushResponse syncedFlushRsponse = new SyncedFlushService.SyncedFlushResponse(); -// syncedFlushRsponse.readFrom(in); -// builder.put(shardRouting, syncedFlushRsponse); -// } -// shardResponses = builder.build(); -// shardId = ShardId.readShardId(in); -// } - public ShardId shardId() { return shardId; } diff --git a/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index b9447ed01f6..537392a9c98 100644 --- 
a/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -98,6 +98,10 @@ public class SyncedFlushService extends AbstractComponent { }); } + /** + * a utility method to perform a synced flush for all shards of multiple indices. see {@link #attemptSyncedFlush(ShardId, ActionListener)} + * for more details. + */ public void attemptSyncedFlush(final String[] aliasesOrIndices, IndicesOptions indicesOptions, final ActionListener listener) { final ClusterState state = clusterService.state(); final String[] concreteIndices = state.metaData().concreteIndices(indicesOptions, aliasesOrIndices); @@ -111,6 +115,10 @@ public class SyncedFlushService extends AbstractComponent { results.put(index, Collections.synchronizedList(new ArrayList())); } + if (numberOfShards == 0) { + listener.onResponse(new IndicesSyncedFlushResult(results)); + return; + } final int finalTotalNumberOfShards = totalNumberOfShards; final CountDown countDown = new CountDown(numberOfShards); diff --git a/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index 82a1d5f8fd3..9a3f844abb1 100644 --- a/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java +++ b/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -62,7 +62,7 @@ public class RestSyncedFlushAction extends BaseRestHandler { builder.startObject(); results.toXContent(builder, request); builder.endObject(); - return new BytesRestResponse(RestStatus.OK, builder); + return new BytesRestResponse(results.restStatus(), builder); } }); } diff --git a/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java index fcf80c19d67..426ec36d608 100644 --- 
a/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java +++ b/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUnitTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.flush.IndicesSyncedFlushResult.ShardCounts; import org.elasticsearch.indices.flush.SyncedFlushService.SyncedFlushResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ElasticsearchTestCase; import java.io.IOException; @@ -56,6 +57,7 @@ public class SyncedFlushUnitTests extends ElasticsearchTestCase { assertThat(testPlan.result.totalShards(), equalTo(testPlan.totalCounts.total)); assertThat(testPlan.result.successfulShards(), equalTo(testPlan.totalCounts.successful)); assertThat(testPlan.result.failedShards(), equalTo(testPlan.totalCounts.failed)); + assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK)); Map asMap = convertToMap(testPlan.result); assertShardCount("_shards header", (Map) asMap.get("_shards"), testPlan.totalCounts); From 8ec6bf7340c23ae4c24d08ef8ff6538a5304f0ab Mon Sep 17 00:00:00 2001 From: Ryan Schneider Date: Wed, 1 Apr 2015 13:51:07 -0400 Subject: [PATCH 024/123] [DOCS] Update get.asciidoc Updated to not mislead the reader that the data is actually gone when a document is updated. For example if you have 100GB of docs and update each one you'll only be able to access 100GB of the data, but there would theoretically be 200GB of doc data. Closes #10375 --- docs/reference/docs/get.asciidoc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index cf2db23056c..ea1b6176578 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -228,5 +228,7 @@ it's current version is equal to the specified one. 
This behavior is the same for all version types with the exception of version type `FORCE` which always retrieves the document. -Note that Elasticsearch do not store older versions of documents. Only the current version can be retrieved. - +Internally, Elasticsearch has marked the old document as deleted and added an +entirely new document. The old version of the document doesn’t disappear +immediately, although you won’t be able to access it. Elasticsearch cleans up +deleted documents in the background as you continue to index more data. From fc28bc73f8c985669bdcf1b6bd2905b131d2e0ea Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 27 May 2015 10:28:53 +0200 Subject: [PATCH 025/123] [DOCS] add kopf to site plugins --- docs/reference/modules/plugins.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/modules/plugins.asciidoc b/docs/reference/modules/plugins.asciidoc index 3a9f867a4dd..40c288280cc 100644 --- a/docs/reference/modules/plugins.asciidoc +++ b/docs/reference/modules/plugins.asciidoc @@ -293,6 +293,7 @@ deprecated[1.5.0,Rivers have been deprecated. 
See https://www.elastic.co/blog/d * https://github.com/karmi/elasticsearch-paramedic[Paramedic Plugin] (by Karel Minařík) * https://github.com/polyfractal/elasticsearch-segmentspy[SegmentSpy Plugin] (by Zachary Tong) * https://github.com/xyu/elasticsearch-whatson[Whatson Plugin] (by Xiao Yu) +* https://github.com/lmenezes/elasticsearch-kopf[Kopf Plugin] (by lmenezes) [float] [[repository-plugins]] From ee26ab8bb4258cb90cb101ebd280f18d2a8f42d7 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 27 May 2015 11:22:37 +0200 Subject: [PATCH 026/123] Java api: add name setter to delete index template request and make default constructor public for AnalyzeRequest and DeleteIndexTemplateRequest Closes #8122 Closes #8123 --- .../action/admin/indices/analyze/AnalyzeRequest.java | 5 +---- .../template/delete/DeleteIndexTemplateRequest.java | 10 +++++++++- .../delete/DeleteIndexTemplateRequestBuilder.java | 8 ++++++++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index d631f8b8d0a..655defeddaf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -18,10 +18,8 @@ */ package org.elasticsearch.action.admin.indices.analyze; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.single.custom.SingleCustomOperationRequest; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -48,8 +46,7 @@ public class AnalyzeRequest extends SingleCustomOperationRequest private String field; - AnalyzeRequest() { - + public AnalyzeRequest() { } /** diff --git 
a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index 42907787803..f33d35b66a5 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -34,7 +34,7 @@ public class DeleteIndexTemplateRequest extends MasterNodeRequest Date: Wed, 22 Apr 2015 16:02:20 +0200 Subject: [PATCH 027/123] Add common SystemD file for RPM/DEB package --- docs/reference/setup/repositories.asciidoc | 11 +- pom.xml | 30 +++- .../common/systemd/elasticsearch.conf | 1 + .../common/systemd/elasticsearch.service | 50 ++++++ .../common/systemd/sysctl/elasticsearch.conf | 1 + src/packaging/deb/init.d/elasticsearch | 5 +- .../deb/systemd/elasticsearch.service | 30 ---- src/packaging/rpm/init.d/elasticsearch | 3 +- src/packaging/rpm/systemd/elasticsearch.conf | 1 - .../rpm/systemd/elasticsearch.service | 25 --- .../rpm/systemd/sysctl.d/elasticsearch.conf | 1 - .../packaging/scripts/50_systemd.bats | 146 ++++++++++++++++++ .../scripts/packaging_test_utils.bash | 11 +- 13 files changed, 247 insertions(+), 68 deletions(-) create mode 100644 src/packaging/common/systemd/elasticsearch.conf create mode 100644 src/packaging/common/systemd/elasticsearch.service create mode 100644 src/packaging/common/systemd/sysctl/elasticsearch.conf delete mode 100644 src/packaging/deb/systemd/elasticsearch.service delete mode 100644 src/packaging/rpm/systemd/elasticsearch.conf delete mode 100644 src/packaging/rpm/systemd/elasticsearch.service delete mode 100644 src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf create mode 100644 src/test/resources/packaging/scripts/50_systemd.bats diff --git a/docs/reference/setup/repositories.asciidoc b/docs/reference/setup/repositories.asciidoc index 
964913be94a..5e1f5eeb671 100644 --- a/docs/reference/setup/repositories.asciidoc +++ b/docs/reference/setup/repositories.asciidoc @@ -51,13 +51,22 @@ Run apt-get update and the repository is ready for use. You can install it with: sudo apt-get update && sudo apt-get install elasticsearch -------------------------------------------------- -Configure Elasticsearch to automatically start during bootup: +Configure Elasticsearch to automatically start during bootup. If your +distribution is using SysV init, then you will need to run: [source,sh] -------------------------------------------------- sudo update-rc.d elasticsearch defaults 95 10 -------------------------------------------------- +Otherwise if your distribution is using systemd: + +[source,sh] +-------------------------------------------------- +sudo /bin/systemctl daemon-reload +sudo /bin/systemctl enable elasticsearch.service +-------------------------------------------------- + [float] === YUM diff --git a/pom.xml b/pom.xml index f644ff91bbf..b6df03658ef 100644 --- a/pom.xml +++ b/pom.xml @@ -43,6 +43,9 @@ /var/log/elasticsearch ${packaging.elasticsearch.home.dir}/plugins /var/run/elasticsearch + /usr/lib/systemd/system + /usr/lib/sysctl.d + /usr/lib/tmpfiles.d false dpkg-sig @@ -800,7 +803,19 @@ ${project.build.directory}/generated-packaging/deb/systemd/elasticsearch.service - /usr/lib/systemd/system/elasticsearch.service + ${packaging.elasticsearch.systemd.dir}/elasticsearch.service + file + + + + ${project.build.directory}/generated-packaging/deb/systemd/sysctl/elasticsearch.conf + ${packaging.elasticsearch.systemd.sysctl.dir}/elasticsearch.conf + file + + + + ${project.build.directory}/generated-packaging/deb/systemd/elasticsearch.conf + ${packaging.elasticsearch.tmpfilesd.dir}/elasticsearch.conf file @@ -978,8 +993,8 @@ - /usr/lib/systemd/system/ - 755 + ${packaging.elasticsearch.systemd.dir} + false true @@ -990,21 +1005,22 @@ + - /usr/lib/sysctl.d/ - 755 + 
${packaging.elasticsearch.systemd.sysctl.dir} true - ${project.build.directory}/generated-packaging/rpm/systemd/sysctl.d + ${project.build.directory}/generated-packaging/rpm/systemd/sysctl elasticsearch.conf + - /usr/lib/tmpfiles.d/ + ${packaging.elasticsearch.tmpfilesd.dir} true diff --git a/src/packaging/common/systemd/elasticsearch.conf b/src/packaging/common/systemd/elasticsearch.conf new file mode 100644 index 00000000000..98dd5e61c25 --- /dev/null +++ b/src/packaging/common/systemd/elasticsearch.conf @@ -0,0 +1 @@ +d ${packaging.elasticsearch.pid.dir} 0755 ${packaging.elasticsearch.user} ${packaging.elasticsearch.group} - - diff --git a/src/packaging/common/systemd/elasticsearch.service b/src/packaging/common/systemd/elasticsearch.service new file mode 100644 index 00000000000..a4c269973bf --- /dev/null +++ b/src/packaging/common/systemd/elasticsearch.service @@ -0,0 +1,50 @@ +[Unit] +Description=Elasticsearch +Documentation=http://www.elastic.co +Wants=network-online.target +After=network-online.target + +[Service] +Environment=ES_HOME=${packaging.elasticsearch.home.dir} +Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} +Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml +Environment=DATA_DIR=${packaging.elasticsearch.data.dir} +Environment=LOG_DIR=${packaging.elasticsearch.log.dir} +Environment=PID_DIR=${packaging.elasticsearch.pid.dir} +EnvironmentFile=-${packaging.env.file} + +User=${packaging.elasticsearch.user} +Group=${packaging.elasticsearch.group} + +ExecStart=${packaging.elasticsearch.bin.dir}/elasticsearch \ + -Des.pidfile=$PID_DIR/elasticsearch.pid \ + -Des.default.path.home=$ES_HOME \ + -Des.default.path.logs=$LOG_DIR \ + -Des.default.path.data=$DATA_DIR \ + -Des.default.config=$CONF_FILE \ + -Des.default.path.conf=$CONF_DIR + +# Connects standard output to /dev/null +StandardOutput=null + +# Connects standard error to journal +StandardError=journal + +# When a JVM receives a SIGTERM signal it exits with code 143 
+SuccessExitStatus=143 + +# Specifies the maximum file descriptor number that can be opened by this process +LimitNOFILE=${packaging.os.max.open.files} + +# Specifies the maximum number of bytes of memory that may be locked into RAM +# Set to "infinity" if you use the 'bootstrap.mlockall: true' option +# in elasticsearch.yml and 'MAX_LOCKED_MEMORY=unlimited' in ${packaging.env.file} +#LimitMEMLOCK=infinity + +# Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) +TimeoutStopSec=20 + +[Install] +WantedBy=multi-user.target + +# Built for ${project.name}-${project.version} (${packaging.type}) diff --git a/src/packaging/common/systemd/sysctl/elasticsearch.conf b/src/packaging/common/systemd/sysctl/elasticsearch.conf new file mode 100644 index 00000000000..052cd89cf0b --- /dev/null +++ b/src/packaging/common/systemd/sysctl/elasticsearch.conf @@ -0,0 +1 @@ +vm.max_map_count=${packaging.os.max.map.count} diff --git a/src/packaging/deb/init.d/elasticsearch b/src/packaging/deb/init.d/elasticsearch index 336030310cc..ad192157231 100755 --- a/src/packaging/deb/init.d/elasticsearch +++ b/src/packaging/deb/init.d/elasticsearch @@ -94,6 +94,9 @@ MAX_MAP_COUNT=262144 # Path to the GC log file #ES_GC_LOG_FILE=/var/log/elasticsearch/gc.log +# Elasticsearch PID file directory +PID_DIR="${packaging.elasticsearch.pid.dir}" + # End of variables that can be overwritten in $DEFAULT # overwrite settings from default file @@ -102,7 +105,7 @@ if [ -f "$DEFAULT" ]; then fi # Define other required variables -PID_FILE=/var/run/$NAME.pid +PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch DAEMON_OPTS="-d -p $PID_FILE --default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$LOG_DIR --default.path.data=$DATA_DIR --default.path.conf=$CONF_DIR" diff --git a/src/packaging/deb/systemd/elasticsearch.service b/src/packaging/deb/systemd/elasticsearch.service deleted file mode 100644 index 1945f9fb12a..00000000000 --- 
a/src/packaging/deb/systemd/elasticsearch.service +++ /dev/null @@ -1,30 +0,0 @@ -[Unit] -Description=Starts and stops a single elasticsearch instance on this system -Documentation=http://www.elasticsearch.org -Wants=network-online.target -After=network-online.target - -[Service] -Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml -Environment=ES_HOME=${packaging.elasticsearch.home.dir} -Environment=LOG_DIR=${packaging.elasticsearch.log.dir} -Environment=DATA_DIR=${packaging.elasticsearch.data.dir} -Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} -EnvironmentFile=-${packaging.env.file} -User=elasticsearch -Group=elasticsearch -ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ - -Des.default.config=$CONF_FILE \ - -Des.default.path.home=$ES_HOME \ - -Des.default.path.logs=$LOG_DIR \ - -Des.default.path.data=$DATA_DIR \ - -Des.default.path.conf=$CONF_DIR -# See MAX_OPEN_FILES in sysconfig -LimitNOFILE=65535 -# See MAX_LOCKED_MEMORY in sysconfig, use "infinity" when MAX_LOCKED_MEMORY=unlimited and using bootstrap.mlockall: true -#LimitMEMLOCK=infinity -# Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) -TimeoutStopSec=20 - -[Install] -WantedBy=multi-user.target diff --git a/src/packaging/rpm/init.d/elasticsearch b/src/packaging/rpm/init.d/elasticsearch index 1b666c64578..5fe9d748952 100644 --- a/src/packaging/rpm/init.d/elasticsearch +++ b/src/packaging/rpm/init.d/elasticsearch @@ -41,6 +41,7 @@ LOG_DIR="${packaging.elasticsearch.log.dir}" DATA_DIR="${packaging.elasticsearch.data.dir}" CONF_DIR="${packaging.elasticsearch.conf.dir}" CONF_FILE="${packaging.elasticsearch.conf.dir}/elasticsearch.yml" +PID_DIR="${packaging.elasticsearch.pid.dir}" # Source the default env file ES_ENV_FILE="${packaging.env.file}" @@ -50,7 +51,7 @@ fi exec="$ES_HOME/bin/elasticsearch" prog="elasticsearch" -pidfile=/var/run/elasticsearch/${prog}.pid +pidfile="$PID_DIR/${prog}.pid" export ES_HEAP_SIZE export 
ES_HEAP_NEWSIZE diff --git a/src/packaging/rpm/systemd/elasticsearch.conf b/src/packaging/rpm/systemd/elasticsearch.conf deleted file mode 100644 index 9db225e74a7..00000000000 --- a/src/packaging/rpm/systemd/elasticsearch.conf +++ /dev/null @@ -1 +0,0 @@ -d /run/elasticsearch 0755 elasticsearch elasticsearch - - diff --git a/src/packaging/rpm/systemd/elasticsearch.service b/src/packaging/rpm/systemd/elasticsearch.service deleted file mode 100644 index f5bb286b8fa..00000000000 --- a/src/packaging/rpm/systemd/elasticsearch.service +++ /dev/null @@ -1,25 +0,0 @@ -[Unit] -Description=Starts and stops a single elasticsearch instance on this system -Documentation=http://www.elasticsearch.org - -[Service] -Type=forking -Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml -Environment=ES_HOME=${packaging.elasticsearch.home.dir} -Environment=LOG_DIR=${packaging.elasticsearch.log.dir} -Environment=DATA_DIR=${packaging.elasticsearch.data.dir} -Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} -EnvironmentFile=-${packaging.env.file} -User=elasticsearch -Group=elasticsearch -PIDFile=/var/run/elasticsearch/elasticsearch.pid -ExecStart=/usr/share/elasticsearch/bin/elasticsearch -d -p /var/run/elasticsearch/elasticsearch.pid -Des.default.config=$CONF_FILE -Des.default.path.home=$ES_HOME -Des.default.path.logs=$LOG_DIR -Des.default.path.data=$DATA_DIR -Des.default.path.conf=$CONF_DIR -# See MAX_OPEN_FILES in sysconfig -LimitNOFILE=65535 -# See MAX_LOCKED_MEMORY in sysconfig, use "infinity" when MAX_LOCKED_MEMORY=unlimited and using bootstrap.mlockall: true -#LimitMEMLOCK=infinity -# Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) -TimeoutStopSec=20 - -[Install] -WantedBy=multi-user.target diff --git a/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf b/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf deleted file mode 100644 index 62ea54d8697..00000000000 --- 
a/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf +++ /dev/null @@ -1 +0,0 @@ -vm.max_map_count=262144 diff --git a/src/test/resources/packaging/scripts/50_systemd.bats b/src/test/resources/packaging/scripts/50_systemd.bats new file mode 100644 index 00000000000..addd4ff1ac7 --- /dev/null +++ b/src/test/resources/packaging/scripts/50_systemd.bats @@ -0,0 +1,146 @@ +#!/usr/bin/env bats + +# This file is used to test the elasticsearch Systemd setup. + +# WARNING: This testing file must be executed as root and can +# dramatically change your system. It removes the 'elasticsearch' +# user/group and also many directories. Do not execute this file +# unless you know exactly what you are doing. + +# The test case can be executed with the Bash Automated +# Testing System tool available at https://github.com/sstephenson/bats +# Thanks to Sam Stephenson! + +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +# Load test utilities +load packaging_test_utils + +# Cleans everything for the 1st execution +setup() { + if [ "$BATS_TEST_NUMBER" -eq 1 ]; then + clean_before_test + fi + + + # Installs a package before test + if is_dpkg; then + dpkg -i elasticsearch*.deb >&2 || true + fi + if is_rpm; then + rpm -i elasticsearch*.rpm >&2 || true + fi +} + +@test "[SYSTEMD] daemon reload" { + skip_not_systemd + + run systemctl daemon-reload + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] enable" { + skip_not_systemd + + run systemctl enable elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl is-enabled elasticsearch.service + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] start" { + skip_not_systemd + + run systemctl start elasticsearch.service + [ "$status" -eq 0 ] + + wait_for_elasticsearch_status + + assert_file_exist "/var/run/elasticsearch/elasticsearch.pid" +} + +@test "[SYSTEMD] start (running)" { + skip_not_systemd + + run systemctl start elasticsearch.service + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] is active (running)" { + skip_not_systemd + + run systemctl is-active elasticsearch.service + [ "$status" -eq 0 ] + [ "$output" = "active" ] +} + +@test "[SYSTEMD] status (running)" { + skip_not_systemd + + run systemctl status elasticsearch.service + [ "$status" -eq 0 ] +} + +################################## +# Check that Elasticsearch is working +################################## +@test "[SYSTEMD] test elasticsearch" { + skip_not_systemd + + run_elasticsearch_tests +} + +@test "[SYSTEMD] restart" { + skip_not_systemd + + run systemctl restart elasticsearch.service + [ "$status" -eq 0 ] + + wait_for_elasticsearch_status + + run service elasticsearch status + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] stop (running)" { + skip_not_systemd + + run systemctl stop elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} + +@test "[SYSTEMD] stop (stopped)" { + skip_not_systemd + + 
run systemctl stop elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} + +@test "[SYSTEMD] status (stopped)" { + skip_not_systemd + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} diff --git a/src/test/resources/packaging/scripts/packaging_test_utils.bash b/src/test/resources/packaging/scripts/packaging_test_utils.bash index b1058c641da..b5fe262efd1 100644 --- a/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -217,6 +217,12 @@ verify_package_installation() { # License file assert_file "/usr/share/elasticsearch/LICENSE.txt" f root 644 fi + + if is_systemd; then + assert_file "/usr/lib/systemd/system/elasticsearch.service" f root 644 + assert_file "/usr/lib/tmpfiles.d/elasticsearch.conf" f root 644 + assert_file "/usr/lib/sysctl.d/elasticsearch.conf" f root 644 + fi } @@ -294,7 +300,10 @@ clean_before_test() { "/etc/sysconfig/elasticsearch" \ "/var/run/elasticsearch" \ "/usr/share/doc/elasticsearch" \ - "/tmp/elasticsearch") + "/tmp/elasticsearch" \ + "/usr/lib/systemd/system/elasticsearch.conf" \ + "/usr/lib/tmpfiles.d/elasticsearch.conf" \ + "/usr/lib/sysctl.d/elasticsearch.conf") if [ "$ES_CLEAN_BEFORE_TEST" = "true" ]; then # Kills all processes of user elasticsearch From 9bb56c0e001869b4a017e0e4a0d1eefc06262996 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Tue, 26 May 2015 16:26:03 +0200 Subject: [PATCH 028/123] Ignore dependency-reduced-pom.xml even if used in submodules --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 3b0aecc1081..a31eff71e7d 100644 --- a/.gitignore +++ b/.gitignore @@ -37,4 +37,4 @@ eclipse-build nb-configuration.xml nbactions.xml -/dependency-reduced-pom.xml +dependency-reduced-pom.xml From 5384e4712ae6f7c89074e1054dd3f58b8bdb433d Mon Sep 17 
00:00:00 2001 From: David Pilato Date: Tue, 26 May 2015 16:27:32 +0200 Subject: [PATCH 029/123] Fix maven assembly warning about using root dir It's a bad practice in Maven to define `/` as the output dir. It's better to leave it empty. See also http://stackoverflow.com/questions/28500401/maven-assembly-plugin-warning-the-assembly-descriptor-contains-a-filesystem-roo --- src/main/assemblies/common-bin.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/assemblies/common-bin.xml b/src/main/assemblies/common-bin.xml index f9b0a0ae215..49a00eab4ee 100644 --- a/src/main/assemblies/common-bin.xml +++ b/src/main/assemblies/common-bin.xml @@ -74,15 +74,15 @@ README.textile - / + LICENSE.txt - / + NOTICE.txt - / + From 37bdbe074a6f89c1da9e0b58013dad79872580fe Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 27 May 2015 15:40:02 +0300 Subject: [PATCH 030/123] doc feedback --- docs/reference/indices/flush.asciidoc | 65 ++++++++++++++++++++++----- 1 file changed, 55 insertions(+), 10 deletions(-) diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index a1ebfd1d866..118a68b7128 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -49,25 +49,25 @@ POST /_flush [[indices-synced-flush]] === Synced Flush -Elasticsearch tracks the indexing activity of each shards. Shards that have not -received any indexing operations for 30 minutes (configurable) are automatically marked as inactive. This presents +Elasticsearch tracks the indexing activity of each shard. Shards that have not +received any indexing operations for 30 minutes are automatically marked as inactive. This presents an opportunity for Elasticsearch to reduce shard resources and also perform -a special kind of flush, called `synced flush`. A synced flush performs normal -flushing and adds a special uniquely generated marker (`sync_id`) to all shards. +a special kind of flush, called `synced flush`. 
A synced flush performs a normal flush, then adds +a generated unique marker (sync_id) to all shards. Since the sync id marker was added when there were no ongoing indexing operations, it can -be used as a quick way to check if two shards indices are identical. This quick sync id +be used as a quick way to check if the two shards' lucene indices are identical. This quick sync id comparison (if present) is used during recovery or restarts to skip the first and most costly phase of the process. In that case, no segment files need to be copied and the transaction log replay phase of the recovery can start immediately. Note that since the sync id -marker was applied together with a flush, it is highly likely that the transaction log will be empty, +marker was applied together with a flush, it is very likely that the transaction log will be empty, speeding up recoveries even more. This is particularly useful for use cases having lots of indices which are never or very rarely updated, such as time based data. This use case typically generates lots of indices whose recovery without the synced flush marker would take a long time. -To check whether a shard has a marker or not, one can use the `commit` section of shard stats returned by +To check whether a shard has a marker or not, look for the `commit` section of shard stats returned by the <> API: [source,bash] @@ -76,23 +76,64 @@ GET /twitter/_stats/commit?level=shards -------------------------------------------------- // AUTOSENSE + +which returns something similar to: + +[source,js] +-------------------------------------------------- +{ + ... + "indices": { + "twitter": { + "primaries": {}, + "total": {}, + "shards": { + "0": [ + { + "routing": { + ... + }, + "commit": { + "id": "te7zF7C4UsirqvL6jp/vUg==", + "generation": 2, + "user_data": { + "sync_id": "AU2VU0meX-VX2aNbEUsD" <1>, + ... + }, + "num_docs": 0 + } + } + ... + ], + ... 
+ } + } + } +} +-------------------------------------------------- +<1> the `sync id` marker + [float] === Synced Flush API The Synced Flush API allows an administrator to initiate a synced flush manually. This can be particularly useful for -a planned (rolling) cluster restart where one can stop indexing and doesn't want to wait for the default 30 minutes to pass -when the synced flush will be performed automatically. +a planned (rolling) cluster restart where you can stop indexing and don't want to wait the default 30 minutes for +idle indices to be sync-flushed automatically. While handy, there are a couple of caveats for this API: 1. Synced flush is a best effort operation. Any ongoing indexing operations will cause -the synced flush to fail. This means that some shards may be synced flushed while others aren't. See below for more. +the synced flush to fail on that shard. This means that some shards may be synced flushed while others aren't. See below for more. 2. The `sync_id` marker is removed as soon as the shard is flushed again. That is because a flush replaces the low level lucene commit point where the marker is stored. Uncommitted operations in the transaction log do not remove the marker. In practice, one should consider any indexing operation on an index as removing the marker as a flush can be triggered by Elasticsearch at any time. +NOTE: It is harmless to request a synced flush while there is ongoing indexing. Shards that are idle will succeed and shards + that are not will fail. Any shards that succeeded will have faster recovery times. + + [source,bash] -------------------------------------------------- POST /twitter/_flush/synced @@ -145,6 +186,8 @@ Here is what it looks like when one shard group failed due to pending operations } -------------------------------------------------- +NOTE: The above error is shown when the synced flush failes due to concurrent indexing operations. The HTTP +status code in that case will be `409 CONFLICT`. 
Sometimes the failures are specific to a shard copy. The copies that failed will not be eligible for fast recovery but those that succeeded still will be. This case is reported as follows: @@ -180,6 +223,8 @@ fast recovery but those that succeeded still will be. This case is reported as f -------------------------------------------------- +NOTE: When a shard copy fails to sync-flush, the HTTP status code returned will be `409 CONFLICT`. + The synced flush API can be applied to more than one index with a single call, or even on `_all` the indices. From fc224a0de8153652ed2d0e409bc2dbd8746068e8 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 27 May 2015 12:05:32 +0200 Subject: [PATCH 031/123] Cat API: Add wildcard support for header names This adds wildcard support (simple regexes) for specifying header names. Aliases are supported as well. Closes #10811 --- docs/reference/cat.asciidoc | 6 +- .../test/cat.thread_pool/10_basic.yaml | 12 +++ .../rest/action/support/RestTable.java | 48 +++++++++-- .../rest/action/support/RestTableTest.java | 82 +++++++++++++++++++ 4 files changed, 141 insertions(+), 7 deletions(-) create mode 100644 src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index d857006d2b1..bc29cc92d64 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -66,6 +66,10 @@ only those columns to appear. 192.168.56.30 9300 43.9 Ramsey, Doug -------------------------------------------------- +You can also request multiple columns using simple wildcards like +`/_cat/thread_pool?h=ip,bulk.*` to get all headers (or aliases) starting +with `bulk.`. 
+ [float] [[numeric-formats]] === Numeric formats @@ -120,4 +124,4 @@ include::cat/thread_pool.asciidoc[] include::cat/shards.asciidoc[] -include::cat/segments.asciidoc[] \ No newline at end of file +include::cat/segments.asciidoc[] diff --git a/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/test/cat.thread_pool/10_basic.yaml index edb87ce27b9..37994201191 100755 --- a/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -29,6 +29,18 @@ / #pid id host ip port ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \s+ \n)+ $/ + + - do: + cat.thread_pool: + h: bulk.m* + + - match: + $body: | + /^ bulk.max \s+ bulk.min \s+ \n + (\s+ \d+ \s+ \d+ \s+ \n)+ $/ + +#(\s+ \d+ \s+ \d+ \n)+ $/ + - do: cat.thread_pool: h: id,ba,fa,gea,ga,ia,maa,ma,oa,pa diff --git a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 27141aa08df..0b5b44112f4 100644 --- a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -32,8 +33,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.*; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.util.*; /** */ @@ -96,11 +96,12 @@ public class RestTable { return new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, bytesOut.bytes()); } - private static List buildDisplayHeaders(Table table, RestRequest 
request) { - String pHeaders = request.param("h"); + static List buildDisplayHeaders(Table table, RestRequest request) { List display = new ArrayList<>(); - if (pHeaders != null) { - for (String possibility : Strings.splitStringByCommaToArray(pHeaders)) { + if (request.hasParam("h")) { + Set headers = expandHeadersFromRequest(table, request); + + for (String possibility : headers) { DisplayHeader dispHeader = null; if (table.getAsMap().containsKey(possibility)) { @@ -147,6 +148,41 @@ public class RestTable { return display; } + /** + * Extracts all the required fields from the RestRequest 'h' parameter. In order to support wildcards like + * 'bulk.*' this needs potentially parse all the configured headers and its aliases and needs to ensure + * that everything is only added once to the returned headers, even if 'h=bulk.*.bulk.*' is specified + * or some headers are contained twice due to matching aliases + */ + private static Set expandHeadersFromRequest(Table table, RestRequest request) { + Set headers = new LinkedHashSet<>(table.getHeaders().size()); + + Map headerMap = table.getHeaderMap(); + // check headers and aliases + for (String header : Strings.splitStringByCommaToArray(request.param("h"))) { + if (Regex.isSimpleMatchPattern(header)) { + for (Map.Entry configuredHeaderEntry : headerMap.entrySet()) { + String configuredHeader = configuredHeaderEntry.getKey(); + if (Regex.simpleMatch(header, configuredHeader)) { + headers.add(configuredHeader); + } else if (configuredHeaderEntry.getValue().attr.containsKey("alias")) { + String[] aliases = Strings.splitStringByCommaToArray(configuredHeaderEntry.getValue().attr.get("alias")); + for (String alias : aliases) { + if (Regex.simpleMatch(header, alias)) { + headers.add(configuredHeader); + break; + } + } + } + } + } else { + headers.add(header); + } + } + + return headers; + } + public static int[] buildHelpWidths(Table table, RestRequest request) { int[] width = new int[3]; for (Table.Cell cell : 
table.getHeaders()) { diff --git a/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java new file mode 100644 index 00000000000..bdf0a07cbf5 --- /dev/null +++ b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.support; + +import org.elasticsearch.common.Table; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.junit.Before; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.rest.action.support.RestTable.buildDisplayHeaders; +import static org.hamcrest.Matchers.*; + +public class RestTableTest extends ElasticsearchTestCase { + + private Table table = new Table(); + private FakeRestRequest restRequest = new FakeRestRequest(); + + @Before + public void setup() { + table.startHeaders(); + table.addCell("bulk.foo", "alias:f;desc:foo"); + table.addCell("bulk.bar", "alias:b;desc:bar"); + // should be matched as well due to the aliases + table.addCell("aliasedBulk", "alias:bulkWhatever;desc:bar"); + table.addCell("aliasedSecondBulk", "alias:foobar,bulkolicious,bulkotastic;desc:bar"); + // no match + table.addCell("unmatched", "alias:un.matched;desc:bar"); + // invalid alias + table.addCell("invalidAliasesBulk", "alias:,,,;desc:bar"); + table.endHeaders(); + } + + @Test + public void testThatDisplayHeadersSupportWildcards() throws Exception { + restRequest.params().put("h", "bulk*"); + List headers = buildDisplayHeaders(table, restRequest); + + List headerNames = getHeaderNames(headers); + assertThat(headerNames, containsInAnyOrder("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, not(hasItem("unmatched"))); + } + + @Test + public void testThatDisplayHeadersAreNotAddedTwice() throws Exception { + restRequest.params().put("h", "nonexistent,bulk*,bul*"); + List headers = buildDisplayHeaders(table, restRequest); + + List headerNames = getHeaderNames(headers); + assertThat(headerNames, containsInAnyOrder("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, not(hasItem("unmatched"))); + } + + private List getHeaderNames(List headers) { + 
List headerNames = new ArrayList<>(); + for (RestTable.DisplayHeader header : headers) { + headerNames.add(header.name); + } + + return headerNames; + } +} From 7fbd86aa97fabf6535f9b1ea3d79035ea4927f42 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Wed, 27 May 2015 14:34:05 +0100 Subject: [PATCH 032/123] Aggregations: Fixed Moving Average prediction to calculate the correct keys The Moving average predict code generated incorrect keys if the key for the first bucket of the histogram was < 0. This fix makes the moving average use the rounding class from the histogram to generate the keys for the new buckets. Closes #11369 --- .../movavg/MovAvgPipelineAggregator.java | 8 +-- .../pipeline/moving/avg/MovAvgTests.java | 64 ++++++++++++++++++- 2 files changed, 64 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index c6e3d943bb8..cc1e6682e70 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -111,7 +111,6 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { EvictingQueue values = EvictingQueue.create(this.window); long lastKey = 0; - long interval = Long.MAX_VALUE; Object currentKey; for (InternalHistogram.Bucket bucket : buckets) { @@ -135,10 +134,8 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { if (predict > 0) { if (currentKey instanceof Number) { - interval = Math.min(interval, ((Number) bucket.getKey()).longValue() - lastKey); lastKey = ((Number) bucket.getKey()).longValue(); } else if (currentKey instanceof DateTime) { - interval = Math.min(interval, ((DateTime) bucket.getKey()).getMillis() - lastKey); lastKey = ((DateTime) bucket.getKey()).getMillis(); } else 
{ throw new AggregationExecutionException("Expected key of type Number or DateTime but got [" + currentKey + "]"); @@ -147,7 +144,6 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { } - if (buckets.size() > 0 && predict > 0) { boolean keyed; @@ -159,9 +155,11 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { for (int i = 0; i < predictions.length; i++) { List aggs = new ArrayList<>(); aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList(), metaData())); - InternalHistogram.Bucket newBucket = factory.createBucket(lastKey + (interval * (i + 1)), 0, new InternalAggregations( + long newKey = histo.getRounding().nextRoundingValue(lastKey); + InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations( aggs), keyed, formatter); newBuckets.add(newBucket); + lastKey = newKey; } } diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index 38da141ad5c..0e0eb239ce0 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.moving.avg; import com.google.common.collect.EvictingQueue; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -32,6 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; import 
org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; @@ -51,7 +51,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -59,8 +58,12 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; @@ -154,6 +157,11 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { .field(INTERVAL_FIELD, 49) .field(GAP_FIELD, 1).endObject())); + for (int i = -10; i < 10; i++) { + builders.add(client().prepareIndex("neg_idx", "type").setSource( + jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 
10).endObject())); + } + indexRandom(true, builders); ensureSearchable(); } @@ -514,6 +522,56 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test + public void testPredictNegativeKeysAtStart() { + + SearchResponse response = client() + .prepareSearch("neg_idx") + .setTypes("type") + .addAggregation( + histogram("histo") + .field(INTERVAL_FIELD) + .interval(1) + .subAggregation(avg("avg").field(VALUE_FIELD)) + .subAggregation( + movingAvg("movavg_values").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(5).setBucketsPaths("avg"))).execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(25)); + + for (int i = 0; i < 20; i++) { + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((long) bucket.getKey(), equalTo((long) i - 10)); + assertThat(bucket.getDocCount(), equalTo(1l)); + Avg avgAgg = bucket.getAggregations().get("avg"); + assertThat(avgAgg, notNullValue()); + assertThat(avgAgg.value(), equalTo(10d)); + SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values"); + assertThat(movAvgAgg, notNullValue()); + assertThat(movAvgAgg.value(), equalTo(10d)); + } + + for (int i = 20; i < 25; i++) { + System.out.println(i); + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((long) bucket.getKey(), equalTo((long) i - 10)); + assertThat(bucket.getDocCount(), equalTo(0l)); + Avg avgAgg = bucket.getAggregations().get("avg"); + assertThat(avgAgg, nullValue()); + SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values"); + assertThat(movAvgAgg, notNullValue()); + assertThat(movAvgAgg.value(), equalTo(10d)); + } + } + @Test public void 
testSizeZeroWindow() { try { From 57653df7133d02fa09b67343232f33c096100e16 Mon Sep 17 00:00:00 2001 From: markharwood Date: Wed, 27 May 2015 12:52:13 +0100 Subject: [PATCH 033/123] Search fix: fielddata_fields query string parameter was ignored. The RestSearchAction did not parse the fielddata_fields parameter. Added test case and missing parser code. Closes #11025 --- rest-api-spec/test/search/10_source_filtering.yaml | 6 ++++++ .../rest/action/search/RestSearchAction.java | 14 ++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/test/search/10_source_filtering.yaml index 40a67ba7e1f..1a2a79a80e4 100644 --- a/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/test/search/10_source_filtering.yaml @@ -89,4 +89,10 @@ query: { match_all: {} } - match: { hits.hits.0.fields: { include.field2 : [v2] }} - is_true: hits.hits.0._source + + + - do: + search: + fielddata_fields: ["count"] + - match: { hits.hits.0.fields.count: [1] } diff --git a/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 0d06031a92e..550b0f63978 100644 --- a/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -195,6 +195,20 @@ public class RestSearchAction extends BaseRestHandler { } } } + String sFieldDataFields = request.param("fielddata_fields"); + if (sFieldDataFields != null) { + if (searchSourceBuilder == null) { + searchSourceBuilder = new SearchSourceBuilder(); + } + if (Strings.hasText(sFieldDataFields)) { + String[] sFields = Strings.splitStringByCommaToArray(sFieldDataFields); + if (sFields != null) { + for (String field : sFields) { + searchSourceBuilder.fieldDataField(field); + } + } + } + } FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request); if 
(fetchSourceContext != null) { if (searchSourceBuilder == null) { From 37610548f88f5975e468dee4cdc7a0e29a5a470e Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Tue, 26 May 2015 18:31:18 +0200 Subject: [PATCH 034/123] highlighting: don't fail search request when name of highlighted field contains wildcards When we highlight on fields using wildcards then fields might match that cannot be highlighted by the specified highlighter. The whole search request then failed. Instead, check that the field can be highlighted and ignore the field if it can't. In addition ignore the exception thrown by plain highlighter if a field conatins terms larger than 32766. closes #9881 --- .../highlight/FastVectorHighlighter.java | 7 +- .../search/highlight/HighlightPhase.java | 24 ++-- .../search/highlight/Highlighter.java | 4 + .../search/highlight/PlainHighlighter.java | 15 ++- .../search/highlight/PostingsHighlighter.java | 7 +- .../search/highlight/CustomHighlighter.java | 6 + .../highlight/HighlighterSearchTests.java | 126 ++++++++++++++++-- 7 files changed, 168 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index 715dacae39d..c997624ff60 100644 --- a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -63,7 +63,7 @@ public class FastVectorHighlighter implements Highlighter { FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; FieldMapper mapper = highlighterContext.mapper; - if (!(mapper.fieldType().storeTermVectors() && mapper.fieldType().storeTermVectorOffsets() && mapper.fieldType().storeTermVectorPositions())) { + if (canHighlight(mapper) == false) { throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with term vector with position offsets to be used 
with fast vector highlighter"); } @@ -177,6 +177,11 @@ public class FastVectorHighlighter implements Highlighter { } } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions(); + } + private class MapperHighlightEntry { public FragListBuilder fragListBuilder; public FragmentsBuilder fragmentsBuilder; diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index 10afac729ba..f005f5be7c3 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.highlight; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -45,6 +44,8 @@ import static com.google.common.collect.Maps.newHashMap; */ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { + private static final ImmutableList STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = ImmutableList.of("fvh", "postings", "plain"); + private final Highlighters highlighters; @Inject @@ -91,6 +92,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { } } + boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { FieldMapper fieldMapper = getMapperForField(fieldName, context, hitContext); if (fieldMapper == null) { @@ -99,16 +101,14 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { String highlighterType = field.fieldOptions().highlighterType(); if 
(highlighterType == null) { - boolean useFastVectorHighlighter = fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions(); - if (useFastVectorHighlighter) { - highlighterType = "fvh"; - } else if (fieldMapper.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { - highlighterType = "postings"; - } else { - highlighterType = "plain"; + for(String highlighterCandidate : STANDARD_HIGHLIGHTERS_BY_PRECEDENCE) { + if (highlighters.get(highlighterCandidate).canHighlight(fieldMapper)) { + highlighterType = highlighterCandidate; + break; + } } + assert highlighterType != null; } - Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + "] for the field [" + fieldName + "]"); @@ -116,13 +116,17 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { Query highlightQuery = field.fieldOptions().highlightQuery() == null ? 
context.parsedQuery().query() : field.fieldOptions().highlightQuery(); HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery); + + if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) { + // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight + continue; + } HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { highlightFields.put(highlightField.name(), highlightField); } } } - hitContext.hit().highlightFields(highlightFields); } diff --git a/src/main/java/org/elasticsearch/search/highlight/Highlighter.java b/src/main/java/org/elasticsearch/search/highlight/Highlighter.java index 407cdc7b1ae..26c3dc0bf21 100644 --- a/src/main/java/org/elasticsearch/search/highlight/Highlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/Highlighter.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.search.highlight; +import org.elasticsearch.index.mapper.FieldMapper; + /** * */ @@ -26,4 +28,6 @@ public interface Highlighter { String[] names(); HighlightField highlight(HighlighterContext highlighterContext); + + boolean canHighlight(FieldMapper fieldMapper); } diff --git a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 12d7d08fa8f..460b2df05cd 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.search.highlight.*; +import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; import 
org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; @@ -117,7 +118,14 @@ public class PlainHighlighter implements Highlighter { } } } catch (Exception e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + if (e instanceof BytesRefHash.MaxBytesLengthExceededException) { + // this can happen if for example a field is not_analyzed and ignore_above option is set. + // the field will be ignored when indexing but the huge term is still in the source and + // the plain highlighter will parse the source and try to analyze it. + return null; + } else { + throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + } } if (field.fieldOptions().scoreOrdered()) { CollectionUtil.introSort(fragsList, new Comparator() { @@ -164,6 +172,11 @@ public class PlainHighlighter implements Highlighter { return null; } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return true; + } + private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, TokenStream tokenStream) throws IOException { try { if (!tokenStream.hasAttribute(OffsetAttribute.class)) { diff --git a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index 0375ff204f7..dcbb810d4dd 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -50,7 +50,7 @@ public class PostingsHighlighter implements Highlighter { FieldMapper fieldMapper = highlighterContext.mapper; SearchContextHighlight.Field field = highlighterContext.field; - if (fieldMapper.fieldType().indexOptions() != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { + if (canHighlight(fieldMapper) == false) { throw new IllegalArgumentException("the field [" + 
highlighterContext.fieldName + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter"); } @@ -126,6 +126,11 @@ public class PostingsHighlighter implements Highlighter { return null; } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return fieldMapper.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; + } + private static String mergeFieldValues(List fieldValues, char valuesSeparator) { //postings highlighter accepts all values in a single string, as offsets etc. need to match with content //loaded from stored fields, we merge all values using a proper separator diff --git a/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java index 2845af198a2..3a9135cb731 100644 --- a/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java +++ b/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight; import com.google.common.collect.Lists; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.mapper.FieldMapper; import java.util.List; import java.util.Locale; @@ -68,6 +69,11 @@ public class CustomHighlighter implements Highlighter { return new HighlightField(highlighterContext.fieldName, responses.toArray(new Text[]{})); } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return true; + } + private static class CacheEntry { private int position; private int docId; diff --git a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java index b3e723f213a..7a0ebf57738 100644 --- a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java +++ 
b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java @@ -51,12 +51,124 @@ import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.*; @Slow public class HighlighterSearchTests extends ElasticsearchIntegrationTest { + @Test + public void testHighlightingWithWildcardName() throws IOException { + // test the kibana case with * as fieldname that will try highlight all fields including meta fields + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + } + + @Test + public void testPlainHighlighterWithLongUnanalyzedStringTerm() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + 
.startObject("long_text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .field("ignore_above", 1) + .endObject() + .startObject("text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + // crate a term that is larger than the allowed 32766, index it and then try highlight on it + // the search request should still succeed + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < 32767; i++) { + builder.append('a'); + } + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("long_text", builder.toString()).field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get(); + assertNoFailures(search); + assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); + } + + @Test + public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("unstored_field") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + 
.field("type", "string") + .field("store", "no") + .endObject() + .startObject("text") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .field("type", "string") + .field("store", "yes") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("unstored_text")).get(); + assertNoFailures(search); + assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); + } + + @Test // see #3486 public void testHighTermFrequencyDoc() throws IOException { @@ -1171,12 +1283,11 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest { RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); - assertFailures(client().prepareSearch() + //should not fail if there is a wildcard + assertNoFailures(client().prepareSearch() .setQuery(matchPhraseQuery("title", "this is a test")) .addHighlightedField("tit*", 50, 1, 10) - .setHighlighterType("fast-vector-highlighter"), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); + .setHighlighterType("fast-vector-highlighter").get()); } @Test @@ -2169,12 +2280,11 
@@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest { RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); - assertFailures(client().prepareSearch() + //should not fail if there is a wildcard + assertNoFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("tit*") - .setHighlighterType("postings"), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); + .setHighlighterType("postings").get()); } @Test From 9d5e7895080cd186f8cea327f4fa43445476bee6 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 27 May 2015 17:38:31 +0200 Subject: [PATCH 035/123] Cat API: Do not rely on hashmap for sorted entries The tests for the recently added wildcard feature were relying on the order of the hashmap being used, which could be different. The implementation now ensures that the header fields are parsed in the order they have been added.
--- rest-api-spec/test/cat.thread_pool/10_basic.yaml | 2 +- .../org/elasticsearch/rest/action/support/RestTable.java | 9 ++++----- .../elasticsearch/rest/action/support/RestTableTest.java | 4 ++-- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/test/cat.thread_pool/10_basic.yaml index 37994201191..283e353b7a3 100755 --- a/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -36,7 +36,7 @@ - match: $body: | - /^ bulk.max \s+ bulk.min \s+ \n + /^ bulk.min \s+ bulk.max \s+ \n (\s+ \d+ \s+ \d+ \s+ \n)+ $/ #(\s+ \d+ \s+ \d+ \n)+ $/ diff --git a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 0b5b44112f4..3e6eb713529 100644 --- a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -157,16 +157,15 @@ public class RestTable { private static Set expandHeadersFromRequest(Table table, RestRequest request) { Set headers = new LinkedHashSet<>(table.getHeaders().size()); - Map headerMap = table.getHeaderMap(); // check headers and aliases for (String header : Strings.splitStringByCommaToArray(request.param("h"))) { if (Regex.isSimpleMatchPattern(header)) { - for (Map.Entry configuredHeaderEntry : headerMap.entrySet()) { - String configuredHeader = configuredHeaderEntry.getKey(); + for (Table.Cell tableHeaderCell : table.getHeaders()) { + String configuredHeader = tableHeaderCell.value.toString(); if (Regex.simpleMatch(header, configuredHeader)) { headers.add(configuredHeader); - } else if (configuredHeaderEntry.getValue().attr.containsKey("alias")) { - String[] aliases = Strings.splitStringByCommaToArray(configuredHeaderEntry.getValue().attr.get("alias")); + } else if (tableHeaderCell.attr.containsKey("alias")) { + String[] aliases = 
Strings.splitStringByCommaToArray(tableHeaderCell.attr.get("alias")); for (String alias : aliases) { if (Regex.simpleMatch(header, alias)) { headers.add(configuredHeader); diff --git a/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java index bdf0a07cbf5..3faa0ea3673 100644 --- a/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java +++ b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java @@ -57,7 +57,7 @@ public class RestTableTest extends ElasticsearchTestCase { List headers = buildDisplayHeaders(table, restRequest); List headerNames = getHeaderNames(headers); - assertThat(headerNames, containsInAnyOrder("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, contains("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); assertThat(headerNames, not(hasItem("unmatched"))); } @@ -67,7 +67,7 @@ public class RestTableTest extends ElasticsearchTestCase { List headers = buildDisplayHeaders(table, restRequest); List headerNames = getHeaderNames(headers); - assertThat(headerNames, containsInAnyOrder("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, contains("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); assertThat(headerNames, not(hasItem("unmatched"))); } From acb07c72b9d2fd6e21f9710ffbdf4573c01c9f22 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 25 May 2015 15:06:56 +0200 Subject: [PATCH 036/123] Bulk: throw exception if unrecognized parameter in action/metadata line Closes #10977 --- .../action/bulk/BulkRequest.java | 80 +++++++++++-------- .../action/bulk/BulkRequestTests.java | 52 ++++++++++++ .../action/bulk/simple-bulk6.json | 6 ++ .../action/bulk/simple-bulk7.json | 6 ++ .../action/bulk/simple-bulk8.json | 6 ++ .../action/bulk/simple-bulk9.json | 4 + 6 files changed, 121 insertions(+), 33 deletions(-) create mode 100644 
src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json create mode 100644 src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json create mode 100644 src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json create mode 100644 src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json diff --git a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 617c3fc32bd..715c1d716d9 100644 --- a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -246,6 +246,7 @@ public class BulkRequest extends ActionRequest implements Composite public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { XContent xContent = XContentFactory.xContent(data); + int line = 0; int from = 0; int length = data.length(); byte marker = xContent.streamSeparator(); @@ -254,8 +255,9 @@ public class BulkRequest extends ActionRequest implements Composite if (nextMarker == -1) { break; } - // now parse the action + line++; + // now parse the action try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) { // move pointers from = nextMarker + 1; @@ -285,43 +287,53 @@ public class BulkRequest extends ActionRequest implements Composite // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id) // or START_OBJECT which will have another set of parameters + token = parser.nextToken(); - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("_index".equals(currentFieldName)) { - if (!allowExplicitIndex) { - throw new 
IllegalArgumentException("explicit index in bulk is not allowed"); - } - index = parser.text(); - } else if ("_type".equals(currentFieldName)) { - type = parser.text(); - } else if ("_id".equals(currentFieldName)) { - id = parser.text(); - } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { - routing = parser.text(); - } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { - parent = parser.text(); - } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { - timestamp = parser.text(); - } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { - if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(parser.text(), null).millis(); + if (token == XContentParser.Token.START_OBJECT) { + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("_index".equals(currentFieldName)) { + if (!allowExplicitIndex) { + throw new IllegalArgumentException("explicit index in bulk is not allowed"); + } + index = parser.text(); + } else if ("_type".equals(currentFieldName)) { + type = parser.text(); + } else if ("_id".equals(currentFieldName)) { + id = parser.text(); + } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { + routing = parser.text(); + } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { + parent = parser.text(); + } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { + timestamp = parser.text(); + } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + ttl = TimeValue.parseTimeValue(parser.text(), null).millis(); + } else { + ttl 
= parser.longValue(); + } + } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { + opType = parser.text(); + } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { + version = parser.longValue(); + } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { + versionType = VersionType.fromString(parser.text()); + } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { + retryOnConflict = parser.intValue(); } else { - ttl = parser.longValue(); + throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]"); } - } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { - opType = parser.text(); - } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { - version = parser.longValue(); - } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { - versionType = VersionType.fromString(parser.text()); - } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { - retryOnConflict = parser.intValue(); + } else { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); } } + } else if (token != XContentParser.Token.END_OBJECT) { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected " + XContentParser.Token.START_OBJECT + + " or " + XContentParser.Token.END_OBJECT + " but found [" + token + "]"); } if ("delete".equals(action)) { @@ -331,6 +343,8 @@ public class BulkRequest extends 
ActionRequest implements Composite if (nextMarker == -1) { break; } + line++; + // order is important, we set parent after routing, so routing will be set to parent if not set explicitly // we use internalAdd so we don't fork here, this allows us not to copy over the big byte array to small chunks // of index request. diff --git a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 3e1e762b45c..040bb81ef6b 100644 --- a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -117,4 +117,56 @@ public class BulkRequestTests extends ElasticsearchTestCase { assertThat(bulkRequest.requests().get(1), instanceOf(UpdateRequest.class)); assertThat(bulkRequest.requests().get(2), instanceOf(DeleteRequest.class)); } + + @Test + public void testSimpleBulk6() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 1"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the wrong format of line 1: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [1], expected a simple value for field [_source] but found [START_OBJECT]"), equalTo(true)); + } + } + + @Test + public void testSimpleBulk7() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 5"); + } catch (IllegalArgumentException e) { + 
assertThat("message contains error about the wrong format of line 5: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]"), equalTo(true)); + } + } + + @Test + public void testSimpleBulk8() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the unknown paramater _foo"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(), + e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true)); + } + } + + @Test + public void testSimpleBulk9() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 3"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the wrong format of line 3: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"), equalTo(true)); + } + } } diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json new file mode 100644 index 00000000000..e9c97965595 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_source": {"hello": "world"}, "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", 
"_id": 1}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json new file mode 100644 index 00000000000..a642d9ce4fe --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", "_id": 1}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unkown": ["foo", "bar"]}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json new file mode 100644 index 00000000000..c1a94b1d159 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", "_id": 1, "_foo": "bar"}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json new file mode 100644 index 00000000000..ebdbf750116 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json @@ -0,0 +1,4 @@ +{"index": {}} +{"field1": "value0"} +{"index": ["bar"] } +{"field1": "value1"} From 098c01d86c5c2fbd28cdcc86db5646fcac9edd69 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 27 May 2015 18:25:38 +0200 Subject: [PATCH 037/123] Internal: remove unused code. 
--- .../allocator/BalancedShardsAllocator.java | 3 +- .../common/collect/BoundedTreeSet.java | 63 --- .../common/collect/IdentityHashSet.java | 201 ---------- .../common/collect/ImmutableOpenLongMap.java | 376 ------------------ .../common/collect/Iterators2.java | 65 --- .../common/lucene/HashedBytesRef.java | 85 ---- .../ThreadSafeInputStreamIndexInput.java | 39 -- .../elasticsearch/common/unit/Percent.java | 64 --- .../common/collect/Iterators2Tests.java | 50 --- 9 files changed, 1 insertion(+), 945 deletions(-) delete mode 100644 src/main/java/org/elasticsearch/common/collect/BoundedTreeSet.java delete mode 100644 src/main/java/org/elasticsearch/common/collect/IdentityHashSet.java delete mode 100644 src/main/java/org/elasticsearch/common/collect/ImmutableOpenLongMap.java delete mode 100644 src/main/java/org/elasticsearch/common/collect/Iterators2.java delete mode 100644 src/main/java/org/elasticsearch/common/lucene/HashedBytesRef.java delete mode 100644 src/main/java/org/elasticsearch/common/lucene/store/ThreadSafeInputStreamIndexInput.java delete mode 100644 src/main/java/org/elasticsearch/common/unit/Percent.java delete mode 100644 src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java diff --git a/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 11f3e45653f..d4a268d522b 100644 --- a/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import 
org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.collect.IdentityHashSet; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; @@ -597,7 +596,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards int secondaryLength = 0; int primaryLength = primary.length; ArrayUtil.timSort(primary, comparator); - final Set throttledNodes = new IdentityHashSet<>(); + final Set throttledNodes = Collections.newSetFromMap(new IdentityHashMap()); do { for (int i = 0; i < primaryLength; i++) { MutableShardRouting shard = primary[i]; diff --git a/src/main/java/org/elasticsearch/common/collect/BoundedTreeSet.java b/src/main/java/org/elasticsearch/common/collect/BoundedTreeSet.java deleted file mode 100644 index 2adec0f283d..00000000000 --- a/src/main/java/org/elasticsearch/common/collect/BoundedTreeSet.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.collect; - -import java.util.Collection; -import java.util.Comparator; -import java.util.TreeSet; - -/** - * A {@link TreeSet} that is bounded by size. - * - * - */ -public class BoundedTreeSet extends TreeSet { - - private final int size; - - public BoundedTreeSet(int size) { - this.size = size; - } - - public BoundedTreeSet(Comparator comparator, int size) { - super(comparator); - this.size = size; - } - - @Override - public boolean add(E e) { - boolean result = super.add(e); - rebound(); - return result; - } - - @Override - public boolean addAll(Collection c) { - boolean result = super.addAll(c); - rebound(); - return result; - } - - private void rebound() { - while (size() > size) { - remove(last()); - } - } -} diff --git a/src/main/java/org/elasticsearch/common/collect/IdentityHashSet.java b/src/main/java/org/elasticsearch/common/collect/IdentityHashSet.java deleted file mode 100644 index e3e4834519b..00000000000 --- a/src/main/java/org/elasticsearch/common/collect/IdentityHashSet.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.collect; - -import java.util.*; - -/** - * - */ -public class IdentityHashSet - extends AbstractSet - implements Set, Cloneable, java.io.Serializable { - - static final long serialVersionUID = -5024744406713321677L; - - private transient IdentityHashMap map; - - // Dummy value to associate with an Object in the backing Map - private static final Object PRESENT = new Object(); - - public IdentityHashSet() { - map = new IdentityHashMap<>(); - } - - public IdentityHashSet(Collection c) { - map = new IdentityHashMap<>(Math.max((int) (c.size() / .75f) + 1, 16)); - addAll(c); - } - - public IdentityHashSet(int expectedSize) { - map = new IdentityHashMap<>(expectedSize); - } - - /** - * Returns an iterator over the elements in this set. The elements - * are returned in no particular order. - * - * @return an Iterator over the elements in this set - * @see ConcurrentModificationException - */ - @Override - public Iterator iterator() { - return map.keySet().iterator(); - } - - /** - * Returns the number of elements in this set (its cardinality). - * - * @return the number of elements in this set (its cardinality) - */ - @Override - public int size() { - return map.size(); - } - - /** - * Returns true if this set contains no elements. - * - * @return true if this set contains no elements - */ - @Override - public boolean isEmpty() { - return map.isEmpty(); - } - - /** - * Returns true if this set contains the specified element. - * More formally, returns true if and only if this set - * contains an element e such that - * (o==e). - * - * @param o element whose presence in this set is to be tested - * @return true if this set contains the specified element - */ - @Override - public boolean contains(Object o) { - return map.containsKey(o); - } - - /** - * Adds the specified element to this set if it is not already present. 
- * More formally, adds the specified element e to this set if - * this set contains no element e2 such that - * (e==e2). - * If this set already contains the element, the call leaves the set - * unchanged and returns false. - * - * @param e element to be added to this set - * @return true if this set did not already contain the specified - * element - */ - @Override - public boolean add(E e) { - return map.put(e, PRESENT) == null; - } - - /** - * Removes the specified element from this set if it is present. - * More formally, removes an element e such that - * (o==e), - * if this set contains such an element. Returns true if - * this set contained the element (or equivalently, if this set - * changed as a result of the call). (This set will not contain the - * element once the call returns.) - * - * @param o object to be removed from this set, if present - * @return true if the set contained the specified element - */ - @Override - public boolean remove(Object o) { - return map.remove(o) == PRESENT; - } - - /** - * Removes all of the elements from this set. - * The set will be empty after this call returns. - */ - @Override - public void clear() { - map.clear(); - } - - /** - * Returns a shallow copy of this HashSet instance: the elements - * themselves are not cloned. - * - * @return a shallow copy of this set - */ - @Override - public Object clone() { - try { - IdentityHashSet newSet = (IdentityHashSet) super.clone(); - newSet.map = (IdentityHashMap) map.clone(); - return newSet; - } catch (CloneNotSupportedException e) { - throw new InternalError(); - } - } - - /** - * Index the state of this HashSet instance to a stream (that is, - * serialize it). - * - * @serialData The capacity of the backing HashMap instance - * (int), and its load factor (float) are emitted, followed by - * the size of the set (the number of elements it contains) - * (int), followed by all of its elements (each an Object) in - * no particular order. 
- */ - private void writeObject(java.io.ObjectOutputStream s) - throws java.io.IOException { - // Write out any hidden serialization magic - s.defaultWriteObject(); - - // Write out size - s.writeInt(map.size()); - - // Write out all elements in the proper order. - for (Iterator i = map.keySet().iterator(); i.hasNext(); ) - s.writeObject(i.next()); - } - - /** - * Reconstitute the HashSet instance from a stream (that is, - * deserialize it). - */ - private void readObject(java.io.ObjectInputStream s) - throws java.io.IOException, ClassNotFoundException { - // Read in any hidden serialization magic - s.defaultReadObject(); - - // Read in size - int size = s.readInt(); - - map = new IdentityHashMap<>(size); - - // Read in all elements in the proper order. - for (int i = 0; i < size; i++) { - E e = (E) s.readObject(); - map.put(e, PRESENT); - } - } -} - diff --git a/src/main/java/org/elasticsearch/common/collect/ImmutableOpenLongMap.java b/src/main/java/org/elasticsearch/common/collect/ImmutableOpenLongMap.java deleted file mode 100644 index 571b54ebeeb..00000000000 --- a/src/main/java/org/elasticsearch/common/collect/ImmutableOpenLongMap.java +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.collect; - -import com.carrotsearch.hppc.*; -import com.carrotsearch.hppc.cursors.LongCursor; -import com.carrotsearch.hppc.cursors.LongObjectCursor; -import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.carrotsearch.hppc.predicates.IntObjectPredicate; -import com.carrotsearch.hppc.predicates.LongObjectPredicate; -import com.carrotsearch.hppc.predicates.LongPredicate; -import com.carrotsearch.hppc.procedures.LongObjectProcedure; -import com.google.common.collect.UnmodifiableIterator; - -import java.util.Iterator; -import java.util.Map; - -/** - * An immutable map implementation based on open hash map. - *

    - * Can be constructed using a {@link #builder()}, or using {@link #builder(org.elasticsearch.common.collect.ImmutableOpenLongMap)} (which is an optimized - * option to copy over existing content and modify it). - */ -public final class ImmutableOpenLongMap implements Iterable> { - - private final LongObjectHashMap map; - - private ImmutableOpenLongMap(LongObjectHashMap map) { - this.map = map; - } - - /** - * @return Returns the value associated with the given key or the default value - * for the key type, if the key is not associated with any value. - *

    - * Important note: For primitive type values, the value returned for a non-existing - * key may not be the default value of the primitive type (it may be any value previously - * assigned to that slot). - */ - public VType get(long key) { - return map.get(key); - } - - /** - * Returns true if this container has an association to a value for - * the given key. - */ - public boolean containsKey(long key) { - return map.containsKey(key); - } - - /** - * @return Returns the current size (number of assigned keys) in the container. - */ - public int size() { - return map.size(); - } - - /** - * @return Return true if this hash map contains no assigned keys. - */ - public boolean isEmpty() { - return map.isEmpty(); - } - - /** - * Returns a cursor over the entries (key-value pairs) in this map. The iterator is - * implemented as a cursor and it returns the same cursor instance on every - * call to {@link java.util.Iterator#next()}. To read the current key and value use the cursor's - * public fields. An example is shown below. - *

    -     * for (IntShortCursor c : intShortMap)
    -     * {
    -     *     System.out.println("index=" + c.index
    -     *       + " key=" + c.key
    -     *       + " value=" + c.value);
    -     * }
    -     * 
    - *

    - *

    The index field inside the cursor gives the internal index inside - * the container's implementation. The interpretation of this index depends on - * to the container. - */ - @Override - public Iterator> iterator() { - return map.iterator(); - } - - /** - * Returns a specialized view of the keys of this associated container. - * The view additionally implements {@link com.carrotsearch.hppc.ObjectLookupContainer}. - */ - public LongLookupContainer keys() { - return map.keys(); - } - - /** - * Returns a direct iterator over the keys. - */ - public UnmodifiableIterator keysIt() { - final Iterator iterator = map.keys().iterator(); - return new UnmodifiableIterator() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public Long next() { - return iterator.next().value; - } - }; - } - - /** - * @return Returns a container with all values stored in this map. - */ - public ObjectContainer values() { - return map.values(); - } - - /** - * Returns a direct iterator over the keys. 
- */ - public UnmodifiableIterator valuesIt() { - final Iterator> iterator = map.values().iterator(); - return new UnmodifiableIterator() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public VType next() { - return iterator.next().value; - } - }; - } - - @Override - public String toString() { - return map.toString(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ImmutableOpenLongMap that = (ImmutableOpenLongMap) o; - - if (!map.equals(that.map)) return false; - - return true; - } - - @Override - public int hashCode() { - return map.hashCode(); - } - - @SuppressWarnings("unchecked") - private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectHashMap()); - - @SuppressWarnings("unchecked") - public static ImmutableOpenLongMap of() { - return EMPTY; - } - - public static Builder builder() { - return new Builder<>(); - } - - public static Builder builder(int size) { - return new Builder<>(size); - } - - public static Builder builder(ImmutableOpenLongMap map) { - return new Builder<>(map); - } - - public static class Builder implements LongObjectMap { - - private LongObjectHashMap map; - - public Builder() { - //noinspection unchecked - this(EMPTY); - } - - public Builder(int size) { - this.map = new LongObjectHashMap<>(size); - } - - public Builder(ImmutableOpenLongMap map) { - this.map = map.map.clone(); - } - - /** - * Builds a new instance of the - */ - public ImmutableOpenLongMap build() { - LongObjectHashMap map = this.map; - this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest) - return new ImmutableOpenLongMap<>(map); - } - - /** - * Puts all the entries in the map to the builder. 
- */ - public Builder putAll(Map map) { - for (Map.Entry entry : map.entrySet()) { - this.map.put(entry.getKey(), entry.getValue()); - } - return this; - } - - /** - * A put operation that can be used in the fluent pattern. - */ - public Builder fPut(long key, VType value) { - map.put(key, value); - return this; - } - - @Override - public VType put(long key, VType value) { - return map.put(key, value); - } - - @Override - public VType get(long key) { - return map.get(key); - } - - @Override - public VType getOrDefault(long kType, VType vType) { - return map.getOrDefault(kType, vType); - } - - /** - * Remove that can be used in the fluent pattern. - */ - public Builder fRemove(long key) { - map.remove(key); - return this; - } - - @Override - public VType remove(long key) { - return map.remove(key); - } - - @Override - public Iterator> iterator() { - return map.iterator(); - } - - @Override - public boolean containsKey(long key) { - return map.containsKey(key); - } - - @Override - public int size() { - return map.size(); - } - - @Override - public boolean isEmpty() { - return map.isEmpty(); - } - - @Override - public void clear() { - map.clear(); - } - - @Override - public int putAll(LongObjectAssociativeContainer container) { - return map.putAll(container); - } - - @Override - public int putAll(Iterable> iterable) { - return map.putAll(iterable); - } - - @Override - public int removeAll(LongContainer container) { - return map.removeAll(container); - } - - @Override - public int removeAll(LongPredicate predicate) { - return map.removeAll(predicate); - } - - @Override - public LongCollection keys() { - return map.keys(); - } - - @Override - public ObjectContainer values() { - return map.values(); - } - - @Override - public > T forEach(T procedure) { - return map.forEach(procedure); - } - - @Override - public int indexOf(long key) { - return map.indexOf(key); - } - - @Override - public boolean indexExists(int index) { - return map.indexExists(index); - } - - @Override 
- public VType indexGet(int index) { - return map.indexGet(index); - } - - @Override - public VType indexReplace(int index, VType newValue) { - return map.indexReplace(index, newValue); - } - - @Override - public void indexInsert(int index, long key, VType value) { - map.indexInsert(index, key, value); - } - - @Override - public void release() { - map.release(); - } - - @Override - public String visualizeKeyDistribution(int characters) { - return map.visualizeKeyDistribution(characters); - } - - @Override - public int removeAll(LongObjectPredicate predicate) { - return map.removeAll(predicate); - } - - @Override - public > T forEach(T predicate) { - return map.forEach(predicate); - } - } -} diff --git a/src/main/java/org/elasticsearch/common/collect/Iterators2.java b/src/main/java/org/elasticsearch/common/collect/Iterators2.java deleted file mode 100644 index 20c0bd46483..00000000000 --- a/src/main/java/org/elasticsearch/common/collect/Iterators2.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.collect; - -import com.google.common.collect.Iterators; -import com.google.common.collect.PeekingIterator; -import com.google.common.collect.UnmodifiableIterator; - -import java.util.Comparator; -import java.util.Iterator; - -public enum Iterators2 { - ; - - /** Remove duplicated elements from an iterator over sorted content. */ - public static Iterator deduplicateSorted(Iterator iterator, final Comparator comparator) { - // TODO: infer type once JI-9019884 is fixed - final PeekingIterator it = Iterators.peekingIterator(iterator); - return new UnmodifiableIterator() { - - @Override - public boolean hasNext() { - return it.hasNext(); - } - - @Override - public T next() { - final T ret = it.next(); - while (it.hasNext() && comparator.compare(ret, it.peek()) == 0) { - it.next(); - } - assert !it.hasNext() || comparator.compare(ret, it.peek()) < 0 : "iterator is not sorted: " + ret + " > " + it.peek(); - return ret; - } - - }; - } - - /** Return a merged view over several iterators, optionally deduplicating equivalent entries. */ - public static Iterator mergeSorted(Iterable> iterators, Comparator comparator, boolean deduplicate) { - Iterator it = Iterators.mergeSorted(iterators, comparator); - if (deduplicate) { - it = deduplicateSorted(it, comparator); - } - return it; - } - -} diff --git a/src/main/java/org/elasticsearch/common/lucene/HashedBytesRef.java b/src/main/java/org/elasticsearch/common/lucene/HashedBytesRef.java deleted file mode 100644 index a85d786bd89..00000000000 --- a/src/main/java/org/elasticsearch/common/lucene/HashedBytesRef.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.lucene; - -import org.apache.lucene.util.BytesRef; - -/** - * A wrapped to {@link BytesRef} that also caches the hashCode for it. - */ -public class HashedBytesRef { - - public BytesRef bytes; - public int hash; - - public HashedBytesRef() { - } - - public HashedBytesRef(String bytes) { - this(new BytesRef(bytes)); - } - - public HashedBytesRef(BytesRef bytes) { - this(bytes, bytes.hashCode()); - } - - public HashedBytesRef(BytesRef bytes, int hash) { - this.bytes = bytes; - this.hash = hash; - } - - public HashedBytesRef resetHashCode() { - this.hash = bytes.hashCode(); - return this; - } - - public HashedBytesRef reset(BytesRef bytes, int hash) { - this.bytes = bytes; - this.hash = hash; - return this; - } - - @Override - public int hashCode() { - return hash; - } - - @Override - public boolean equals(Object other) { - if (other instanceof HashedBytesRef) { - return bytes.equals(((HashedBytesRef) other).bytes); - } - return false; - } - - @Override - public String toString() { - return bytes.toString(); - } - - public HashedBytesRef deepCopy() { - return deepCopyOf(this); - } - - public static HashedBytesRef deepCopyOf(HashedBytesRef other) { - BytesRef copy = BytesRef.deepCopyOf(other.bytes); - return new HashedBytesRef(copy, other.hash); - } -} diff --git 
a/src/main/java/org/elasticsearch/common/lucene/store/ThreadSafeInputStreamIndexInput.java b/src/main/java/org/elasticsearch/common/lucene/store/ThreadSafeInputStreamIndexInput.java deleted file mode 100644 index 1d3084e0352..00000000000 --- a/src/main/java/org/elasticsearch/common/lucene/store/ThreadSafeInputStreamIndexInput.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.lucene.store; - -import org.apache.lucene.store.IndexInput; - -import java.io.IOException; - -/** - * - */ -public class ThreadSafeInputStreamIndexInput extends InputStreamIndexInput { - - public ThreadSafeInputStreamIndexInput(IndexInput indexInput, long limit) { - super(indexInput, limit); - } - - @Override - public synchronized int read(byte[] b, int off, int len) throws IOException { - return super.read(b, off, len); - } -} diff --git a/src/main/java/org/elasticsearch/common/unit/Percent.java b/src/main/java/org/elasticsearch/common/unit/Percent.java deleted file mode 100644 index 8da3eff6ad4..00000000000 --- a/src/main/java/org/elasticsearch/common/unit/Percent.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.unit; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; - -import java.io.IOException; -import java.io.Serializable; - -/** - * - */ -public class Percent implements Streamable, Serializable { - - private double value; - - public Percent(double value) { - this.value = value; - } - - public double value() { - return value; - } - - @Override - public String toString() { - return format(value); - } - - public static String format(double value) { - String p = String.valueOf(value * 100.0); - int ix = p.indexOf(".") + 1; - return p.substring(0, ix) + p.substring(ix, ix + 1) + "%"; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - value = in.readDouble(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeDouble(value); - } -} diff --git a/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java b/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java deleted file mode 100644 index 65aa51c8ec0..00000000000 --- a/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.collect; - -import com.google.common.collect.Lists; -import com.google.common.collect.Ordering; -import com.google.common.collect.Sets; -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.test.ElasticsearchTestCase; - -import java.util.Iterator; -import java.util.List; - -public class Iterators2Tests extends ElasticsearchTestCase { - - public void testDeduplicateSorted() { - final List list = Lists.newArrayList(); - for (int i = randomInt(100); i >= 0; --i) { - final int frequency = randomIntBetween(1, 10); - final String s = randomAsciiOfLength(randomIntBetween(2, 20)); - for (int j = 0; j < frequency; ++j) { - list.add(s); - } - } - CollectionUtil.introSort(list); - final List deduplicated = Lists.newArrayList(); - for (Iterator it = Iterators2.deduplicateSorted(list.iterator(), Ordering.natural()); it.hasNext(); ) { - deduplicated.add(it.next()); - } - assertEquals(Lists.newArrayList(Sets.newTreeSet(list)), deduplicated); - } - -} From 95faa35853bff06d14c9071120988c0cba9aae84 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Wed, 27 May 2015 16:58:29 +0100 Subject: [PATCH 038/123] Aggregations: Sibling Pipeline Aggregations can now be nested in SingleBucketAggregations Closes #11379 --- .../InternalSingleBucketAggregation.java | 13 +++++ .../pipeline/SiblingPipelineAggregator.java | 41 +++++++++----- .../aggregations/pipeline/MaxBucketTests.java | 54 ++++++++++++++++++- 3 files changed, 94 insertions(+), 14 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java index eacc7471743..303d6c784e3 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java +++ 
b/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -63,6 +64,18 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio return aggregations; } + /** + * Create a new copy of this {@link Aggregation} with the same settings as + * this {@link Aggregation} and contains the provided sub-aggregations. + * + * @param subAggregations + * the buckets to use in the new {@link Aggregation} + * @return the new {@link Aggregation} + */ + public InternalSingleBucketAggregation create(InternalAggregations subAggregations) { + return newAggregation(getName(), getDocCount(), subAggregations); + } + /** * Create a new empty sub aggregation. This must be a new instance on each call. 
*/ diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index d68660d4c7d..cb14b0df4c2 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import java.util.ArrayList; @@ -45,20 +46,34 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { @SuppressWarnings("unchecked") @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { - @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation; - List buckets = multiBucketsAgg.getBuckets(); - List newBuckets = new ArrayList<>(); - for (int i = 0; i < buckets.size(); i++) { - InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); - InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); - List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); - aggs.add(aggToAdd); - InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), bucket); - newBuckets.add(newBucket); - } + if (aggregation instanceof 
InternalMultiBucketAggregation) { + @SuppressWarnings("rawtypes") + InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation; + List buckets = multiBucketsAgg.getBuckets(); + List newBuckets = new ArrayList<>(); + for (int i = 0; i < buckets.size(); i++) { + InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); + InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); + List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), + AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(aggToAdd); + InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), + bucket); + newBuckets.add(newBucket); + } - return multiBucketsAgg.create(newBuckets); + return multiBucketsAgg.create(newBuckets); + } else if (aggregation instanceof InternalSingleBucketAggregation) { + InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation; + InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext); + List aggs = new ArrayList<>(Lists.transform(singleBucketAgg.getAggregations().asList(), + AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(aggToAdd); + return singleBucketAgg.create(new InternalAggregations(aggs)); + } else { + throw new IllegalStateException("Aggregation [" + aggregation.getName() + "] must be a bucket aggregation [" + + aggregation.type().name() + "]"); + } } public abstract InternalAggregation doReduce(Aggregations aggregations, ReduceContext context); diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java index 177f3bca51f..3f12b81325e 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java +++ 
b/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -34,11 +35,13 @@ import org.junit.Test; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; @@ -282,6 +285,55 @@ public class MaxBucketTests extends ElasticsearchIntegrationTest { } } + @Test + public void testMetric_asSubAggOfSingleBucketAgg() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + filter("filter") + .filter(termQuery("tag", "tag0")) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue) 
+ .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + + assertSearchResponse(response); + + Filter filter = response.getAggregations().get("filter"); + assertThat(filter, notNullValue()); + assertThat(filter.getName(), equalTo("filter")); + Histogram histo = filter.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + + List maxKeys = new ArrayList<>(); + double maxValue = Double.NEGATIVE_INFINITY; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + if (bucket.getDocCount() != 0) { + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + if (sum.value() > maxValue) { + maxValue = sum.value(); + maxKeys = new ArrayList<>(); + maxKeys.add(bucket.getKeyAsString()); + } else if (sum.value() == maxValue) { + maxKeys.add(bucket.getKeyAsString()); + } + } + } + + InternalBucketMetricValue maxBucketValue = filter.getAggregations().get("max_bucket"); + assertThat(maxBucketValue, notNullValue()); + assertThat(maxBucketValue.getName(), equalTo("max_bucket")); + assertThat(maxBucketValue.value(), equalTo(maxValue)); + assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()]))); + } + @Test public void testMetric_asSubAggWithInsertZeros() throws Exception { SearchResponse response = client() From e54dd688a1c5c78a2637b7ab264f6eabc0352985 Mon Sep 17 00:00:00 2001 From: jaymode Date: Wed, 27 May 2015 09:14:01 -0400 Subject: [PATCH 039/123] make JNA optional for tests and move classes to bootstrap package Today, JNA is a optional dependency in the build but when running tests or running with mlockall set to true, JNA must be on the classpath for Windows systems 
since we always try to load JNA classes when using mlockall. The old Natives class was renamed to JNANatives, and a new Natives class is introduced without any direct imports on JNA classes. The Natives class checks to see if JNA classes are available at startup. If the classes are available the Natives class will delegate to the JNANatives class. If the classes are not available the Natives class will not use the JNANatives class, which results in no additional attempts to load JNA classes. Additionally, all of the JNA classes were moved to the bootstrap package and made package private as this is the only place they should be called from. Closes #11360 --- .../elasticsearch/bootstrap/Bootstrap.java | 9 +- .../ConsoleCtrlHandler.java} | 22 +++-- .../JNACLibrary.java} | 14 ++-- .../JNAKernel32Library.java} | 54 ++++++------ .../JNANatives.java} | 47 +++++------ .../org/elasticsearch/bootstrap/Natives.java | 84 +++++++++++++++++++ .../monitor/process/ProcessInfo.java | 4 +- .../mapping/ManyMappingsBenchmark.java | 5 +- .../recovery/ReplicaRecoveryBenchmark.java | 4 +- .../aggregations/GlobalOrdinalsBenchmark.java | 4 +- ...AggregationSearchCollectModeBenchmark.java | 4 +- ...AggregationSearchAndIndexingBenchmark.java | 4 +- .../TermsAggregationSearchBenchmark.java | 4 +- .../JNANativesTests.java} | 14 ++-- 14 files changed, 176 insertions(+), 97 deletions(-) rename src/main/java/org/elasticsearch/{common/jna/SizeT.java => bootstrap/ConsoleCtrlHandler.java} (68%) rename src/main/java/org/elasticsearch/{common/jna/CLibrary.java => bootstrap/JNACLibrary.java} (77%) rename src/main/java/org/elasticsearch/{common/jna/Kernel32Library.java => bootstrap/JNAKernel32Library.java} (84%) rename src/main/java/org/elasticsearch/{common/jna/Natives.java => bootstrap/JNANatives.java} (76%) create mode 100644 src/main/java/org/elasticsearch/bootstrap/Natives.java rename src/test/java/org/elasticsearch/{common/jna/NativesTests.java => bootstrap/JNANativesTests.java} (73%) diff --git 
a/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index cc1b7e059ad..15851b9dc18 100644 --- a/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.inject.spi.Message; -import org.elasticsearch.common.jna.Kernel32Library; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -48,7 +46,6 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import static com.google.common.collect.Sets.newHashSet; -import static org.elasticsearch.common.jna.Kernel32Library.ConsoleCtrlHandler; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** @@ -122,7 +119,7 @@ public class Bootstrap { // force remainder of JNA to be loaded (if available). try { - Kernel32Library.getInstance(); + JNAKernel32Library.getInstance(); } catch (Throwable ignored) { // we've already logged this. 
} @@ -143,6 +140,10 @@ public class Bootstrap { StringHelper.randomId(); } + public static boolean isMemoryLocked() { + return Natives.isMemoryLocked(); + } + private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception { initializeNatives(settings.getAsBoolean("bootstrap.mlockall", false), settings.getAsBoolean("bootstrap.ctrlhandler", true), diff --git a/src/main/java/org/elasticsearch/common/jna/SizeT.java b/src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java similarity index 68% rename from src/main/java/org/elasticsearch/common/jna/SizeT.java rename to src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java index ab2fcd70552..6433b336d9d 100644 --- a/src/main/java/org/elasticsearch/common/jna/SizeT.java +++ b/src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java @@ -17,19 +17,17 @@ * under the License. */ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; -import com.sun.jna.IntegerType; -import com.sun.jna.Native; +public interface ConsoleCtrlHandler { -public class SizeT extends IntegerType { - - public SizeT() { - this(0); - } - - public SizeT(long value) { - super(Native.SIZE_T_SIZE, value); - } + int CTRL_CLOSE_EVENT = 2; + /** + * Handles the Ctrl event. + * + * @param code the code corresponding to the Ctrl sent. + * @return true if the handler processed the event, false otherwise. If false, the next handler will be called. + */ + boolean handle(int code); } diff --git a/src/main/java/org/elasticsearch/common/jna/CLibrary.java b/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java similarity index 77% rename from src/main/java/org/elasticsearch/common/jna/CLibrary.java rename to src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index d3e2c19188d..97bf98e60f6 100644 --- a/src/main/java/org/elasticsearch/common/jna/CLibrary.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.sun.jna.Native; import org.elasticsearch.common.logging.ESLogger; @@ -27,9 +27,9 @@ import org.elasticsearch.common.logging.Loggers; /** * */ -public class CLibrary { +class JNACLibrary { - private static ESLogger logger = Loggers.getLogger(CLibrary.class); + private static final ESLogger logger = Loggers.getLogger(JNACLibrary.class); public static final int MCL_CURRENT = 1; public static final int MCL_FUTURE = 2; @@ -39,17 +39,15 @@ public class CLibrary { static { try { Native.register("c"); - } catch (NoClassDefFoundError e) { - logger.warn("JNA not found. native methods (mlockall) will be disabled."); } catch (UnsatisfiedLinkError e) { logger.warn("unable to link C library. native methods (mlockall) will be disabled."); } } - public static native int mlockall(int flags); + static native int mlockall(int flags); - public static native int geteuid(); + static native int geteuid(); - private CLibrary() { + private JNACLibrary() { } } diff --git a/src/main/java/org/elasticsearch/common/jna/Kernel32Library.java b/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java similarity index 84% rename from src/main/java/org/elasticsearch/common/jna/Kernel32Library.java rename to src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index d2b634ae768..57af6b145df 100644 --- a/src/main/java/org/elasticsearch/common/jna/Kernel32Library.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.google.common.collect.ImmutableList; import com.sun.jna.*; @@ -35,9 +35,9 @@ import java.util.List; /** * Library for Windows/Kernel32 */ -public class Kernel32Library { +class JNAKernel32Library { - private static ESLogger logger = Loggers.getLogger(Kernel32Library.class); + private static final ESLogger logger = Loggers.getLogger(JNAKernel32Library.class); // Callbacks must be kept around in order to be able to be called later, // when the Windows ConsoleCtrlHandler sends an event. @@ -45,10 +45,10 @@ public class Kernel32Library { // Native library instance must be kept around for the same reason. private final static class Holder { - private final static Kernel32Library instance = new Kernel32Library(); + private final static JNAKernel32Library instance = new JNAKernel32Library(); } - private Kernel32Library() { + private JNAKernel32Library() { if (Constants.WINDOWS) { try { Native.register("kernel32"); @@ -61,7 +61,7 @@ public class Kernel32Library { } } - public static Kernel32Library getInstance() { + static JNAKernel32Library getInstance() { return Holder.instance; } @@ -73,7 +73,7 @@ public class Kernel32Library { * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is not loaded or if the native function is not found * @throws java.lang.NoClassDefFoundError if the library for native calls is missing */ - public boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { boolean result = false; if (handler != null) { NativeHandlerCallback callback = new NativeHandlerCallback(handler); @@ -85,7 +85,7 @@ public class Kernel32Library { return result; } - public ImmutableList getCallbacks() { + ImmutableList getCallbacks() { return ImmutableList.builder().addAll(callbacks).build(); } @@ -98,7 +98,7 @@ public class Kernel32Library { * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is 
not loaded or if the native function is not found * @throws java.lang.NoClassDefFoundError if the library for native calls is missing */ - public native boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); + native boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); /** * Handles consoles event with WIN API @@ -123,20 +123,6 @@ public class Kernel32Library { } } - public interface ConsoleCtrlHandler { - - public static final int CTRL_CLOSE_EVENT = 2; - - /** - * Handles the Ctrl event. - * - * @param code the code corresponding to the Ctrl sent. - * @return true if the handler processed the event, false otherwise. If false, the next handler will be called. - */ - boolean handle(int code); - } - - /** * Memory protection constraints * @@ -167,6 +153,18 @@ public class Kernel32Library { } } + public static class SizeT extends IntegerType { + + public SizeT() { + this(0); + } + + public SizeT(long value) { + super(Native.SIZE_T_SIZE, value); + } + + } + /** * Locks the specified region of the process's virtual address space into physical * memory, ensuring that subsequent access to the region will not incur a page fault. @@ -177,7 +175,7 @@ public class Kernel32Library { * @param size The size of the region to be locked, in bytes. * @return true if the function succeeds */ - public native boolean VirtualLock(Pointer address, SizeT size); + native boolean VirtualLock(Pointer address, SizeT size); /** * Retrieves information about a range of pages within the virtual address space of a specified process. @@ -190,7 +188,7 @@ public class Kernel32Library { * @param length The size of the buffer pointed to by the memoryInfo parameter, in bytes. * @return the actual number of bytes returned in the information buffer. 
*/ - public native int VirtualQueryEx(Pointer handle, Pointer address, MemoryBasicInformation memoryInfo, int length); + native int VirtualQueryEx(Pointer handle, Pointer address, MemoryBasicInformation memoryInfo, int length); /** * Sets the minimum and maximum working set sizes for the specified process. @@ -202,7 +200,7 @@ public class Kernel32Library { * @param maxSize The maximum working set size for the process, in bytes. * @return true if the function succeeds. */ - public native boolean SetProcessWorkingSetSize(Pointer handle, SizeT minSize, SizeT maxSize); + native boolean SetProcessWorkingSetSize(Pointer handle, SizeT minSize, SizeT maxSize); /** * Retrieves a pseudo handle for the current process. @@ -211,7 +209,7 @@ public class Kernel32Library { * * @return a pseudo handle to the current process. */ - public native Pointer GetCurrentProcess(); + native Pointer GetCurrentProcess(); /** * Closes an open object handle. @@ -221,5 +219,5 @@ public class Kernel32Library { * @param handle A valid handle to an open object. * @return true if the function succeeds. */ - public native boolean CloseHandle(Pointer handle); + native boolean CloseHandle(Pointer handle); } diff --git a/src/main/java/org/elasticsearch/common/jna/Natives.java b/src/main/java/org/elasticsearch/bootstrap/JNANatives.java similarity index 76% rename from src/main/java/org/elasticsearch/common/jna/Natives.java rename to src/main/java/org/elasticsearch/bootstrap/JNANatives.java index fa8e074713a..eb29df85cdb 100644 --- a/src/main/java/org/elasticsearch/common/jna/Natives.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -17,32 +17,34 @@ * under the License. 
*/ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.sun.jna.Native; -import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.jna.Kernel32Library.ConsoleCtrlHandler; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.monitor.jvm.JvmInfo; import java.util.Locale; -/** - * - */ -public class Natives { +import static org.elasticsearch.bootstrap.JNAKernel32Library.SizeT; + +/** + * This class performs the actual work with JNA and library bindings to call native methods. It should only be used after + * we are sure that the JNA classes are available to the JVM + */ +class JNANatives { + + private static final ESLogger logger = Loggers.getLogger(JNANatives.class); - private static ESLogger logger = Loggers.getLogger(Natives.class); // Set to true, in case native mlockall call was successful public static boolean LOCAL_MLOCKALL = false; - public static void tryMlockall() { + static void tryMlockall() { int errno = Integer.MIN_VALUE; try { - int result = CLibrary.mlockall(CLibrary.MCL_CURRENT); + int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT); if (result != 0) { errno = Native.getLastError(); } else { @@ -54,7 +56,7 @@ public class Natives { } if (errno != Integer.MIN_VALUE) { - if (errno == CLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) { + if (errno == JNACLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) { logger.warn("Unable to lock JVM memory (ENOMEM)." + " This can result in part of the JVM being swapped out." 
+ " Increase RLIMIT_MEMLOCK (ulimit)."); @@ -66,21 +68,20 @@ public class Natives { } /** Returns true if user is root, false if not, or if we don't know */ - public static boolean definitelyRunningAsRoot() { + static boolean definitelyRunningAsRoot() { if (Constants.WINDOWS) { return false; // don't know } try { - return CLibrary.geteuid() == 0; + return JNACLibrary.geteuid() == 0; } catch (UnsatisfiedLinkError e) { // this will have already been logged by Kernel32Library, no need to repeat it return false; } } - public static void tryVirtualLock() - { - Kernel32Library kernel = Kernel32Library.getInstance(); + static void tryVirtualLock() { + JNAKernel32Library kernel = JNAKernel32Library.getInstance(); Pointer process = null; try { process = kernel.GetCurrentProcess(); @@ -91,12 +92,12 @@ public class Natives { if (!kernel.SetProcessWorkingSetSize(process, size, size)) { logger.warn("Unable to lock JVM memory. Failed to set working set size. Error code " + Native.getLastError()); } else { - Kernel32Library.MemoryBasicInformation memInfo = new Kernel32Library.MemoryBasicInformation(); + JNAKernel32Library.MemoryBasicInformation memInfo = new JNAKernel32Library.MemoryBasicInformation(); long address = 0; while (kernel.VirtualQueryEx(process, new Pointer(address), memInfo, memInfo.size()) != 0) { - boolean lockable = memInfo.State.longValue() == Kernel32Library.MEM_COMMIT - && (memInfo.Protect.longValue() & Kernel32Library.PAGE_NOACCESS) != Kernel32Library.PAGE_NOACCESS - && (memInfo.Protect.longValue() & Kernel32Library.PAGE_GUARD) != Kernel32Library.PAGE_GUARD; + boolean lockable = memInfo.State.longValue() == JNAKernel32Library.MEM_COMMIT + && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_NOACCESS) != JNAKernel32Library.PAGE_NOACCESS + && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_GUARD) != JNAKernel32Library.PAGE_GUARD; if (lockable) { kernel.VirtualLock(memInfo.BaseAddress, new SizeT(memInfo.RegionSize.longValue())); } @@ -114,18 +115,16 
@@ public class Natives { } } - public static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { // The console Ctrl handler is necessary on Windows platforms only. if (Constants.WINDOWS) { try { - boolean result = Kernel32Library.getInstance().addConsoleCtrlHandler(handler); + boolean result = JNAKernel32Library.getInstance().addConsoleCtrlHandler(handler); if (result) { logger.debug("console ctrl handler correctly set"); } else { logger.warn("unknown error " + Native.getLastError() + " when adding console ctrl handler:"); } - } catch (NoClassDefFoundError e) { - logger.warn("JNA not found: native methods and handlers will be disabled."); } catch (UnsatisfiedLinkError e) { // this will have already been logged by Kernel32Library, no need to repeat it } diff --git a/src/main/java/org/elasticsearch/bootstrap/Natives.java b/src/main/java/org/elasticsearch/bootstrap/Natives.java new file mode 100644 index 00000000000..3342cdfd2c0 --- /dev/null +++ b/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; + +/** + * The Natives class is a wrapper class that checks if the classes necessary for calling native methods are available on + * startup. If they are not available, this class will avoid calling code that loads these classes. + */ +class Natives { + private static final ESLogger logger = Loggers.getLogger(Natives.class); + + // marker to determine if the JNA class files are available to the JVM + private static boolean jnaAvailable = false; + + static { + try { + // load one of the main JNA classes to see if the classes are available. this does not ensure that native + // libraries are available + Class.forName("com.sun.jna.Native"); + jnaAvailable = true; + } catch(ClassNotFoundException e) { + logger.warn("JNA not found. native methods will be disabled."); + } + } + + static void tryMlockall() { + if (!jnaAvailable) { + logger.warn("cannot mlockall because JNA is not available"); + return; + } + JNANatives.tryMlockall(); + } + + static boolean definitelyRunningAsRoot() { + if (!jnaAvailable) { + logger.warn("cannot check if running as root because JNA is not available"); + return false; + } + return JNANatives.definitelyRunningAsRoot(); + } + + static void tryVirtualLock() { + if (!jnaAvailable) { + logger.warn("cannot mlockall because JNA is not available"); + return; + } + JNANatives.tryVirtualLock(); + } + + static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + if (!jnaAvailable) { + logger.warn("cannot register console handler because JNA is not available"); + return; + } + JNANatives.addConsoleCtrlHandler(handler); + } + + static boolean isMemoryLocked() { + if (!jnaAvailable) { + return false; + } + return JNANatives.LOCAL_MLOCKALL; + } +} diff --git a/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java b/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java index 
c99c92faccb..49d140e203b 100644 --- a/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java +++ b/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java @@ -19,10 +19,10 @@ package org.elasticsearch.monitor.process; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; @@ -50,7 +50,7 @@ public class ProcessInfo implements Streamable, Serializable, ToXContent { public ProcessInfo(long id, long maxFileDescriptors) { this.id = id; this.maxFileDescriptors = maxFileDescriptors; - this.mlockall = Natives.LOCAL_MLOCKALL; + this.mlockall = Bootstrap.isMemoryLocked(); } public long refreshInterval() { diff --git a/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java b/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java index 236c8821b3c..b40d29a948c 100644 --- a/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java @@ -21,8 +21,8 @@ package org.elasticsearch.benchmark.mapping; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -85,8 +85,9 @@ public class ManyMappingsBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - 
Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() + .put("") .put(SETTING_NUMBER_OF_SHARDS, 5) .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); diff --git a/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java b/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java index 66c4e62bdbc..1ac7c433038 100644 --- a/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java @@ -20,10 +20,10 @@ package org.elasticsearch.benchmark.recovery; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; @@ -57,7 +57,7 @@ public class ReplicaRecoveryBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() .put("gateway.type", "local") diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java index 69eadffb52d..c415dbf9b2b 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java @@ -26,8 +26,8 @@ import 
org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.benchmark.search.aggregations.TermsAggregationSearchBenchmark.StatsResult; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -66,7 +66,7 @@ public class GlobalOrdinalsBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java index 96c80b5051c..bf13b774edc 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java @@ -27,10 +27,10 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -71,7 
+71,7 @@ public class SubAggregationSearchCollectModeBenchmark { static Node[] nodes; public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java index f57c9848886..45f7dbf9562 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java @@ -26,9 +26,9 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -71,7 +71,7 @@ public class TermsAggregationSearchAndIndexingBenchmark { static Node[] nodes; public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() .put("refresh_interval", "-1") .put(SETTING_NUMBER_OF_SHARDS, 1) diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java index 55d3db53dd0..e92a24c1c99 100644 --- 
a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java @@ -28,10 +28,10 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -99,7 +99,7 @@ public class TermsAggregationSearchBenchmark { } public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/common/jna/NativesTests.java b/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java similarity index 73% rename from src/test/java/org/elasticsearch/common/jna/NativesTests.java rename to src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java index 014a2ce5b27..ead01b38cd8 100644 --- a/src/test/java/org/elasticsearch/common/jna/NativesTests.java +++ b/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; import org.elasticsearch.test.ElasticsearchTestCase; @@ -25,23 +25,23 @@ import org.junit.Test; import static org.hamcrest.Matchers.equalTo; -public class NativesTests extends ElasticsearchTestCase { +public class JNANativesTests extends ElasticsearchTestCase { @Test public void testMlockall() { if (Constants.MAC_OS_X) { - assertFalse("Memory locking is not available on OS X platforms", Natives.LOCAL_MLOCKALL); + assertFalse("Memory locking is not available on OS X platforms", JNANatives.LOCAL_MLOCKALL); } } @Test public void testConsoleCtrlHandler() { if (Constants.WINDOWS) { - assertNotNull(Kernel32Library.getInstance()); - assertThat(Kernel32Library.getInstance().getCallbacks().size(), equalTo(1)); + assertNotNull(JNAKernel32Library.getInstance()); + assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(1)); } else { - assertNotNull(Kernel32Library.getInstance()); - assertThat(Kernel32Library.getInstance().getCallbacks().size(), equalTo(0)); + assertNotNull(JNAKernel32Library.getInstance()); + assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(0)); } } } From cbb7b633f6807abbff4f9acf20d126f250cc47be Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 27 May 2015 19:34:44 +0200 Subject: [PATCH 040/123] REST tests: Fixed bad YAML in search/10_source_filtering --- rest-api-spec/test/search/10_source_filtering.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/test/search/10_source_filtering.yaml index 1a2a79a80e4..a78a5a2a28f 100644 --- a/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/test/search/10_source_filtering.yaml @@ -89,10 +89,10 @@ query: { match_all: {} } - match: { hits.hits.0.fields: { include.field2 : [v2] }} - is_true: hits.hits.0._source - - + + - do: search: - 
fielddata_fields: ["count"] - - match: { hits.hits.0.fields.count: [1] } + fielddata_fields: [ "count" ] + - match: { hits.hits.0.fields.count: [1] } From 491afbe01ccc0d76099135a283f9c70ef62c901b Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Wed, 6 May 2015 16:13:11 -0400 Subject: [PATCH 041/123] Aggregations: Add Holt-Winters model to `moving_avg` pipeline aggregation Closes #11043 --- .../pipeline/movavg-aggregation.asciidoc | 115 ++++- .../images/reducers_movavg/triple.png | Bin 0 -> 98397 bytes .../reducers_movavg/triple_prediction.png | Bin 0 -> 93252 bytes .../reducers_movavg/triple_untruncated.png | Bin 0 -> 48762 bytes .../pipeline/movavg/MovAvgParser.java | 5 +- .../movavg/MovAvgPipelineAggregator.java | 23 +- .../pipeline/movavg/models/EwmaModel.java | 11 +- .../movavg/models/HoltLinearModel.java | 18 +- .../movavg/models/HoltWintersModel.java | 422 ++++++++++++++++++ .../pipeline/movavg/models/LinearModel.java | 5 +- .../pipeline/movavg/models/MovAvgModel.java | 133 ++++++ .../movavg/models/MovAvgModelModule.java | 9 +- .../movavg/models/MovAvgModelParser.java | 34 -- .../models/MovAvgModelParserMapper.java | 10 +- .../pipeline/movavg/models/SimpleModel.java | 5 +- .../models/TransportMovAvgModelModule.java | 1 + .../pipeline/moving/avg/MovAvgTests.java | 224 +++++++++- .../pipeline/moving/avg/MovAvgUnitTests.java | 288 +++++++++++- 18 files changed, 1204 insertions(+), 99 deletions(-) create mode 100644 docs/reference/images/reducers_movavg/triple.png create mode 100644 docs/reference/images/reducers_movavg/triple_prediction.png create mode 100644 docs/reference/images/reducers_movavg/triple_untruncated.png create mode 100644 src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java delete mode 100644 src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc 
b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index a4e20793849..6f998ffca98 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -180,11 +180,11 @@ The default value of `alpha` is `0.5`, and the setting accepts any float from 0- [[single_0.2alpha]] -.Single Exponential moving average with window of size 10, alpha = 0.2 +.EWMA with window of size 10, alpha = 0.2 image::images/pipeline_movavg/single_0.2alpha.png[] [[single_0.7alpha]] -.Single Exponential moving average with window of size 10, alpha = 0.7 +.EWMA with window of size 10, alpha = 0.7 image::images/pipeline_movavg/single_0.7alpha.png[] ==== Holt-Linear @@ -223,13 +223,111 @@ to see. Small values emphasize long-term trends (such as a constant linear tren values emphasize short-term trends. This will become more apparently when you are predicting values. [[double_0.2beta]] -.Double Exponential moving average with window of size 100, alpha = 0.5, beta = 0.2 +.Holt-Linear moving average with window of size 100, alpha = 0.5, beta = 0.2 image::images/pipeline_movavg/double_0.2beta.png[] [[double_0.7beta]] -.Double Exponential moving average with window of size 100, alpha = 0.5, beta = 0.7 +.Holt-Linear moving average with window of size 100, alpha = 0.5, beta = 0.7 image::images/pipeline_movavg/double_0.7beta.png[] +==== Holt-Winters + +The `holt_winters` model (aka "triple exponential") incorporates a third exponential term which +tracks the seasonal aspect of your data. This aggregation therefore smooths based on three components: "level", "trend" +and "seasonality". + +The level and trend calculation is identical to `holt` The seasonal calculation looks at the difference between +the current point, and the point one period earlier. + +Holt-Winters requires a little more handholding than the other moving averages. You need to specify the "periodicity" +of your data: e.g. 
if your data has cyclic trends every 7 days, you would set `period: 7`. Similarly if there was +a monthly trend, you would set it to `30`. There is currently no periodicity detection, although that is planned +for future enhancements. + +There are two varieties of Holt-Winters: additive and multiplicative. + +===== "Cold Start" + +Unfortunately, due to the nature of Holt-Winters, it requires two periods of data to "bootstrap" the algorithm. This +means that your `window` must always be *at least* twice the size of your period. An exception will be thrown if it +isn't. It also means that Holt-Winters will not emit a value for the first `2 * period` buckets; the current algorithm +does not backcast. + +[[holt_winters_cold_start]] +.Holt-Winters showing a "cold" start where no values are emitted +image::images/reducers_movavg/triple_untruncated.png[] + +Because the "cold start" obscures what the moving average looks like, the rest of the Holt-Winters images are truncated +to not show the "cold start". Just be aware this will always be present at the beginning of your moving averages! + +===== Additive Holt-Winters + +Additive seasonality is the default; it can also be specified by setting `"type": "add"`. This variety is preferred +when the seasonal affect is additive to your data. E.g. you could simply subtract the seasonal effect to "de-seasonalize" +your data into a flat trend. + +The default value of `alpha`, `beta` and `gamma` is `0.5`, and the settings accept any float from 0-1 inclusive. +The default value of `period` is `1`. 
+ +[source,js] +-------------------------------------------------- +{ + "the_movavg":{ + "moving_avg":{ + "buckets_path": "the_sum", + "model" : "holt_winters", + "settings" : { + "type" : "add", + "alpha" : 0.5, + "beta" : 0.5, + "gamma" : 0.5, + "period" : 7 + } + } +} +-------------------------------------------------- + + +[[holt_winters_add]] +.Holt-Winters moving average with window of size 120, alpha = 0.5, beta = 0.7, gamma = 0.3, period = 30 +image::images/reducers_movavg/triple.png[] + +===== Multiplicative Holt-Winters + +Multiplicative is specified by setting `"type": "mult"`. This variety is preferred when the seasonal affect is +multiplied against your data. E.g. if the seasonal affect is x5 the data, rather than simply adding to it. + +The default value of `alpha`, `beta` and `gamma` is `0.5`, and the settings accept any float from 0-1 inclusive. +The default value of `period` is `1`. + +[WARNING] +====== +Multiplicative Holt-Winters works by dividing each data point by the seasonal value. This is problematic if any of +your data is zero, or if there are gaps in the data (since this results in a divid-by-zero). To combat this, the +`mult` Holt-Winters pads all values by a very small amount (1*10^-10^) so that all values are non-zero. This affects +the result, but only minimally. 
If your data is non-zero, or you prefer to see `NaN` when zero's are encountered, +you can disable this behavior with `pad: false` +====== + +[source,js] +-------------------------------------------------- +{ + "the_movavg":{ + "moving_avg":{ + "buckets_path": "the_sum", + "model" : "holt_winters", + "settings" : { + "type" : "mult", + "alpha" : 0.5, + "beta" : 0.5, + "gamma" : 0.5, + "period" : 7, + "pad" : true + } + } +} +-------------------------------------------------- + ==== Prediction All the moving average model support a "prediction" mode, which will attempt to extrapolate into the future given the @@ -263,7 +361,7 @@ value, we can extrapolate based on local constant trends (in this case the predi of the series was heading in a downward direction): [[double_prediction_local]] -.Double Exponential moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.8 +.Holt-Linear moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.8 image::images/pipeline_movavg/double_prediction_local.png[] In contrast, if we choose a small `beta`, the predictions are based on the global constant trend. 
In this series, the @@ -272,3 +370,10 @@ global trend is slightly positive, so the prediction makes a sharp u-turn and be [[double_prediction_global]] .Double Exponential moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.1 image::images/pipeline_movavg/double_prediction_global.png[] + +The `holt_winters` model has the potential to deliver the best predictions, since it also incorporates seasonal +fluctuations into the model: + +[[holt_winters_prediction_global]] +.Holt-Winters moving average with window of size 120, predict = 25, alpha = 0.8, beta = 0.2, gamma = 0.7, period = 30 +image::images/pipeline_movavg/triple_prediction.png[] diff --git a/docs/reference/images/reducers_movavg/triple.png b/docs/reference/images/reducers_movavg/triple.png new file mode 100644 index 0000000000000000000000000000000000000000..8aaf281c1bfef6a17d9b4be7d9e8135f0cfee0ce GIT binary patch literal 98397 zcmZs?WmsLyvNen>+$Fe6Ah^3b1b26Lch}(V?iSn~LU8xs?(PmBx$nJuC+B=T^T%9s zuIlcpt{ye2Y7r_cEeZ#X1q}oQ1Sc*gBo71x__znrhXnt4B0WPu2n2*dW+o^oD=sKV zAZu@9Y-VW$1f&|2=my26w7&4pbH6gQo3^iJ%rHT6%{3uNvA{LMEF2pHn`kIT7$g)X zq972QU^pxrhAL317%b_AP1CI$$V z8pw}d&(H$YN?Vl7M0-U82pG#3OrI}y2M;UH$jl7-ZJ?GZ(3E&L=x0*ZyW07?#sL?X z;3}{$71GHb(h=Ub4G*B4#Z|#UJs@3oBz7M|)j>3EK00DdLP#LAuAmMVx_va~<(}zf zj>dTo$aydz8DXN>4&a=%s|oDQv&e^Bc>bk>04~n}#5p0rxzjzN8&LOJ@WBZ@EbQ8k zYe+P^X9V80_h^}>E*^1`^Db8ts%5aMV{9yBzOW}=>4A4`5YQj*H#IP&iadsb#})L% z*UEeNmbF2ph%1OY#@2qi?f~A)+h6nRKZtoYgPpr>_g4g+V%R}PfpUBy#1{ZPgPM6M zy|1VB-Ue#OZp&y>qN&fpn2T@y);qe#>u+*MS-I{kKGo61v4=LRyyNv`q_}C|z^FeelzA%F8@{_}vu!d%hri)7lo5?*03EgskWc?+O=})2%aGH(nOMa~FHBn4_7-d!rOIRP{>* zg3c8zUG-Y3$%C?K;28e$8y!iv^B%DGxhO;9$|eGBP@vkH-92998s1KY_j&gQK_8hOn?7g2t_N{$h+Zf(16KmK+oEW~xbz@2 zU95|xtV9L!cBt7nT#LRuyiLVc#$+XGzuR=gdovQ)RIx%{bKx{L5IXt`<_&hP@#wXy z4~Cu$S)lIFw4|#jtBU~pR$P1SKRoDlcv%aMa=#CX4L(*-!2t0;v?VB zry|y}E-C^w8=T@vNBxS;ZNkIg~5G=0)W1CxJor!0)C3F^*3 zL6GKSH^|xGze6c2h@cafgFb}{2HkgsI6*f>(AinCt`4iv> 
z1!54ee1kv^{7I-LhPecD$X^u0yaQ+vSd&FWg~ts-&S5&haYSkbNfd0#Q_9E7&B_Ir z+b)qQ5yt3Fpko|`_}V>O4Q~m|*{e;585N|kiPH>O8F(Dq;0+wz>u>>$6>iLToktq; zz$CPsyE+vzS#zLp5Prbm$lMC|3i1li9sL$WsLw{fg`pII)Q_|s8Tyk%&z`O@#8ORxU zCDcN6`j6M23R`X$wXR<6j69I|(Gxv71FT!mmopE{(CX0q&`QuS&0EsRStX3z$b2BZdvTlyoeBl9Eg zBU&a@CVwV%CJ56!Gkeps>6^*XDfvn0siVm_Qvh)pOO6wfT#EzbbwaFVfaD)lJyZ*_YTgI~3TjY+`H**@fAuJK$|n z?VW6C?riNbY(Q_ecFzqn4~&gg44n>S_N@+bkMwreB@ZN%g%^kJ6Vswpq1gc0(AZ#~ zbK#R^6Y8+7f9kaEY|#4AcH-Q3Ube(MEVH4qcfU2UWwV)$zJWxC!bTxWNJk__SVDC` z?5bTYXsafsI>>)eAR9RuL04WKYg=y{bb*bO_bqskFqSA5SnhjCZAq7@pJ^3_ge3o{ zD3#22Q5@1r@-s?pg0&{>It?S2$+NuGUSvgNbz~&uPsryasiab*+oTpGU1CsT>|c$< zoWx$^lw;syXkxvo1!)baP-qosueFpV=e1a5ocW^2i)mdXE0xdE+nd{qUq~O5p<$to zp~*g}qq3kTq7H>6hSf=cNK{HZO7sp#3?~g2CuEoCD%LASl`@rlFQF(4E?tv*E&H6i zSLLi_WgLBhH^EKn@p;yzG=8?KB>H@J^|RU1wNsm2UmKB>j_SrLr%pf32@`#e0!| zVKbw*IX}T5Vw;i7nM|ix-6W%~!=dzgi>JeD@wrG7RWnXA_zvcrxBdMP_zn~p0uQf% zU5vwZ;nVifhWn~hrXfEC5p^qO%Wm6P+i`QY%ll65VeBsbap{@cxWm%d)#HqXh^AJT zqWkaB!7@=38C6l|!s|BP!QMgMBX4ML3oopoI3Q&pmwuIgPW(6gZ~Rt0D?K3r&jLRM z;{^H$!kgy zN)An556VR+MOUKhadtV~Uk=(ZDpDwGWWQ@zj?ljugboyK!EOylp+r67`{2>y`p|e2 z_7LXs#8T4J*lh*&gocqEk=RdPh&RLs4ILP+@0_8wB*1*>lIND^oGF;GpPrX#$v)zu zcHp?b*~6@2Ts;gsOdi*t0MEo7o*X3S6m_9_S`jKzGj>9=Q8u3!xa&D7X1`(&+K1Zj zorvB~+nd)$Y_`4o@g$rb9^kg*ru3Nc_~mf~3=ixznTb{j&Hhu-r%LU+j+d*jp2Q=f z+`J5ZOFN;<5aj2$sPbWzk*|wiXCzoF!L+bx>u4+}RVZk?#@)C(K$MWQ={;q9CLSb zmOqCcy{H?~_lS>l@3nIBuT zn)zI*zS~)ZD2vXZ+>*h`C}qz(QMk-=DR?Tc7EU_7xBR$%UJYBF6UU5{Cv9MMbOyiCTUHPHrZe_}zVuf5|qgWHojB0n`9$Jke$ z(wJ&e+78=BSU>DMcDK2HyA6E)Sv)j3bX)op;U@z74A;yqV=W_G-BVo${=HYm6Y7i!EYk6V(YRS9N)$y3|#-1@`sN|COC*Rnxt1FO=XJ(*p(m-?sZa9AKWbaQ} zx~7z;K;3w8fjqfO8-RK^5fsV@gMm8v3&hzkW2G;lCZ)%wdj}*3+?ZjQWGQ{Bd8+BZ z%-C?)zBYSboNP+%c#O-C=~BC@AEZJeCZb#;^LDi<=&MNxElGTqZWOSV1ui7duh(H$ zDpbE+#mj5G(pHH?9a~nnalB{^b3c1Bfis62h#*7(Rsdz>y><-uNTRLqUZPtbUR3)9 z!I9zo-O+vT#d^JctOd#>Z;~)b$j3#;V~M52}~Pa0`d;3 zhO0nw53Vo78x9-VCW1VClnV#@6XL1k&w~>82>I_@^oeX0+n+Dtp|V2t$fQY(GO5Hn 
z)2B0*5?LINP7eh+=!3&GKV|MY?=EF7K{^+JhSCPH&g5enm>Zzj$V8V%`nan+ths|^m88|DYo-oARX+>P z)ZgChjE1U}E)GQ-W&7{#?4j|hc-g&6+~2;n@4kfCh<4R^`n(W(5t_!51nQZlE+a#>Ps6?hT}R{N}OY=nkv z!LZDIM=vcw!+z$)W!J9qkmlU6r-NF6x`m{SNRRCFDI9rX#Y-datGx&obuU?)=(rk> zB%xG|P^t7|%dp2>5VxvRR0ji=aT>7t;Zl>MyA$v6He55TQRGd;spN`Oz$kqBfI5k4 zR<%T}bfb6!w^6cr_5tGV!M5S?_cobcVV|yRzrldT$|f;t>&&xLynF8Jjwn~o2eD`Q z$MMJ8rv-=vh&`}Eh=Gt8gq9d)K{tVgAK<+FzGCy&C(iI<+>+!;5l}<(;QE}=B1s~ zRvdyErlmgUWRhd^a61}a&Qt00B9*CRNn+7Frh4|wtkQpvd`Yh=!>~zRmllz(N_>u+ z!((=9c)d-5xPpVg&oZU5GB>sMbnMu*w2;&=^jO)xiSj4wlc9CWazk4)y4K9YUge&4 zC}ypgs9HY?TJv4YnTuJreHi(vLME!O?a?uXX@sANUGCvwzjB%R)8pkC0i_}yOd*8F zR9Zm_VH9-&@A=?)$Y6ZmVIw)Fitw6neQ=%FTZmeS=7|QAYKCh4weZR7s_DMv7W6J0wpr>$M{y|mKS*exiVyCLXcvtkcGJ#mU$ZKl8WQl z%`R+e1ij?i=seZY_dH8U%i|M9oY#y<&BT>HF1L1ePqnt`$9Mn{KwQB6NBa+6;Z`A- zK7l?smuvAH5@XVk!Yr|c5zXYW#PE0*C0V(j6nErCl$>SNrS0<6ve&93g3nDAJKm>cMNQybFAR58?Q)xGP@>&NRaYn1AU zmm(L}9odddPk!#)%(qzT(AmFNo%pyCoaIDOt~Ep_obR1?4g?Gv{Jc(EAha<$cgp^} z&c-X{v$2k2%Oy(&i93tuNhCzMv!4rHi(+q*9HJIT9k?%Y5{eNX!8z*mpqF65 z8Jd?ALjFGVM`Qa&^Y)@bxdS;ti-_tnD++m|cN@!vEJ|a?2l|_leN~B@(VY3)9 z`L0d(Ys1~KGW4eT^KUCooi1nII&cOR3R>Q;wxD~truqEw8rLy1`Cd&iS* zTdy~Y`==&?TG*?3+|TY4_mLejxNO`HM!)uqRw37qPvlCQX3vSpW{mTA(tUK)YnIgC zGuJm8p_P$*B}pdgc_U}lfEdhUxUPYLr~p7zp+LXNfM$mzd>KjrG4G*W_Yi?3S*|4e zhTvR$sDs3*MPF=181{P!z=p_ed_^V^czm6{;UNbx#lFy?azyR~oK6Cu1uk>zCB0z#%->nS)8wIaNOatBffFx4p0ewgcTS5vdWj$^o>-6KzgAapOt zZII)T50fiH83$4Z8c8?|Gf*%kVk=R~#iR(l2v~hx^)=+ctoOY`JSE9Q(L(6pc@+dV z3N{x}>R0}3Q3SPTi$?Voz8uklI-PwP)`KAgb^u^Pn2g=Tb}y4P(7x!2*S7Om^HK$N z23-UL2ATftTR5`tejj)r{z)eZ66GOLA<-B)H|@3NU8`nTd$fN|UIB9abkUVbBK@3q z!#MZpk(CE0Q!BG*y>WfAqaPj``~F3+D>3v(oDfmqVRUnF2qze!DhEIQqLG)`-VrYt zv29e$vTuey{5lPBf`I2lP3YA2dV_dTf61}Ax>0vjyzN7%OFGEi{2s&8LMX}8P9eW_ zaeBZl>Tcj4ZPyRiXF{5mrOJ_-Sj*Y)ny`;qbkl@6!6;hC>flCOwv(>S;mTfiU_Ug- z9C|gvT0l8qqtoJ&6a0~B>G{OF!pO9}#pltK;l*|q`bFr)s(9JZba20Sz6Tti44XVv zv33T49g-&QjwQ~{Ej9SvLSXlG39uwKP9T^*vqHFc0|)w2O#&Hrg8}?iV}K?2a1VeL 
zbV2ol3kVDbVI+LRazrORP_alO7=&Zx(B&#hs_4ipLGXjhH^iAZm#{K}tq1rpoJNsG zK}R{1LDPM`!t(prH!HU~w;L`}9}rN@QBzSJ0A7Y;3tw=*oD-(frJDPe2((K|iF>8E zr}c*xr7LGQrdrc#p>)7|kp;l@lEbJd z*kd&+IL0{ak$gT%qvLG_tn4S_j}SBt?Gb7k4jncXzU^W_auGKxe>3gK;F|hr5wk__ zQ94L8EINTBQK#HfC4Y^$!n$EWg;+BF`)>Yei9t?z9$8hDrLL~)B>DV9n0UxxJ9|h_ zRw1u_vC(2i(^%t3ZP&@<&cm3Ro=(QdHRuXNucpV!!)gatE%&*HlZVXH_ec8Y5wKt2 zA^;%dH-%}f@@P1qJR z8UHllFh0&uY0F6}R?d+pcL&qDX(q`_*k0 zBEQ74ocnqsoh<3%eQx^xqq`~hFd2oI(FScz;2aVqLIGqZxQ^XHLPxtNPN^8-^yZg<@LAn`s}1L2y1n*8RDW+E;;{0RIU*<5mh97yj#ehIB%2t^9T z=x<3s5oRx$JekJoY3jc^njDfIWHSQjkugV7>QmR~L~Cs89P6_591Irr5Vs9?G`F#K zD|gukum??Azoum?0Z~>HJxCptUm(Q7(VN@(t)?MY#@We(O*b&m8u|>@QDS`Qr)(&b zEp07Xl^-BHFN;)ft7mjdy`_N5h&+gRmZVLC&{$F%S&I0@zU;z|#nJI=fA{K|&!18V z+7A0h_G|%cKf;^g7NB`?qUKB!K6GQ+l#v3;ZZc+<*NIK&kc*aCDS4KLL+xpimR$8^SOJhFNo`Aza&WoM0p znb-Fh_XcGP779l}csD!|Hu9EPRzo&*t`TRh7fd&)t_z0uh1vt&{pha&hrRu@tfJQK zH5i$0*gTz=Jl@DzVc8;MB%VS(94z;{;cp8ikN!5rT_Nw2uQlzqUQ?czw{a8G)Am2F zPGpj^xhXj)Kb8NSiFL7pS^X8ra>`O=Q{hvRoN;n@ z3oK^_>WjI&@Q0N#!OGQ5Kd3W?h~uM1oz$Z&3t!mJjYT8x>~o+m9FV&_IV$; z(s>pfbyeph4g>-M&N5R}bySs-FgbI1#c z{IB@OCvGBBM@L%@T3Qzu7aA8v8XJ2PT6%VNc3L_HS_TH{k8e;rxLG^uxl&s@5dXhH z{%;&1BL@R}Gh0V98*75!z<<5^cgjCR)&3P_p{M&(@{c$F zE6GLsTZ2C|`hVv7pQj)8;(_L({m1q^&;fbukw8FvK;lCDimt#XnNXgIbE)t6gWf1{ zd}1IZe8`041mRcaoR*nb+H9a{2*@(9}wU_FM=c})e2?z?MPPByOTvlZEZX`h1ZAe=)-P5(0cPZ z3JEDGZZk`t_viJ)85!Qf;9&4XR)c<6^;X9()EbQ^1-0Y;Kn}qBT-Oa5*YuuPVB8*u ztkLaa&rhzuSlka?d~#S3ZqoOC9sYlo3keB~hI+En5;zAKGZM zF74d*TJU+@Z6EjPSm#~h_V$*^VNa5Rf*@87MwQcM z&6X^!+-l3Humb*H+YSWG3keGgI|+)w#fOWF8?{teSm=0M(Il^+fWrN-5fJrkZ$Bf& zI@RuWy)y6d_P9Sc-j%Sw{e0dfbHD0>Z}qF%Z_WGJ${|URT(7gmWCG9Wi+|*PmPeFz z*Q;~u<6gSi?a`csYPSG15^BnE8v5IwgNxA)PcmFfj_$<;_|x?np!KbMf6_vxBEF`? 
zfT&^mx%@q((P^w8{X8&67Zpd-7YOn{En4IvPHQWNgrsCa%~4^2snz2MO~N{tYR1y0niq*0Yn(+ zw1Y04HK{_C6_H}Yxyc;*06@hne<*lw+5LM+*_Tuvmx28ff}h0K0F=l7K9@E8d}u!z z(uap7oQB_k9%B3Fa=$7W&Qm6R z!k%-VcMnn$T|ZC@s+4%DhsUhP6(vlUN5W62vgV&ZN#bk2AfU-|oEDV^d0HtThj)5o z%gwhqj=g~1)anJ>+OEMlN|=h88i)Kh9Qt99bWcH&NM*3eJ>ox{nDBn}d0t+}L4vMq zzmoVQ=bH~*&F9AqV8a4QhyS=kf3sxRD8myK+W2Yp6`3Chzw0v6X0jK7SMvQ~fH%Qc z_iAT|Lcl*Ek@$WL{!_N+l{hJtE;UHygdVDzSa&B3o8gvgJS6N6TMrvM{hCOxS~fGZ zt~=s5nQy0{_{L$hv%Hol`l{m{2*vo+kzDQxzL(#hXPA!)WdqFB$ER!Q(!?YvL)Eo= z3c9jo536wR{Ra#!`8wdcRZMnv)o^oJF$33&t^@cyO-yHVb2%wTh^3(qI}0z@!Ctw2;Sy9B5G=@?Sjs{ouR;x}86 zDka4)gun=OjU9|XM8h}#*2xl4x6T*;!+yg}z#(*_!^1>(l!#J8nYMg;8yN`vela~r z0HTcUW6pcg2S|FZzarWa*c!SLKVMPYJ-lh>+NRa1ihz z6*i8re=VN?5gU*Z1Zr=^BOr^2J%LsC%?@PwC>@q8ctGDXDFyGZllzkvSgoXuc;m~u z=7gs5_%8WwM%6A4J|Q{XOgB5E5V_wR{<%DHdLp+t?q)uL06uCk*k6-*BVt%goryYZ zSAy{OG$ZAGEQ`mFHTV@pv%$3T=zF6T$(hB4hbY2-fL#yKMr2HPI>R<*c0H{qBy_(Z ziW5Hp0ARlKefWfcku7DbVEsk}^1HF+$w5K%NG?Xn63bjnQ35Xsb1eSYN55~R-2(oi_nl%E*i9UP0Ipvs z{XR-sAqU0F*#&pNyizXl-z*L9+3x!zCuYL2kGYdzgn?}WL9&MV@(EC7puK#3vjM+O za<;_6%d~i0&_Ypq$2+}wHH4V|m;@N{8#Tx=9B_{)_0nTgc`Ea5!;Hf7dciq$^OREK z3^%rw>MLYNb7?U6mskn((E@D=hPUlmF(0=JBz29sqRMILiTv|57hJE0I-kM@36Tp7 zuWJ*^1Hb1^zC<6l80D%8h9v?CNy?(*&S$Mw(;{nOrKk)1N%C(O5P_S0tbVu82$Vo7 zG+aTSHRR3A^g3J7Hu*hgdQSNNB!|8wdE36CuqREv%m`eCM=JOQ`*4Ik?cp|s=PNZ~ zSdv^0iNpj-iUp|d9p*|w0C=`dR5r4rh zf{49gqC-QpC93YwhJj8(HI)J>w$$|h3;R}32Bp#^2cq1fZ0VSpmx(FcHluWhYDtNA z_o#E2mI^BCij3uViFtF;@n^=7AC32_nfYnk=&KRdv(4F^+s$a_xWN( zf>nM;KWzkt9uZ5ahNdo`B;JvZm#^Z|fEE7STmO`c9w|8wKDh^x%Fg&@KtJEhLU~3c zi6DgpEI@0{v3X7;z>}6Sq-a;Q#AzMN-8de}j&KN1sTKqhddCIi1ElFdj?3Unk*F!Z z;4m}L3nJD9%B42{4J~ZMblg@Ht4$9U)Vd*{=&TO{u1Q}|P- z_b(YNPGJq+Xw(`FzO5|W;x4P+AMxKDE)ngsyiK1&{o*9rAiXD&?3l;Rb#|8+BdwaS znO6D%naip$uls@0FcmWw?wxfmI;&`OGDzwzMoG*Epd!M9AVbqH$00|ZO_BYD&q8p1 zi-$7@$tG*U8q0Un@Wjvp_qw#1Szgf8neq~(@z93BM3rr;RjK6)@o+`o=WJ8?{WobWr>HqPxt8M~yvU6uPGpu=F5pj&!;2bif&8`5kDRPu9LV3_ z4n)Wa5rfvx+Jkluhm)Z!)#_I2$X)r4#Qe+(KQRR6uM_a$0+649A))=n(X9e}AwWJN zORLii?ZG^~wU 
z0ixuG3?QCn2^<(7@}zN|O=6+Zsq`A9RM}}|x^*B{g~ud`Bl#jU3O3_E(ji|yAjppu z0(~MCW?JVRm$mrr3w6=WQ$~yFtdL2E= zxm12GS1VI#qjf+#qv~7uJ z9W|1PRx2;)?uNJO=8S=V+W7$)<2rHsrHNRh% z>v+ZBl~NQzoM{P*&q|cP-|o*gKft%%!(4x}Y&+ofzMx8cz3|R^!ZJUtpNSh+ITW2} zoZ9kdd3ufl=ddG6O!7Jvz3DUKJb>tOmAkzkuvryWa(RvGZ0;7J<+-7&sL&TZFsnaE ztD0t2rXui|`+8$TyLhm*Y^})?=5(I--LZiDm#_>F3_~9DVOLS9 z43x8`8uK1FngnbXv_BQr$1YMARIfirs(2tE*6p=7Dw)5QCROkgGgGO)r6Sq9(Nb;? zx^XyCk&`V*!US3Ayt1%Sn{yG4Y+uWY^WeOJ{idPqEbmicFx__QLMdZ~FO6EwFH`zh zv?^6$lRXX7Q^9*oZ%p~aC7TstR!z2e=k^y3_r^Sh2D|T@2JVmeA!c%qU6Y|Icr1Z{ zj+;l$&aXP}FZyMxPtX-_Vj)KR^l2ynFk>t>2tPVx#9Y1rC2TRNaUSXGIuQUE`mV)w zK*gy?FPnrT++WE-tgvpcUoRfS>>TfgE!LT43iHE6>-M^-(xL#zI}AED=SfF>M_Igc zh1j@aR(44J0ab$t1n*!!h6;w2b`y~HVWgV? z+pPu#ukj&gx~xK{B`fjUoU)ebJPWtldedjRoF*R%2ny?teBR!(eLA_vD_E}LPNs6s zD$QJiRWU85en(#Q*r!&^qAe7`7lku$M63J&9DnhgmMTH!?5vccC}GT*!`)2IIXC6K zoGTewF0)_*$nQ?wOAsDyM>zMkAb(lrQaXoifNts*aZ}eH*z68fMdGgAQ98eNT2Be6 zcL12SScw5`J~TWTlL$zhaYz<62cbgNhL%{~#laQYA=kWfpEYZ&QLWqN9S}bWylGAl5 zE%d8z(#5+}19Y`$o{#B{(#tkxuQLeKt;N!^`r|~cr=0GL@?}Fv{vd>Y#E^k0d?VF4 zy3WCGGrdcoEa}Qu;xfF}4`k%*WI~ek8nW<|FEZBl7Lb@I zVi>^CfI)UtAT1~oldz+YdAc=U(jO7tHzIM^x3HGS8u`iluw#Fwy>Y?x)`B+9i!P+I z#`hJ^bWC5AyU=-~nd6Wov=BUanW6G_V%;aEl0uqkPtTR_*Dc{3$ylM}Gq#9yoS^H} zqG9pG$`z0o`+!IYC2yWemUj@I#|51GLGe3-dTdV#*bFEr2r}eHg?t{bgtzSd2dXrF zL0vF0VakB9xTI2Y=@8VBX#GOB_#!ZTRw5=@y)4zK;GP{Zbo6 zb?EX@!aihe<`E50XKLO_<=^Sg6O%9rME=*+1!S|*!q%F0Nf ztrVS_=L);|C8Vh}M9eyv6?s#HjTqxg>c7ukXvhi%fKi&}YA69ptvD!9{I$!|`@Ed`%#3I>tTQT5FI`_%9rk3uq){xxMngNG%k`iVvr>-` ziqRzp<#bDe?h_yifZ%NUqF6wQao97dW$x`^A|Xh=L6P8#{t@^6CJ_&+F@smc3Pt)Fy57OKb%Y&fy1XO9EHw^!&tF-*o;%kS6na`5C*TTORTQxIy&uui z(i^k!o`(<)Dm}afj?|zt-njx$=NNe zlcos4{*%US6L)K0sl43LmLYjML`EBxm@jBVPwBYe<#e5v7di`0*3~+)Xt8t!j;Aqi z8FhEn{DMQ#6D(cVw`+N}wsu$uW}Y>iXW5W6DCYH9WKd^wO2Fur8%rl`X=B)X2Ib}s z76}NNo)$@Zh~qsnP2su85F-iKYoj~rq$ld!^ZESfZ;-+EyI=$iyu4xh<@Tf2RIdGQ zG(^=}bTEq;0i^XOZ)_&kJ;i-g*6`UtWHDLs3(I*9T8uv!hev!e;j`$ z!Ibw?7Npaq8A}3TAO8Wm9|qiv#vd%I(ev4t$`_;uHW%T2XmM^Sm8 
z^*0Fhp{Hbgm6ku4i)j)e*lmBlwRO>*k?v^_WAA~_Qws3kNz-rkN_6a|%VZ~z)1u-* zD3w`){-W<}JD1mDCjc}9_$S-&8~j}D{jN6vRKvc_=NHdL2*l5y!DTARSPNNyqcP`8 zvekp#`vzsrGXUAZAcD*oG|!{u6OlLgXE_NH(eLz@P7o4Nf(eBb@KJte1-cP<`WnT>?=Uz@oe$I%P_)hzLpbu~?81lnOsz6@YnLbea8kxr--w z+|wM-N~D;%k%YwD_k{{8R?RY^zbcC#Su`Ke?+k=5#S|LQPCCi>t2|5{SwTDxSJSEW z{tc>DSax!L*D9GD;!hYR1$Jf{3&oXkC_C=nivaP%w-fwGnws5aQ%p&as^N~B+s87! z6zvLv7!=|Tj`|8?nS>(1{@tOiEWW7(sy^Q_p;$hGtl#N{j10CzBnF7SdkhbgQlyzU zuqTSpXm1bSKbyLh)qNNIxldPIPkW^f$oGNd!^WOGE~6X*kd~EJa>PAf$p!ErgZ~H? z;J{EJDVa$icm#;azYRJv5G5Xx?i(rm?Zrq!ir=PC(#(&Fdlt7z(G;PT2CPkUriitI7#M8NG4{olE<)B4NWP$_4>nUHO|awd-_whIFwur`@Ptj9lQ5+ZB2l31?sLmnU%{Pi-F&Ax zJtxeLZcbJys$#; zsIVwzBE*2elU#U$hq74j?UDiWcPj8sRf=tXWt3}N)M^sK3H--b;Npu z+S(m5$f|^7qaM)u1hvxfQFuRqWWC=*xe4DiUU#=-o?56PPQofcMU(UXO5Jh0R9ycM z%ozOjKu}zKsKlbh7l>bM1H%fS-dcZ(T`)Iww?n7>e-7rSN~nvCGc{_-@`6f5_jwNy z71jb(48y|W{M$VL_fh<7AYd1OfVq2&-LQS@aA~xG`4T?E*Cp5G?+2?ZiyY27s0e}7oV1w3A zu<^5~lAY-R6}c|8wDkABPyooK06~F2h{RZd|KVeQSE-_`D&Ju+V{d5?2Ng~^3+&zU zx7zVSi#?e<^Iq6Zf_eVSD-R5nYgBwNX6jY*wXe|Eo@qXE{fp9hqg3fu|{Rehn$HB?UAH>R$}x`vy5^EUguCCl_Ni^;RG->ow|F_Y|+C zm=G`g6{g{{$fzXM_-ADGNPo6~@I!xJN{|mGUXm~9ajB^+e^B;K<$gGFG8qV-(tbXoZ|lg%>!3y45J97 z>cyLP_nOb8NzJ|Vcg=LT|7hp%rEy|>hVT~Xk*!HcE16!&V;&os#wrT1e`_~F z6Ch;n^%LetEw0bt2Z2ElP{`_Lf@yX^)K_TFYndDh;RA$5QH|HjN=RJX zKl?BFP``sz%vWa=#YqvLC^{#i@6KgW*ACw(OcoL#;-ZM~JF@ z#2>ylCFa`z&30-v8wH!m>VSqRx6F3A2N~x9V!W>QNmL!uxb#d-*c~1K7W)=R*O@{@ zpyzU=?lIS%lstar=1p{&m0E>M6zqZio~5#Y+y%*|gyUx*oQ+RT@ehFs%=Ascn{_e6 z2O9^C8^bNHSZ9Y>_zdz1?kU|?xsj#L%aywxTGBP|2-V-e#9b4^vL40s@Hpoz3(6eJ zn2f!1^ef4lm*A$4-0MQ?*v;skL|hABd3Q2dXzf42dy|A9EsKg|P-?0O0IEks^ypaICY#ME0Zc)_t!7Zf7`XjD@2f z7u_Cub0f;>WdqCnL;AxDt9e0S3A7<+aX9me%+JMD21tXZa4$E?cstAXE>De2Cg;Mx zmYPZ=wN_dk%ZtAJTlDrbCF<5gwqmhb8iLUhU%k_&$mUegs^qm5v)(=s;={olW<6Wo zH7P}|Iv2VDB~z5MgW~5`QBGiDZEVOQjaGuC@qCea@j#(Zv4?uYb*}E{6`}?zgM^=PX^}v zG!fu|kvsI7Qr^~Q3BxASi@KsL)N#bF?3BH197)HuT~QAI zA4M|&X|3)~_e0zgVv`(`nB$v5O6v`$X^CtMObFVZpQ;~w_8M1jhI5_jb1xMfkTOIG zesk`NTwSpmQ9kEYvO9}>)=RaL 
z`I;fW<>+ok;Iz`&Kxt{a`e8{}mnqXD7SC>C>5$x0rd51r?j}UiB1J-F!kJ6M)GR8* z--D=B6&~jPP<~fj7l&SoPn(AG=oVkdWqMv$;MYo{$;!9^3-%HK7gs&qHwfgqE_^PA zvSO|WhvTD}@lTk{`QR+iFyUc`MG7Np4=-MrnqqI z8RlJo4tsv&1BqmzZbD4J%2wDI^Yz1$;`WqQw@%1g$GWe6f_UY3u;$mkmu%5`b)XLR zQQZWz-%jCjuTypL5XS#W7WlgHL7Yyt%9d)?3KL>?mh9?#{lq$pV_u+h=QrgBjTO6=)Y}LU1P9t9&oS8 zfM%*4mDa@Qdn<=>BsYGf4YLgmO}y|~R7L;%#AE}`Z^7lHIQ{*G#pwNFVGR{Q9J3~R zLVSLm`6C7@cAdCRXd%zYmBX#G8;j)0Z4AfewrH*~o%WX9$%ZZQS@7?GGG=|_T-eI^ zhK&Xe0I3n|W+`|lzjLm0Y>g&~$mwD<9N@^fbDo>x;ut$N7Rv8FhL#3fJn@}5i8#C= zoGW)oYQBbP9Q(#N3WDV37nYe#5s3c}Qrn68=0n=DwJ*4wsjQuW@kym8lE}xczzoQp zY%fCinL>&&mD)y-r9I#549f1!Gv@}?GN6>!QO06n#g!P9MfnmuvWcJpo1zNMUw518 z$N2s_C;LVA@t<@09Eb{^KGDiOVwFoR-tIFH3FDWyJ7lHc)@$5m!yRh8pq`w4$3#M{ zH@oHDXR-aKkc@~A_+Thj?#2#<$jBEUnzMHn#247PjZ&QEG{aL%V0&aF*i3B|%zMjh zF8tk@#DX-#NVTer4fO}jWok}zLH!2s$9Wzr&~(4EY*^GWKVL>*xpH&UhH@$ybG7l? zfz~@xE~0n4BocYYmOIA;-ZRb0CY^V`I&%gI9ciU@2?9M{6~o(qtC5d7%hO3%30Km^$b1IJfWZHxrwU)3C8^vuV_rjh)8k z#CBubwrv}YZ5w^3=X*|n?{&?Uf3u&N{j9y#y*~HaO+)0tt?&Ipsnf9bBb+a+xi($MI%` z8M7K9bVdqNv&~aC8s^gIFAvI6R%*!ZZBTA7@rnm@!L;VL`!P@>XCM| zkVC_Sxd4T@x;6*Q@zmrZ;m_+Y0|Fiw7;M7Apu8qpSDXSQlNVD75ntvd?9osxcUPWp zvKuR#bo-ccg{u$8Q+fX?lufEQ4^7X;(31g|ZRIH?C^5+iB{#D9tMCMS0!jszi^*M` zpVV`Egl@K@<2XAMvGiVvQ;9%HmWbvz3EpTXH(bmdo(L4Xe&719>T0^O*Jo7Zr5y1G z^53>?HCity+f@{d=tFN{{>VfAyf6r)?6ASd%GCL#uq}46=%LA@geKlEHHgPUh=VP6 znFuB!t(>feKwtH{v5j|mp9#l>3b=_7&^|T-Duj76jWF^g#n_61!NC5L z$d4}#E&a0D{gd(j;12Y7QHwpoOP7rd7Rm4Jt}m43fa$9A^a_r2*;H;n%(Q||h6`6K zI>)73S6=4uIW7@-lwvukolsan&h>^nt@60U>CFI7^IXVq39Pm9TYvU8ZCE5C?c#c* z2rFFR^Br&BuF!FqLlPe#?7~;FPAL9r`)`m!aRGoTof6!)FUzkAD-bhrmaLNTV9kUR z2GR#r$i-9?61}SYT0#b#R6|;UebtrdF~?A!e+V zbF%gY`P74nr2-2nl+@?~B)*`;81)L9F<6aaYKcFufRbV8YC;ur|J2Y%T`JrB2d%RA zG$p6Su~|u(mOBs9G^Od$41=Nx5c+p@BALB^mSFJae=Dt)^KtecmqpMXS36zmcg-qg z&TgUDV6UfJ>5L7hQ+gZxA~0c$x6c@xqfCTn(p;>;<99&6Q3Lrzff2gm|$zOWKA&A9oe3dsm|03b0DZ(*7lIJVCG!>iQQYKLjDP;yN!1rck@##2jS&bEX_BM#Ex)1`b3IOm8wu@EzC$=qyzpK%?qpPGtSmLj 
z5N`)V$C8<=6t+wJ*uvKPqrp`{`)>7Ng55(-t+5EnjHJhlt%!JE^30Iieu9mR(Hwai}0SKf7WAz)H`@N)Ml0>ScTjW2vWqu^3-afueZ zYV!b7&7Ux-J_(*1A_Sl-4>4B($sRq-JR#_`>9vFEvTTEqL*-0S+mBPHKE`Em>Sh&h z*x}01u7A(gX=$Hc61y+b3mWkT}S>d2C-{fO%vLqk!uG|?Sjs#^h*z71nY}leV@MXpIW@BQ8OU;OKCq; zVOq2-P;NGdssL%ufXg_u7)UQy-D*8(EcU=GBI8e=$O7uD^VJ-tGM_Ke$mdA zJNyt*L)9D9Lg2{*q0>)-8TW08FLe;YE3OC*DvQ|xe5VXynOyMSmo9ubsduSrRJ!ZM zXrwyO$7d5{3SRNs$+W#Uqq*=7^!^k@NrWF-q7VTUbCZwA`&}Otq*K}XPzwEkN!6y1 zLnK~h>|(isr#+%WL=S@{qpoPg*&WIZ&+SavC_DldmChRQ83R85mkA7gCenyFNd|EHnO|a~ zn|-2|wCNBOfFGcp1%?bJ5=;-E{h+q-SS+G3!DN^y@N5#RR>O=Ji7BsGx-+8Fe2LM> z6p-&x6iH%ebFaNw;qA3$kBdK-f~tSKsigyfVp&3D9uR{9`eoDpzh9OX$foRA(tTP+ ztgq>SH2T`w&d_@5kM%RnCFbb$Y_~=j;#WK*opp^ai>Lq!6T?S)MU&qU6H()3MntX& z{z{JK$$1X#q%n;( zY(<3XczxW2ftQLwp2INcI|&+RBh8hiSZ)`WcbSE)%aGoWKD~&fkDQBf(HzEf(93!@ z^kILYS>O`!l=+E59M@-_sE)=>@qDk2YFD0e5UOMs`=@yEbT*Eor-8gI@Ywi@EvZ@_ zn&8(;PaEPUG@44ggfJan*1Mm7txcIy(&1qpdYP4Ydl6N4bk4QuW%@@2(^x;GgMvxI z>S@(XF_SSWO_}#Q5y1J3<6UZwrOtyM54~B6l?Y|EA4m7Rw`T=ibHhLmX)uo-6!hwg z5yW;aj9r?sMEHXf#p|WFdLZO4u?@UJj~)UuN!+D+xlP^Is=)QU?^;Dd{A$lo!-%y3 zwcjS6Hu^Ar<@JWRH{K{iIq*@&bEr1XgbGiwRwfdmJsJ6hHHjBrqjg)dmP1*dToe@X zU{FmR(0L#Xfp2@U(nozUiUjB(==;)7Jp%|*l! 
zP%=n;Pu3DvY)^J2F1HOkDXLF?KWU`ml|zyM|8M=1lj_rU5Qi2OS9Hjb=|rBM!ln5{ zqbu|EfDHa-#fgBfn~QKUMozF!{7F5flVjO6n<2#8BsPfD!ucL&qCJ`<@XrZ;L&+Fz zN=yWQD*8g^=01qZc3L}GRlHNlVX3SEX=&g1lodJol7Ax$`#jgj|5ayUf*^Dx?Z#7X zky*8Z0mHfY0R~nwmht$f-vI} z<8+ZV$w&N&c&xD2ciaDyQfgp>;kbju8pMjU2R(yc?H{UT!Snc4fWJ|g>rA~)l1Jaw>E_2D@Svz|d%0U7J2s#*Nyw$g zYJt*b{pmp|@#aLMZb0%f^ z$}>bP=n1~+tYCZu1F|84k}ZVTH=1g8GV3aT188;jZ?qxn#lxe{SnQm;uxKIZR+!Io zIMYO&AD<30eNKWeXr-TtzH_IUl)}Hl(EO>TLkZI*@PI8_*wrsiM^pR)9gn;p@1wsH z;T%yMj`X_`kDEVRma59@QhaagbQ={RTh+%U@&Yew=WYwHBZaqh?H=~Dj>_SyZWMxt z>zl3QxLXz_X8A0YOq(P_B6NXSt_}siiT%ziXgvs8ba?Da+(oKFvUlKm~#JefL(V zib1@f{deeUb|LNAXbp%BZLfrsV%k0d_X4EaUJiKUC4vcc-v|-ROMM3Nw_#x|2za~i zX?WbykDoW5)TXltSVBv(>^pAiZ)r3jjRhdx%SdG-L{YZ12p9h^SrE!H;$+oVSQy?9 zK@tKUtufTbT4R}@iwz_9PqW)-Iqq0MP*uz=FA|0liyIRD!JBQXrAkaqh8puL!4nYc zyYmc6s39Ob&=7Q7K}hBxCfy(r7*d3qV(HnPHUEJ_ELK>NjEv+_Ze{$qDhoq8PnObo zPlymJoSmY$^iw~EP!D)BZtm+=W;*k1E~!W{10h6C3WXP(&PMb1k9yq@(&_fGwMA~7 zDh~#kkdptmZmFyaj&A!1C83Q&X|Xna(m#(){&cN>zGRrPTW+M}t{Z7wE+B zwDPUc$ioTA1Ss&O8Doe8p7&M41`AH@P)6kw-Ox|^5 zWv3%Qf5vYVILngN{d44~Wth?pz}DU0M%OELs)^=|DPl5ozHU=b3>R#8?VU!)qTID3 zBNODPB9+^}?g&-IFK~tEio7@R9Ng6J-l?%DHEu*oZ6k`1z>p3k8hQt96^d8)3 zTh3cKJ<2#dlvwLR{;N(D5fim`;^QhP4W&aftOQQ@0_Z7HQ92%}PzGnTzO%DE{z@d< z;M59o?&te_ayubYFT1IhiQ__EdR)(gdm>gi|V2n$UglN&}gb@!0_2{ zB1^imDGl==;#|MVm7)D|AKIbFBDkhtJh!y6Js1wn>UQShU9|YO2f8*BMZZ{U6C_}Z zqKn!-GI8W`dy$tqA?Zds8fytC-^<*1=0WcrzL&2VA6FcDdn-#cjcHS1(2*c8bgolX zwlHW_iwrb)^P}zoPepb4vdvlzk{5hNN8w!*WpcBnvVQ;GuZsQszD{obL&~A*dXD}7 zJEp^YM8&u*_8l6*ch+3i`!*pzdrJ>O+qK7l4s~`wehC5i% z6KT}HBS~}+Mp5A27sm@uY<0X@VciW#5M|*)W_H}7Sy?21Rn@ltoX*!AvhH?nJ;!J6 zfmrw4>^R5j3a0IS2N}QKl&aEMC-M`URVk*t;SrIA84jJEB)024L1izKXnqNWHd>T>1CdSug|G4d2ZZsF88Z-0q#iO;0t%VbaQnRTzRM zmk1x431H%Avfra3(|}52K*D;$hyAPMxWUxjnX>f9o>q6F_kpa4rSBclICi&`wG5=q z0t*W422J6v2Kr^(6Ruj;T@~}c(r;|ejw6%T{+k7mENDW=a2P_{3z7*+?zCndVV}Il z#_CD|pzlC?v1d&e9VRQQOB_)=zIG;zAsdnf-gFu8?#CqPSAc`?iC<8LHKanvkv{!f zolnKv0dLMg`y7!Gn%Ya3xb^R&sM}I3ev*yX%rB&zzJfL|ZH<-UY;86L>+R0xV*UWy 
zBg_{sX5p|Mgo z5JLkk;pN~*{Ws7PlgZEKVTC7VaGQ+j?5+0wB+vCQsA3?sj!85tl{)u*HC=ZO@(q`85AcC5Y@` zgL}DFbQYWDwZ?4ejT0mBh`D35?U7DSQGPsb`ZJ04 zBDis!l++0Zy|E4rB$>`f7TW(HbuM`i42bf*ibl{{n?`~FOjlfa6k<-~&Ab~ZDR=rI zA<<_k<|UT7I_R83*BwnpO(RJI?v|ErTx*WmrWv&>+^cp>gl;lkhB4T%$co4DWvu+8vC#Y9YOl`ozE0sIwZ)q+|B1q%fQ*wJS zEgeC_6Sn7aKG-JhW@pVoXEpnR?78oOmmcHYl8f7^3!>E&B_w=%4Jt^{DgJH&RGH1Y z_SN~vYYKOh`yrlzwjt+(!avA&?*5DHTTtIhN)2jnG8!(EqkheZK{vSdR~Iy;aC4W( z9{wU368B5b}qYgStyOl}d)*vie!6x)6zr+?%wjEGpB zD=+H?=;N_A5E$b1T;+*)22qi|L4+I=H-2U?PPHPz$Bl%hKmPZy62yi%wK7v^-#2k@ zoJ#1=D=Hn0PQ)~q=>zp+$u!`!0s}tOp!}>s!;M+Bt7jtKYva=3srM0ieked1!EV`X zcOwn%-0A!7&q<1pOz2hc^u+DKZcRoUkSVwc0a?5}x2PsvSw8LbE8=Hr$j2!70<|LB zjl#NynWbcW-mjgmr#R1K*+~-uxk`I|bdO0oMXd1H zJ;-pR!PWq@1_}=6OR{-amSm7*gP(W}u>TcY6*rcx<>oB|)!t?zS|4?G4zlQ!#30?7 zen*h@KnSDk`EV2DQv~}ZnCx6o3DF%v2#gn-X7};GuC0Q|rfw>dq8jVx(Nd;Kc1+(i zLtC>7vMs27x$f0Kb}$N)Oep;l)Ab~#7g|QpG{;|-xJMNT=dBb(RAnrKG*5Hk+7XTCb6F}>RXay z&VH1>DLNiE31^m@Y8k+98MqYd3ZX-Uda)8!KbT>=p-`v?8RVyGi|b3m2#HWE0hJdE zI&N5)El!4%Z(((5m3(5D(cJH!)XyU0inff5PJZhhYqr~exEY>6|I>J^J)^{K|zW(l%ms>x`xlc&`q$(sKt zMI9N5BHIwJiP@DvSNNW9znPTcw+y@GLjHN5w3+MKUB!bqyijJ2fDnvA?!%4+t^0)7 zZM!>tPDh8=m#;DkVctJ%uj-wrwzmonetKtk-6ne-v-c1e6leyHsvWMlpW8U_Uu-i> zN6|ZJZ9_ox$m*WyEJP7==aLzWRVU?V8=v_y(L5y&3&XA#o3QaVY~ZB~0eqEk@oz0@ zh6*q~LOa5wfKr6I*J**-7He%eJly)_$@>veH@jT^hXuT<8`i$=h_>Cy6t|Tk5)w-6 zs_L%XNo%}bg^2%i>=BVcXmBrYU4j0VPnEtK5)Yw5yTtXjxe~!&a5#O+YKs+@aK{&3 zJ_R9ds34=9KU8E7f7-3XN&7eJUIF`A+9VW0_-3OJKE+L_!TzGW^q3me6J6r}*uIb1zmakaZL zT%69*8vp8)*}v!$uU9zS*7_ZU+AhqSpNUhowkoEw*{d_L=u!mL2wEM@;zrM4Ms|ZR z#)9!)#o1e5{%Bd4=JHd^vqD`@eDQdg^SGP#yDS(u4?-w&AHiKT6!&J{UIAcR`~Yrw z+tQdZjKNQ2(NEH=ak01Xnq=+gD@GDRo$Fa|Xc*%)v$s$6eE`1-ejd*nwzXh!<`_(w z%r6$5Pms@d2da^yF~r`Il9Xg7LPEBYNEs`YAMxf$me-PYw%#x9+l2*IuQ1B3XHxKA z(;l7W@Ok>d!N>l0JQ8ukTncXxn)v9`>)spmu^1N)tJ0;El6zOZb6z#hiOo*PFV<{z zVQt?8_wv?O`NRpNK%Pn_$60C=!M|RH_*lX~Ub`oH3&h6BKC&WVAbkP}1^?8pFu=Rs zEw}3eBrcsV1~O+LR-o_&)AQ$88MdPM6i6YjbZn?*`ihl}>UmOfun1|ye7QzPgL8f_CK*%#@P 
zJgF?#CO+zYgSCAS@L#eYBPxg)(h(_z`E$R&v(9$@NJC!Uoid<;PD2X5CRABPyYb$P zh(GzUVA)dW*xlWb7}7e_28~71`s54ObVInr#p_3jl#hXBWqJ|5AGnEk5t~nG%N2-Z z(F3?5vnf#I2_dry`Q0FZo9NL^9nG$OeQ1_)R5RnqUeD*y=5QOGhb#`G&4p24O>j_O z=%dr$V-Pkb4BsvNCt zhd7c=aU^*%BCj*0ZNs>%ge*ATJry=&yHf>vdIZlh7e~^aE z=GFG+-{B=tB#nT{6t zzvdH)0mME2nLG=1DqHVCa>$cV`PP9Tnzc)nT&|+Y(bS~%>7221PRx+ThaA@-g?u&e z*f{$lQ?tpnm;iLzJZ#)4)Eo~`H60}f;k*;l@N(^0fn<^dGj~!3=al^pE zP_Nz8@Ax%%n7W~%l4{;e6Us+@A`@Z$Ii>uyXQ9<&sda z;UGhm%_BDj`nTGx|H2ZGzM>qvz?<2JzNT&8+OxuWUpJP~$-4x7`$VskA4r3B0X00C zCXWOJ)LPO~htbJ&^hrUz06o-S{gj!^*+*>&Z3C!b@jq!bK}YkPak zpMX}@rr#1RNqVI4%}e0_gB?MpMcwaUeCz}(ONj)oI5n0;Lye;pm>pgte| zG_o3Y7jDRQprPBH$<>TVk~Gum;|nC)FNeh$r5;zc7;+7_3+wy= zBuwfIyGFAk9|^rLs?DTYAU6Wuui*_rL=->@-Krl0OrdgQ1V2wq(F9M{u`WfKOzwJO zZw1RI342IQUF>3cymj8FY8A0r2w-KwoIYN!oc>!*DBRicp z1R2`42nXK2AOC+PNKWbpi|)JI$BGt)S|=yr$5UXV@&cO}P5klG_yMibqP`cAFXVL7 z&U3xEKZS~n_k+mUtRTdPnV{BO7z$BRl6X9d^8pN*htFu;c~Tm=f@4eoE@*Z-sV|Wi z7d?X4_;Q*k7~w&mjt5x5?%5})7@UVtM>)PxSmMp0t>cZ#Di=V4P$lu@=31qUS=EvK zD4r4^2Lf&HL`2(_98v538lRhUPwKrvdH9m{NE>b$yW>a!&KR^8oIgV3|F+W>dDq12 zso!%tT?()xJmhSuKXLjP+`703%A?&Q~cNC1i(YOI#Pm->%labuBk_k3x*@&g8 z;0p47U*EBKvS$!Bdu%iU|8+e^N9CL3sI*CQ3EHK58;#BH*ww?99PjI;hBnjk-t16b zmEdkb4jff=c9fI36yL>M{A6;mF>NH9fz5N$|6;2k25NM`wM(Go0ve+Qx@0QoR)v6F zkbhqS7H-K)+yCfQTQMb3id_@H_oDYx$ zYETyx1Zoir{kkPBC!#GlQ6=C6{wV&x5g2`8$cS|I?d?e>L$X=uLk?)nJuIy$Vjo{N zp1N%AZ-6Cea2pU41~>Z}I(1Gw@A$Bl2?i_svCifr2j*LmMe1L0{_A29%|W=Rb`(ol zpkE5FZIHFzXlb%&CUdmO}umXGAEBz*s*K=PBL4#xWdJ5KCdm#RJ` zAr=I?vPI=h=JzH_{R5hN&+Ba#nU0IsS(7hyS_T(?DQqP!iloMW-TaC4;)8&1~A}TEh2lSyr3N zb3UYU)i%H=eYY-O!2NigjThnP&z}krp-%?qs8QlV^3XBoO`KAYrca$dd}0fH?vD{5~s6p=**f-4X z%sFdW3-R!HH@}da%|(B{)(_n*D6U#hK#oXg5WQZouvLS`-<)VYBh5%r4?U+2$@B#k z5H>#}-2as+P|yHnU=#(uJKCn-%{h57#{sH-i3Hvz#LcJXqYvN=8us>g&Ui1Nv2%Uu z%;V>jxKb=8Y?PE1;APijJj}7f~wdTq9R?cd9aSv)p`ycoj_*89w8gLBWx1WFz z@0Y`a=1R3UXgMPx*~_;+6kU<<>PG45c;a?xx;IO1FFOoBerQiB+`SAiXeZ>feha~f z03f+jF_I+>_NL1=JRlHA?uLegk={pK@G5tLZNO<{wVf$DfZ9AEmRJ7|s52g!T^I-4~c;Iftl 
zA-jY?T`}J1=_)~TK7tMZ9#SpPfUh30PjBM1z)%qtwg&GMRs3oL2R264nKjJ`K0az4 ze<}2OgX{_54^_pmBlqhok^<7Tvm^5k#TvQrl!sur{VjzD+yC^2Y@)8?duoD3&361A zj-(LM-!}S%W3hb*LjEvzndv9CXtfZ{3=$v|{8w23{x+G~^&3~HxUjZFp}>4@g{)O; z1_i?=mLcSCJgzn7XCBI9pZ+c%5J&?I3Z{aH{7_owc%LSJuc|t!iQ5z?G8Ows3nQco zcClUCE1V220qWoQIHPnUhJJ@KnT;8no91@jvv)}9Lxct~Qk#Pz*?(wA!7(%x$Cm`8 zcAvNa4@L@Fc(;&e(y;j)s^rKYTwe)asFKp-O3tlHt?hP)VJ(DhMr4z6BE`7Wk2y04 zb7FKc4Z{oDB`Yh zUttwMYPT_=B>vYicFvWF>gAH};R#Z?7qyeHvX_b|(481bB>z;RL}1fC@Ox9D_PXwV zI)|;G{`u9|gtTJe1i9PxP_!pl2r@YAIIUwl-$+3?%u{{(^JXeYa4_>o=poE-)>GRdAUV-FU2vYgGy z$dwBiLvH18v5to_E zpTlLoU3u|~F!ci?&A-|WBPvQ^z-iu;avwN7HGc{-xfw;kM?|^;%wW!M;1ujv=n?*c zT>86*@M(sY$NKmJYA-^pL<7@fO{Le3NjDE_(#}!ljbuEJ6CF=@&UWlykq6$02A4rE z;toXcakYpKmRVcsXjmQ17rCgCkbGIz@S7aY5D_AaMN#vk#R;-p7jAyUbc1j9pa@SM z(m#gep>?JrnI!s1bqUJ^_dn(rBG|`K?PjtqmoTe9o1OHniX2FLE#|VRmF*I!Q&$_3 z!F2|*sYqi|HqC(C{C9U+10)7F^&QQ;3}fC6J5+a*t}#F8u|StpOyE9mEo{t4*IVGP z-65I@jn&N^&bByy1yJyVx_@01T9o*s5bLY%@-m>o&(Wb28@=MLsiAE!ShD6G&X0L-NSN&7B7vF>U@gVDlH6=}~QLTVd_2FW!|P zRQDFp#KFr>7IzC94?xN{bDmJsuKhT9cHn=aV21fqN)` zwa~kK7^VqYm_}~k{Q~m$d{azi=TUcb+YMTC^649Hg6Z2(eQL7FwxdONG09Xp&|yWl zb?m)LTMnZye2#pCjajm~APCT~q@XY%pqOUGR_5`?EtdbpkYt^u&bBj3ThD>Zfxar@-#(0vN$UWiCtI`^P5h@3HD}@~_m7 z-vrc*xb*vWlq(@iT@%j+VqMEeVa11KVBm6gh4hlTDXQ`tlpU{^UB$XDXuJ5T{4z}C z6q)NrHoF%dnfQwE*7h@wLf!vIcXks^&E{gQ*0>;x z6HZtqOwN1dk|AUly`+wYz#2>aXlZHp;*7T$uO>ZqOZdEzT~Dzex&OSL5e8KVb5l#F zQ7a7bw%)K)l9nWClkuo*qf8(FK%Z<&f7JJ$cs^c*9gYjI|lL_-P=kGanNd9|Er5XKAN6T z59!EKo)u26fCMVPjrL8mfE8p3l6|Y|#KB=oaj8-qe8C<>iCNTH@a3R^5C#nHAAKkg z6Hx*kETnn#9TqhGNOX3}z__f|zW!L(YSh&6H7PkOEF?H3+hfz=-Ymjb{b(h($`n7T zv?deKu(+H4F(g)ZV(`DA9T0vWR-ZM4<{zH4F-nx;S^Y(KJ(!l``aO#VE@TACo{0Fvn%ZDU+KTbQnRC{N$Un@|DJpYN!DicM&~1o! 
zClRjPM*ca9ZH0@mDJ~DJTIUhu(tn;-IfeyNh1IcPens|gEZ?Xn6&Q}n&-H+*>dT08 zF~^)7Ki{V|Pa@g0Q$>3C7ljZ~-H8aee@jS>6IV3YV0qohDxI*NeN zZu^1^bwFCu_$Ml{KwnID5(!9F25zE8geVq>cS4np!AeYmvX{#-n9urYU9*H|d2+{f zBM+g?D=NGi1+)wUZP=FzRax@xY!z0Kgb{#M-OOiYbY=0UcJ%HjQ~V)ME|7mdUv^_v zm9FtW&>onDW@-+X$s(x!+RDtcDTS3xLMVxIzen2e{+dqK^SGD)On?ichV~}pf$+3y ztlb#tLX?7Pd|MO#^u~MAAlw_2BeH#!$$~@)<_B25jGM2yM>584Ta#*P2QeaCCD@Yf5{WdHU;sslY>4s0Vs{0D^m7#Rme^&+F`gd+?N^!+S zj0+7SwQ>}yu82qoIcvNB2W$(TqDDVt@YI4KF^y>?v~_iuCOBQTwPz>KFdt(ej80)V z#>n>`g zua(>#$3?)bI9703N<~8lfH(M|_I0{N&YS-BnE%EjL848F$JODL;*T*#gZ!K6;nSOP z3@>BY&D>#F^O~y<-<gMfV=`^b#TLeT_J2iaE_O#kU|{-GZ%^j6N?mWht7XDilmu0O|CI2~Y4X%iFqeee?v3w(YLZi0JOSpoF3|=G!e#_ zTMDCp7CVD&_osILv&JP3w;Ql~r(S64cnX0t7rl>boy*vK|D-+wK#)?$25Y^pxRq3{ zKJmCZ?7UE9UW>>gDyyH$5Q#lM z&iRT8|K6DMRxKotawdne^AP&KA3LfKZALx8!<;YYZ1Iy$eXMB2YH6T6r75U9<>Qj# zu&zI11PYzd0&p?QKRj)^?8j-FE0tkVl?3J3>VBeMbT9Nm&)2M57UHd9o+Lc@S0EwQ ztv5Fb)K4VLP-wbPUQaI@n2bTIpdD2R_VYN_~6ompUx%qzh zv#7n`%1=@r{kGrf$ieA9f_SXTDpW;a@<@klCSl*)LHsk&M4S>89D<$IvjqPaX#Sk0 zeWxf-R6~jSn%(&k6JzUA65yhwu+ZW+L=Fiy9*aF6{Cn4CA0fmmY8MkI5k&hf%M34@ zW1B@E=v~DQEPdH$=KP^l5pTiM*0NeuFjwmj)Su zx~@engvc&IzSeE;ce0p;b=Ctr?kR__Y{c14uV%=Oz9L=K((8CHxRA!a0-g`xj;?=v zP(Q*7fTo{p0!`5`UCrTsldVjb5D*>Fy#DJ90(PNqUQk*j>ab(%8A5gVeW<4lJJS%s z-mtVTsGY<=o0VYwFff~*1OqX`>La7iRedBHR37zyvASydCV*hF+5l?~4!jYjqD29N zc{ztOvAd5#nW*lw*Z?UuCvj0duAO$K1Xlu2;QON(eeS$L^?D3VX+2x@i-}Bw2FoDR zEt^%yBHk#1s7t+-s_zW>W>v5;BG%KAYx9qC@ZM2Ji+Fah&NgYg+4(om2o(wbzj}fi zL|?KVS*oSokg05&^yG^3)02q9UXS7&pVuT5Nyu57DWIy8Mz(F}fo+(B&Z5mu<2keR zXc139`2b$_Ecb4sH8!JV3(b+>iyrXU<}t|MdH^al`HM#yD1s4Aaupa;!P9;x|2(4+ zX--6}17fOf^I_u=!GQX`{F$@s##Xp!zVy8dkgK1N5K!K%#e#6bUq%@zKns5uB7g2q zkT?SQP2C=bBuYS6ukc&bhyFK|9EKWr&VOdfS(FZuWs6!f`VtFyQfl4ym8%jde$dM{ zg>oZdyH@Cx=#d%msbJ5UA0BrJA!c^5ax5zV{&|>vu8Jxs;G+H_TWkR*ZG-rvl zv_7?`_g13#n!RIp=b-Y7;t+_X46wCxLUGWZ7NBD5+c@(S6FYV>eSj}X26cQ<=B~6* z`}zLKWq`VAcJ~hEuNccj!HJ^Vj)yd_+ORc*^mVtr9kZG99#`3j= zm|$oU)6Mjk8)G?Uc4*u=4u;7PHl*@)&)f9HMG&rPNstdbwBA@>*zw|m?CJDt^vuyR 
zMDY>-2e9t}2TLgWf~L#>@+7kIMC{1hfh@-(`!uo22fX7{DmR4(rptRBo6u z?->;<3cni~F7o%+ouWi4&8_oF{Dw=?mN|gUB;NS1*JG`wAUJRE%)+En{=j5>PA{IF zkbirq7xqtR1bra<1P!cH&Dq(6IvahbC9+m9*t~vNI}NbTpYn~4Qlg+D8-_O*bGxb3 zW)pB`kZsh7iSJU_?vC*jxy%CNmP5!Uf#dj`J;E}Ymt#mqvdn{Lh`o50kghw))W_n) zpd2mdGQ0mJT5Nt)6X{waA%BFVoGuf-7To$NpfpXO!7KX{O_1*PA2F=N``WfgHh$m> zt>luTXWG(woHJ5dJ?xm{au|yR)@x_J^Gu>Ny`w35UZH9B7rD&)6Wt2GznhE<%%ah2 zaV*Ia*05H{OgatGZbv_~>%gGhf@>p@3;?5ZI4awE+UCBB1bQ=;ek`~c&8khiY%&h# z;DC~a8DkI*eXKNQy5~g{Wr;gX#lglCJ)Toe`DbJi27jFkehuxt(RtZ1sD1TQ6-}8* zc~jsRNg^;h6mBG#yXKagLt<4ZRGuOS>W~%to@q)iy-4Tq^_##SC!7CG*jh1eHBt6` zG?M0@Hqyj0edQDB_H|E85ez;27Wsd_1W@pxDl3bK=Nb~yc{34C#Nb zajwVDao5q5ZsdK4{AK4JFHrMZ1Hy6@_C!fe8qk<~go#q}7Rx?Xm6Jp)Sd9^}n{AO1 zB4t28X0pyj*Ru%CH@n#ub3Z?mQfPaZIU+B4mWN*dE3a*$gS}iG+87VTrDW!l5?(g9 zB|h;+n>FZBcMUJg#V~jdHI{QSRGds1CyH^A;zCYn)Lp-|Y<1~%=3jNv%ySsNB z6(j#;^bP0K?}VSJ4iJlY1BCs;6W4t7vxG()Uhe;!G3^l-C1y4M8>@1|hB?3_Y+Q{cSSpfC{L_y(}l;TL$<${S9TD zUw;&me?FH{9~e4}z|OzJXIO*`3kq*XQ`1sEXILDgZ65^Jql>cGPTv+s9?x`e?j;~h zQ==$CHWL=@3xvAx*G;QO^gvt;jG1U1?P4cv)t}&Cu~wNa+5Q( zd#$py&DZy(C>ICwTSIU|MbU90vd}sh5Z9ij1?!h}d zzT+GF1~IK9-jJ&6b&fJt0ijlBfu3)R{pzw&0e;$lSd<~!;nDmBo$W8IkeEGlv0khi;ZT2-KFRE@F-fZ^XQ)-s% zVzg2wwauT*pB*+NO<2al7srYSY&0Co&je?*p{zT$%ryi0g~!7Btum_OoN~?G@uJzn z@0`myldtSeVDRCz{a)ku&!m}3?!b@^hHpe=BymKwj>5{NI^nBv4t3x^z+d5lg?*eZ zw2C=!GI+e8)P?wZW`uGAAV2O`S!!WG>bYu)Qkp>MF5Hb|8}F@JN#o|XE*vNh`!yEj zbn>{w-ltdCR_rtWHol>wN2B7CX*FwdyZ5w` z7=Z=&`VbMpzyQ`!4o_y=xEaG9RQPc#NIi7@kcnf`ue$KTP;mwQP1nbS4IxE|1@S1+ zzhnSz=>`vl1u-C}9XmiF7O4zFgS=CRo;E(-6Zt@>7E8fZ6fr@YSRrIx z>N=#feTNf3*9HY41BR)%A<)lN89y6X0FY#Eb88CQoO%;JIKLmO_uF^B1HEle@Rgx) zy;U%)hIJn{Vc6IQP`<*Y~4w*bv{B0+raWDco0W=|LB`VGPkl8YE zyk+2H0)rk<@Jmet<)-$FZsESr_{U3I)sJt^^Mq?o4{TA_@3w|#=7<=JCpYchj7`M4&vOfly$^HjaF&!93D>?pgPfR8_$4cK);oE4 zv7!HNxIeSM(ROZIm!+$VTE_6fah%kIk5PWDfAC==ZS5CczT9utP_~j0ip=E}W^AdE z|5Z~Z|C0t$I7zgjZZ|%TRhnRr)D1N_Vm*6wi1Z^+Sskjht$RcNed||C)0CcaSD5ts z1S~4@$O+pt0;XLIw^Qk81eUSoj1SWs1l&CU+sixo>S)fq4VzHlj`obuD%V+-1HIEj 
zZyw~A5b1Q^WN)#!(;uYsp(4`R^#BVk3T!~#j%B11{fvVAaSVspo=@xHA?YWsoU(X+ zO##%7eSYVBe%n8<66gUpKZ0(T*Hb2E62gF-&}AC@AX%GU+5Dn2UrML17Xx$SCP!OCoA}L4zZp9W2y8Ibt&^;P z0F{JMt4?!B!o#N>lld3{7cgbjLPV1#%U@;1ju^3VRU$-6#Atd7U0yz!-)(5>+43oA zL*b#17E)Ue_wn4#hPU<13CwVU6eg=0gS$a%5d4Yli5Qt zqOX{#5#=H%V54O%m305j>|nVf9|`7#G@Qt_={vsM4GUOKxKD}_@W34K!kR>wrkK9& zK+D?SQ|UnS0d%ls|3vsfy1e;^#xMf`<+H*Y`tz4vbe!#yqtZ^3wQ}{Z>G4 z8GCU}%U1DCVQ_xZs%6#3cSPOa@m@7WW1n0(uzwc({@{eUC3jcF9evR0aY%)%pTav1 zmO!1Fu6=npXJ?q&X6fK^ATs^v+xa7B1M~S$J9AaF+_!MaFo< zOasJ?2nfr7t|EuIi8hWo=*KN6`(q;!?J@kXa-)Q`KZ`AW88Y-AB(CzVTTE)l{*ni` z`6==aJ`GAmR=1)DZp@VxOVYPBmE`43jEDMJWfmU(=IvHBhJwD`W4n)Y-b29ovV#)n zCu^Nq)r<*z!Mj3TJ`Wo_V~zN}A5HQ;+u2zi)*i@;QHfLz+dn>OHB-hY=l4 zSzMHjf)jmIz7a#W9(Q+nm{!w|j6gf=4nBYPM!RL1bnq&Uz|j#u_%dHL{%bNGV#Aky z`EItJBUf?X`)zw^&7u$)Sxt!FJK12w7Y@S6bV-z_75?giJV!)I=mTz_Yl+k~(tGji z(c92FvUPL;ihA&=!rQM1(VtZrKYlp8eEOVQXYFC)dC{eyPFa&)$?_ZqoCqs&uR6J5LAVG+S(J;|M6K9=p~~lfdTd zex^1@9lL3ilMF1J#eiYjxP-Ou;n8(lAG~JXu4#BDXTK8*$xI}k#9Sb;iN-t`_q)IQ zE_@V`?dLhu<)`CQmCr!^{&%7~b+nu0^BT#q2WBm=FY0DxfsHLf#I;N0+j&u-LAo1q z%Q23F?nm~XMDlM}==YUvW|T?tQ~4nNhDD>bmj|PqH&>H)_IsyeD?ttJHeFZ-xs}w$ zAZ1NJoml_&j5E?~WY$E}8vW--QvYGZ$Y@=3>Z^$B5XD6FE_83<(A_~}l0K6;#g&TC znT@rX!7L#yxxe7(KV<|M1DK=cT5Kx;h&`PeA;r17H5b-US;FxVnk+?>gdTRxT2Csv z30c!hv2<0Rsgobb&ElucdQgX~r8lP}NNnRGq|1HW>U%4v)>kq_Ep}mqH)9yUy}==z zN&K!?{fx|kCcZ%aW5Z1GXZBN$$;bFVyw;1Nhy-x-o9yI7JSusM{0pK}TF3c2(Ha)E zQaa^()J9l@*2-(W4Zu?#S zdKO96h~;s$WP9O>;)B?RksmxHPR;eR0q)d41x*FJuF6VfN#lNCZO$-)KNX` z_tjcp)o)faQV>;Z@k&mz$QOl!-~Bb`@sWvI2@sB+F7!jKCfb|0__}UVV#o^EJC7yr4^)#H+_Cq;uCc>O zvxHH==~@%XX4V%Ye;8O~l4m;lu}gdPN*8`~^i>(VY0D>Fw+U2jknhecqp)drB53Lc zBjM?Gheyb0OT}K!k^1#Vc-j~&p%m>)i5Q{#-%U0PCc2I0 zOx#u$-kcm*3!7?sh_rj`rKDT=7h8x>x^CrdX=R0U?fe{(>4&x`^9gtlVD5hiZhfWv z&dO&()2e>{6d%Z)LWv@&8H$d$Crq)b8wL{<)}vV8Fn9kvzR;c{X2bNKTgjV>@CfJ& zjAo4!u$89SX9%OwxyEpVDi<*bsRUT$lb9s`xSiddv_K`&42gfH{fr|y^WC2 zz(sflLv0XXNX*1;36;`z@Fw$=wM!DOGy7FdQUU?G&~$gtQpV4n7(n2kq9u&7$XLb4 
z9~X#Awag#k`$Eame~_CdU1p1<;heR1C~q^G+rYh(_T^fqq`mvMnk){swW9LHeAB`@JA9u9m4MzB+h=ZW zYcuVo;v8k497hvy(U=ihS;xjDU{o{3?R|;SWZ0ncn@9UNq4WxR4&R+m!6pP&NcJ<6 zVv|ZswdaLD#@o)N0Kqps95HjR8ei$bh$*Qvl26^MU)bPZMU4lW@2xk_+F(nu z!UGb-6}^)gmVZDJ3V%Gf&#+L}l5MLUD+e)Jv4UhY&FdcQCl>P07AtmgqPyc^lPv@U zSq(*4tX=v5__j9NK68;m29Dot@EA0kv{U^QTQC|XZb(UpX{1ary-=MBe>xf(96<=2 zk&@m53Tfya!aq=ZeZHEJfFZ=oJDpRzzhv1174il~Td7UH!~p+jY{9pSNSuVC$}kVRRQntZC1_4d!WcNC>Y%5lpVQ z86nnZ*=;)y0NN_K5@l__IVOuO zbB1dRPnM5YA4^c-WS`$GovhvX<;J>6ui-%1beLMUerUSj{E2eF&bv?sn5C0EE;3DG zJF`h64(!AqM^~mq2nDP}{AjNaRf7L4^bKt$KvQIj`vswpx*my(iUCsu`4fAH&pJ}! z57mRUdD#n!0yJkW`TvxT0bXEZkw=iO`wSS$L?D0c4jYk?{TpzaanQASi>#69hq63p zr1XtoGrG2jTr&07%nzR8rhefG;`q4tw9qMRM!?+NvO($pEgS}^2m$`L5=gu zB}#`bsF}i^nF|7um9$=`PjSZvJ5KdyI&#LPRWa&Tkih-v|DHwB-^fOL#pD)zzQYZN zgNl!^5wT{nWqc$B!MuRUBU76N=9_C?`s%;`OCQ5Dr~$M>;6O~X`QnQ2mh4HW#{_RW zsml9XLA_0yTb~uTX+mK`->vDbp$XM%S5oT~35N3fO*j~BvGW;Vn`0B(=B=7<-MYF4 zK5Fe`9|$_Zu_aLVGt(~$o9o0ipHSQd%#dcSIe*R3GX92`zjTlB1w}mDpXl3pTmYrmj;VTj&TEBtsGt6QMzImA&yhdTrO7Y&Ghr&$-=E=l|mtCu=D{z(9g@zmexOEhx_!T>sk|72s0X>K`j4;NwSIXW?lv`r+PG`l z^h=pK&4|DfhQ?1pt-ly1KkeSwmZQWa*|;=kg@x@^cG714(IfXFdH+?JJ85br;(p)f zgZd@5qan|V+sx;<7KAWA{Q>P>b2DYdFZldH(w3SK$J+9sf3Qjk0vwhv%`&MRQL2%tW!N6^#ylh|!F3M3{kRHO*ZmI?T=nWKf&sC9gtSG;zyv^NLaNuiCc z!i4%?Y11Mc&~kYrrD@(R1HDGaxfs$yOMc!T*=I;9e_d^+?;ehe%|@2Lx>-4qKIp)) z3fFWkFBjd;AYin`vuX=_mD|{`q}dTE%LNNVXg-9&&zz*3W<%yZ^nJM~R|Uu<~@=7W08@9fhil z*8=aI;G0K39y2nt58_4Z80MPhpRN?@UVVvJF=rYW9s9Xf057d=Z);*2>3jtn&}Kq= zXDRr%(sUZXse|hcP{TPkX@Sle!KW?UMoC1oru6L=oxuNh7J$zby-vYL@wcV?Z{Bs) z&DA5EBYEnAaf;vl4ESQx!Pmj$Fw1>b(Wq$KL6eCpqs@VbMzAV_@h@Q=mjuLn1~*A#m2z_0A8<8E}dOD?PXFJFIr*7#TyVG#0@dM?Y=j8{CyT z$NwtCDPH|0YHK?QLV>1_D!mURUV!mXGO$Rq7knL4BQrr{$k)pX>aDxT3{W|GOnwX5 zmLC<5wN_UP_D~k?G(ivhlQPkN@h(&c%z-&~p8SgLE1Ik%>|P4PxOCK@&ixK#!!c-F zGgII~kc#2LCz9(5Ho^C2htkx^rS6|;!js_n1^%PzY<0hS7K*@XhA5VVzX>zIp^mdL ziM(!9O%<0*+F^a+Lt@F^v#<{QnA%*C8L4Lj%WdPn)yGq0)@Xa$7`9!{r$?aI)&B5f z`0FiI#s8@r8CZ>pQCs_*2Q)Lyzct{w_yST7KJRY$7IFvL9KR+#LHvfjP*|Pi=}P44 
z`D+Y$NSymDj7BmP3nCjPf@F~>;gHqNU?3|05ERwcNM$mQWo|kA(6F5FFt#bQ+uWZ< zmJu&=Wd3skc_pVm)QTu#V%2P`Tgvz}lCM%Y!pHBIcW+uKS^ROP;{AWia3CmG=*kv- z{5MsI%pe)uE#iRqK=;$n*Q~k)LM|N8bV^_J3Z@)US@z>?E{>%?K-Mjt{gnI95r1nO zlF#vyJP48fM-@_wlcCshvZbE<_NP&Q{{D|<%qjU}-}p;%B5#)aeg?)Vk+dmG9lTxr zk=x(r^H_4&C}}HIf?NgoTq$oRMHGzyU&H4%-e{nk(G5883EQ%d?j z$RwZf!TD!NiJ?6Y`{&%}`z2BIA4x(Uzt5%g7xy6M1d)GX-ROjW1h5X8zN2&a(G|Iq zaLa+GKuVjE_miBmyCe2Ux>Y(g6dytJ86Y!x9z>g;kbV)HpC%t_)^$1r2OGh)E^aG1 z(91bN*-ge#tYQ4IU9K^{g7oc|uuaA6N7K~Ye-2}S8g`<7T3#pq!p`LP64{1mORV}L zs~S0Wwi;CF@FhdHrKM`G%Oy`~;)5{g7vGHLydX!R#`$u%1HG#VG~J@;;Nt}2nrc~B z&~JPMDchsdSvi+J29%PU&6ub0cRYGbMBlC2j*Y*fvH+ve!q(AL(etStXCPcWgKjZJ4v;U$}&*%W<8M_aiJQ+Ds)Py$nfYD+XQ5z3}Z zN02?xe1fPVEs8~#OtJbewNtN6I=gzmopseS_4w86*%;dt?!9@NdPoM#l)hsMN-8+O zn4g{sIjb!r(zdzIl3%^^-j|n}=f9bchXb!!M~dyWQaTWEUO>;1emUGK$V&LuD5g=& zCKy*umeCr}r^-pafQ@Scb+ZfSh>49L2BX;#v;Z}*( z{M@3$sb6%|iV!deR>Hub_{C`0??mE;g}0(DZC%KnsK?Fbjo~7YMjvH{JLvFTVhXYs zTWz2Ke#s6d6+GNXZj)38eO1+{O7rSwg&+q?XgttiLtQx$v3t}nXKi|2D)LFBvcYA_b$>j{7}x>5=1*|~Boo>%a}2Wygq{wz=6 zsT6|z5B379gQkUBXP*yu7@Jg_{U@u-S$%_Onp_p3ID? 
zTW#^H3C(3UH8Kl6ZMg)JK>CL9xei}lxS|Cc6 zD^EcN513-lEk**<#N>gTmSqV%oAQgkhQv8?r3=xNwUgr*ODp`0H zz#YocRhFcS3{l|}Kj%+P-h7}oS6|di(RQ&9L>+DM+=CC()QS2TTJjfL|*kvEA4ExIRS1tx@|J)*&XO#(0*ong=* zlA6ZTOC5f&1n|^>J0xqn727_$r|jFuiBB*}%dGU5^`);LDUiVCsn=i&ii*)L$`JTT zM*?teDQV2*x1eokUe$%BhD9PiD*cT)Bd)J1+Dr`xbjV7+K{x*Su*~g2C2l^w)vEpK zO~OvcB&6GeZZz-*6z2M4A?ufaBA4Hb2kY%yNQxH^&~QPpyIyvSq&2tMbEsHQe4S|( zApqFZ2VlFSF@t7#JemFY*jdn+J7u!ZrJ(f2dEd^2+;osLi5h|~^UFbTI|a-Hf@aOr zH|r*3*8r8JOrmTwnZLw5%wA@I^Z*0(a3b=3-Y3f2jW%gbj)SvPA9&4IJM=ywT9oI# z`dc37qOPC*(BDA%OLLfyXLSd2-bug>k}y2e7~nQCm(oZAVCfg2FwR*EIU@Zc?n(sX zMtV22>yZ=cOUrOJszSddy?13C;JlT$3Aod^L|g4`I?yT2e|Ko7f@YDFO%A$QZ7j(=v z*@*C365|ve;(8yitopuCzKNNd1?Qw4WLBdrm_P-qSP7e76)5yPx-qW*%y+4#h~cJ) z$AWQRf7AJSXyiEpuaA@eK0UZ<(y@QCO9{3!k6CfolD)c#i#~$pzrcJzA2i@U_b9HB zWbU)J!dJF#1M2gbYI_r586wLXbaK=1nYw#RgCNA?Wu~YjD34_r%Vg5yqAgA@}3Pu>RtCR_)%PF|Hrw zB{qoL;(0hBc>UvI3$K{hsz?O`gVF?b2OC#I8Y~iU5q_gZk^#ql8mFs}94I;7Hg*eI z&}n#wZ!`(831>(kO~H0%&rY+D{>t(YkPLTCfx)!OZmMP(6yW(n60V=@>^u};<`TIa zg$jJUm8O=p3mD9fknJE?TTpRvd0Z#G>pqeE_-0R5jQ-Wtx3tHWaVPTmdZg7)2dE|l zD|<+V@)rJQkym0we^WlumohaC*y6Aw-IF;rMbtOqi@tl*$*|)3>)0PQ`d~En&H4L= z`P!GKmShf8c`<)3AS{#_ehM4{;A+p3&^{x}tEb=wfpyw{An5S?sxO2g5q{s!px~>G zBj`JUFDO~8R>h-(4LeT0jSS|@f7u{Wn8781AOQ#_Y}vj#`LUp7qA%joHJb8F4G1Gy znJ#&Jq>Df1YuzgFIEuo_{Z3^tJ+y;@oJS^QZ@FJk?>cI>7as^RF~n`8+6npfRh65a zZoWzWnd>(aBlS6ISoJcS?q*LnO1OB<^K5t-R-|t*99`5f3S6Y%6`c+$3kn+Z3>|S^ zWIzqgN+}Rl|7AvVC$j9GOoW51MmHv}i>_4A zIpZ{VXzASFCVWnj8ZaPJ{hrl?0@Rir$dDa0JM)j3oHgZ1cjfXcWa>*Xpb!CV({6K0 zXqsPd8HONVP+dG5DcU9rUY>n@t^XKDOgu#DPx5+7qmpSzFPHt}(g%-A?z6f+laFkF zg{9Np#PO7iQ&lswx2a^$p;&?tEFo(!jEPjUZGpqj z2@&7r#!ToN3hgxf$O`UzdFn$0n?!((!~piNeIyq6w+0{y+apJ!(Y?q*6AB1EAGerk zA{nXt?`ip8r4Wt5s8VQ=tCE$r?6&KZ-;#`Tb%+d$_KE%vc{U(Sz5VR792j>?c*4l9 zf5{7@F??&q$rq?VG0JU`51-0%GC;x@nVKb%k50;qFZG3@!9$gN<;iVWU%NUq(C4fB zTg)f7n(gY_T*^k}V;bK=={@;PBBN&1TDzI4?lj-Rif_s7LCp7gX5+Ry?FE*)RWx}b zSvpB5Db_X^k_v+t=5N5~WkfJ%P2bSf;2)lNlxRz&fhg_*fYd>kIn&aXue{x|&aa2!rt)al)IPJtq5kJn!Cl{Gv=WRWdO&Dg 
zZHs+(R%xuZ?4wt66$KE)bo-}Lkb&Q7(LZhT&ngRF@JqN)%u*c#)gzxD-d16m zsc+M{k3JkteIvsFH|wRVPpqtdAFPIJTqE{+YB}H%g@3(bndC2Q`ZVk^uUY5%j?LP- zl~TUQzNi4E^iw0N+KopgWBjW=mtepZ(qN)v1!)u?N}7FZE^M57GY88-iKb7QG0Lr& z9UcR|3~%@1(Ad^EQ!irZfqhIZ=9&Pmk#o{S1zuCI04_qJZelbVr{}lf_m#Scoo5n5 zQe7d&O)2~`l{F6)%3{a-@eqMkO~qm9`;_!X5lzqUHdS!aMe~`;ZWn7RYk6jt140?m zAUIU$fbB;wFE<_z&$w*R+tSm4wb0=P_cy2HdOP~89418JnBalBqWMChVjdQ>q%Rli zt41F@B)Z85#x{&fTmIN>OD%D)t9OJfovf9wY>e+zYov>OGsT*CX9*&W55MBcgBxa8 zT53<`gC3Q3T^dOyXKy=c0|K7JSiO(MAtK01#y~Wv7dYv4K%wOdw zONJCD_oM8`qb<7xycjhM_e0J|F1%#LNo-FFyR@XTkZ}!!Fh=Tu1hQN0fJ^0k_t%S| zP;)pChvH^t2x~$8d_~u{2BTuR=z{igds!MbQT&e8V2r^uWWMRHx}&*});>eZwsz=> z`bgQWCpF+E?8hc*Jw zbIs}{ZbsmEpP^uDy+OZ7ucVvZ3Y*Y+zNF8l2a?mYU0GJ)J^c0C8Xg7qQ{SlB1(B|^ zKI0E|t?`Ay_n}OjTmnjDK_v`?Ncy-zt8Ra6c#T|fV{^=KHv);O{~;-#-%5C_6iIHA zO1m`?z;IwfI*;T2C;OZra$0(a8+hhZXpQ9L7zkvZ5mm5?jPMq%h5^M~G$N{^rb+45 z9woqm&|pm9T`2#tKob`(ay&wNbt=u@ph@q`@AG|7QsT41wyGpK>rVVVtHxG&1U_Nih)6CL%ba2u<^~i!movw;TqqNk zgw|r+Djv%#^*ss>f8nf_9;Kv`^0th=I8*AT&}s9Ex)zY>7@!2yu;}M2Lhs=(%(ndt z3Ny_`;SU~;Q>6USd4N!wNH3M5Z&`n8-vJt?$tL<-;WzqNv#`-UN*6BMz7&{9Ouc@v zRh6(V{r2)`QOfEq?wg6hPZ%kIu=3eANQcn~YgY-3S{NKFwQ ziUZ70UE4R{bIcEoOKzzBV(r7!4w3ATQ7;)T39YN3K?c@CIvh0zgzsqkiz{y)Q;2aE zs~&EH{ZEOkBwwU*(sV;>Y*qZCktOv8O^os_&WXRk99(X62#Hsxio^P!_+m2aj1)Sf zF-o2XghxnW>mTkqS6)>_oLqQ*%R*<4c4I>OzSil%_M##>We=i)wf0-Sgt#UD?OE!g z-&?FU+c|@hWMuIF?Z?u9AWKE_(y-&;EOqL|R z??^h`2C$7F=EDv}Y4P~liPW(w#e4fmn zUal%ARO1zQtx>O+X`xUv5nnAzGftbSwWeI|F1vY@RKR=ov=S(oNfi+fuAL(+Bd(KL zKf(6aW%&I!lUK6{VaHPDhF$Gwx4v)9MoEt8a8Vk?0Q#gR_!SPMi!Qg&Z4KNnx)*)H zA{%%6>Nwg)dv#q=ecl00ZUaF(!t zB7q}}t7dNfRJ{wy8>hoU(k0}?1>ZTil+B4YW>)P4tr^;CaS)c%7-GUfWSy8hqO+Z@ z@4l~D6U%5VlQg|mfPmS6j^2b64G$8JXWM@m>QI3x34q7u8j@?aAFuV;^&&o;+C#JV zEMt=J5dU~O=|WGDQUc~g6=(73LT(nUT5EoI>7q^Q8DRQWGq1pW<`U+wevs12O=-}( z#mcxK<0G^ay8ZbRt=sy_hXIAlLqP2pr1$2Y@du~(jXa{!(adnq7k-sf$DW>>%*CTW z-?A3QC-ilWhUs6`ln6SwRFK$?X?<_HIqtQJc89uQ1vz_g76d#Q>K+_p8rv^bo>uIC zx|}FPq>hMo(YdGVdTm(kE=_pWzFim6Np268&>YB}-jG+vaX46AN2WBTauT6xfO2er 
z_Xy*GfBP-lB*3q#B!r7HQ7zhSd11{AgbRA`wSm&j-`b>#uN={v_v|=9U-5q5@ID;5 zh(G4I%kS(A`VaV3oqvz%Y`?r#xKFw>@JRUcfJZQ|&=v6AMivy2R&WT;go7TCnk~k< z@_!lzB1ByNy;LBO4G<8q{QdLr!^NJM|Cd{jvh2=Q=X#gO=s!_;$1dcnD)8_TxOv${ z_%G|TKLRK!maNbZ8oHzUI-N%txP|Jcd*6xtI&z;-#a!ffaSzPENuUa)V9>*T?C&nr zEqz?eyv8T1LsN!;$$+wgoH`pkN)Ku)^UI@A#Xk-Fu|ZU5Va3H$Kmyu1J+rSR0N9&h zk||MjDO8ZbDzd7h#+4VTZXt8L%T-gn;9i<#v7FA6N6P|bJLEM?bV4~y!RIb`VDyC7 z_RNg>a}4W_U!Mq&O%S*F{{3cU;b_^*U;O6U(jA(~V-@q1Zyr~j%6k6dnQFV&g#o68P@Os_w=Sb_@ZOm3dYdOcFqi(*$b@1;>|9px(^lr#HP~EPxX+lR z4U-F81-)@hFlkB@Xx0s1Y$BN_XFT{8O0qMm!_vP{-fu3LJy?G_33;a_4FQV)HJpc( zMoclG9aI8Pl9QzkhCh>g!(GI&7~i zAF0QGDIu5P&FAtp!wNmy(x0kHUDUz;HtkM$breD8kk6TTSQf7u-$C-ph)wZ`i++G@uW$S7ZE z^Eg&KwRgGQ-6XEH?`_4r| zhr8N|VG-;(Q?tt?gv4S8S)~n+W+;hNg4Uv-p=DBSU zunj{{SWTlaftbBdAxC}u{rl#ze{`FF6r#emfhea&jA-EV^p*V2CjtzUV8=yh;Ylf8 zMO#@?%M&$Z#;5r$37yA$;rp5znE_wRWBH0!KNDd`9OVo<{3-YV?I|#zfs%HJ2=0J! z+p}3)zQlu;8Q1DyOC*`Reqzo|SN3-aB8d38It!bGEGmo~|(p#s)@|d{3JzPs#2bFWy7Wx2V3+Hjx84Xz ziSVl{$ClO)T8tuPT68;dMjeU@@3>nNB$J0%asN(9#!|t9i}}GJ=$-EWAarCsJVJ1& zBb;)-EK}5_&;A?c$b~<-T5sZ>6fzHyt+O=t;eYdmSt(l?Cd1am$CV6_8Kts=Z({)J zkVwxN@#!*8PpW~YPkFgs=k&jda(q^%@e#I}vQvtz(yS<rL; zC6%q3u_`+fvYpmU^DX5g+^PO&{YHlSRn+uBP)luhzkqLV)p5-T2~}}#YQYHHVZqOx#31vv6Sz$&L_*88$49%kwH#jQ?FX>|#rh4I4bHK+uCor(O!< z)5@-6Wv)N@^(N_6{-|KVkqx7VlwK1XV?i$xF7Lhv-Tqi$!C_xbQd13fJFJdM^8e>J z+VW6AS$5l`Wq#FOPJ2m6BNX@xIA-rF!`#sn5kF2DI1@O#r7N%n>P;S!O1-3&6$8dH zzKcq$LEuhTfQCue`o)D~-Q8@(Qyb*!$&lUw*bomm|0Mp1RhX5D_RYL-%=eMYxX78V-c(GD8E-EkL&vM$nW}v;_;5(_lDofjJVk+B!3czjfJ1y4(M3 zpGiN{ppVoOIA-m^+NOZ2^-O8$6I9Nftl5-z)hP%0Ja3zl@|i+{khILDc!o(1k#x9`0T_u0{2x% zF^_Jqt0OsoPJ5Z!Efhctz%{m=Ft;&3u;4Vy@_=dVFs8(#p(y^cKjqm`QfSoO^`WIV zV^+6EUH#FL9l4isJCzy|nnTWhYv^F;Bgu>s?0$n@4W9m^K|Th90blH$P3nh%lDEjA zNu&s21C$!+A664NkI_x07Gbh0I>_I@?ORv_Ds?1jT8Qy-$IF4{fE0ED^Cj=4;GCa; z1xD)|@63C#jsBL0xkCNOMI}OUar0;~4+hyKP*~~v8HEcDX&AE9^KM5|({a4J?}=GW zmdUr5fU(pn;o#U?&G-q1if^Y~5#&8jM!8npTyix>8%&J^DiM*3HU?AxZqeWmbf!v_ z*)Gn8IttmIy6tK@oI7KTZW?2DUdvx&_RVD(B%wg8M%q|L1JEWPmY@vyyu0e7yn5K9 
zDd9wa9b}^n;tjtE=glstwNmBpZF2&a1s+I12VwJ_aJqIo^}GW3Pi~7Ghj=GqUxB`Q z#S8NPIxa;4wJ39R3~j3oBNY8g_4B(Dt4 z=Eqfh;Q*euX2I(}E^q9Xd+=f4W+Ye@cG@;IPjLbJ$TvG*mdXh@>tBs%qSvCmf@D(x zv&Uy+N%UC#;w897>&v>JkS4IX#nBuI+ZKDWx*_kYS9eW~Ub8t8kor?SgO zxH)nT$JW)LbAmj)^zD0|ZIU?SePwmyQlWK4ySr?R#WN{J_v}Ajlb-UV@G_$IsJhKSE28mwEjU)DuIx+pwX&%#KzRHTy&Q}!wc1WmYV zExyit%5VJ8NN3iK2!p16S5(`L41}=aP58Y(GSW9IByfgJtYP7yOdzqRLjBY^TPw>#lr4cXw12@C><(d2EC4Y5gv=4RM};FO{B zBJ*m}!@1Lt1W_0G2=OzOJUvf!o10~0Gk4Kea469xK6miN9t%Q=#X!lxoj*gLm3;Pt z|NWk%EE>9V6@i+LM!@@!gMK@9X18fHR@{hWF{yCCC9a8}vCBB630I+Ma*GsXdesIV zGj==$%hdq4jHM^+`8kCIgeN(`3^96Xq9yACMPQ@C@@~bFARHHC{_HsGwPY8iMJx8hrB|1->tj`a7afj=X zEDL|4Hz;%^CC6N99nR++zR5U>6+E*i{TejtTwB>Z*zw0ChmK<%CO_ZTylsG z7);h&%YOypvu5Q6`L0lcYCq&lQ{P{%3lpc|Oi{xgedD8oO(>jnXdR+}ThE`vqypW; z_a@aD@Y9l^qv8(iruOYMdH1?x7J9lI<1zn8THz8PN`Us}4{u8Dw?*IZA({!PuWMji zw{&Z$OUSe1c7cm2F>;N^L$Z@UuLI+K!p==ANBo_{oLTLC4iBOQsN~#I zjfd0hhHWJ)BS;RofVj9?e}=W#yxT%og#8>2+XT>1stikSZedoy#&qkGsTs;+8dzbr zWu6#rTdP~#2#-_k9A;cHi`Cy#e{N1BKmQ`VGkOrB~K_v%b^r)hAz>wH?^rwkmSRLY7BJo^&T{I z9D#M_9$}RI3vcJ;7r=mNXj~Ta%h3Vb3?XZzV2a0_>>?~7ZG`V$i>vSV6B{ljw1!LA zK?up0pQ|1Y;5#aIligm^dnlRYN`py{|N;B=^-k{e2lY+M$q(Cv*M3&aSm~B(^oYiuZlHt_(ttr8iKWUjB)e+uL>G5Xh32`k3^gG z0ZH6@l;ut=#9vvOHfk9Ck8w^RNOq_7XS&I2h0UojELvnBFi02eFt!<|mE~{N!zM_JNf5zHV zLk@qP-EU4kXjXoFm89U0i3eUb z(xQ*aXjKH*ub4&|bJ4S(4#m0;)O7^y81sFpNPnb7En zgVbvnx}S>U44HqE}T$VnMm zg?Li8h8AUYLxuM)3jGbyyb|2<8xPAmK3ue?C!-ohXCw*Lq;QFCtWZoR$qHK@kf!E# zKq@{$*tm44Pj~>#KUk+(OY$wN)EDq{zFa&J2^@LTKpmqi7TLbcTGE?=)16H zunm?aTQ+r=tfX{{H_CzFSC|#7dg>jzgPPhocZ_W1J(Q%xSX?g{8_7niKy} zvWV>~x;hkKN{mE6W#RK}cS5FbBOgNoK!?iN6{c z$w_??KeO+gq$*zf@#wffa*5TSZBz+vJVKS&nY;)2T>R!sd1qmzyF2S|EOY1ZIedo| zb8^*ytJqbDKy;@l(c-vP#1C}m& z-SJ+)T{2)yGV*-GN*~?;y%Ce%+jY%9da3H^8a0ulO2$Oe+z87cQ%wDO zi(8tTC}zmV+X#uJ02LTa#UWBwDSpm=(NFYKx zR;X*IgKTm@spMb(?c$Zts`zrru#0tOCFw0`$lg)jpA<4tvqxv*9^dB8X6y%eQfh1j zNMd#{8^4f4(*J-~lJ334A;+iMrx%-K5(vhHc1{|BGwmm(qVfPkg{G85OYS&Jfgl-K6W~?fPy4Ik7 
z>Y*$H1=CbrHL=qmfDM?5yx1zcAT5~Q1||nSx^PlUcauZk>Ye+iWg>)aQ@J+o z(>yi)Vnq3Z;p|bfnGq%aQGC(E*IaQ~0w8>@-C3!D8#(0ua~PF(#G|n_Qo!Htv(lI< z0E=1x&0KT)il4ywmi2q_Wplldn~=p~mq$ic zBMyaj&XD6>l?9O;&NL56C?OcPd{46n!yoeYpj=fFS_Tfh`ABxv@WLsvwe~Wd{;)q^ ztgRT!m1FKhs9bf?%l zhQL~w;NS^PpoYlHP{rHIL!8U5E8pG}D`)N5x+s9j8v$%0Y!j?NzXMJAq=l$Zy$`T; zUWT5-WbZ3T&P|iTLy_SQh|IAI=%~369a>B~F$Y3$#6nC&#mci_bBT(C8z#yhF2x+) zXu@vHNm?RZJERYKLJbbsip+~yr(&K1hE^yJ)#$P=o;3!l& zRJ`y*Nf5JG3Ct);=J%Z6C=2W#6R_C*r5@z5x}Z!jW46Bz8$g~vzp?{e=f*efTfWq( zpLDf1pHhL~xf|*!b+`*}J2Wx?C~yxiY-gKLS(V^+XWc9OSpFYV-y9u>^SvE5wr#bI zZQDs>r)lgoNn_hdW81cE+cq~g-c3K>-#PC&`{(Z2nLBsxTs}`)y)8O};ewpylhW+8 zY?Zq~0I1gMwfjc0&De^>(9x@4myzPuXWK;t^d+5)L)-#wa}6a3CBwGELS5}ZMm$^v zI(*T;iELMjbtBFfua}jC zQv0^8kE;D$yF|W@sfsnka7Tjmde^Hai>d<=qpuy%8DR2jFu+~L<#^pV4&Lcy635( z#7Duc$^I#zqZLR0iiajpBk?5|IOBEqdc|yRq_=9HFDgde3;j8i2K}W3(-Lj0iB~7Z zY7sa`=8?OXUV?q!Q68;&A`mzbqV zjpiS~gSB<&L)LJ4L!#P&*)lAM9U79G6?T8NCK`ly3-@HN_gT#adRnIsH~W^HVP7Hi zGZvRBum~<**Rj#n5s4u%+_&`2kX5>d^&x@%rvjpU5?)a!5kmzz)JX$)D4;sr^yECr z--7HpY*Z7K3>wT9 zZM$c+w{+i~ay&4FV1e{eRSes1E1X%*>K-T)E>J}8oA`z(70?}ZyA?9BnUrZiAN^-g6oq?8JEh4@pwxm1)1$#C2PPQfjh+TWdrkaC5;OF=0KY;6?^0{H_g zw3vH~UJUKsOWI%XcES6(z3@tvG^KS6V+s^^Gd;Re@}_xwOWxx|j#ph=+y$dRPnEwt z3F2F(9<{i+g%QZi(8*U^zG#?h2NoL>dKDf)7j8FNw@bw#iacW%U)pv*1f_8d3=;VV z+cDtEII~~%akj)1PkP=!5^~DOi>()V@Q96XDLb{Jcp$Q{&yR@6n+PmB%|-VRM92|o zz!1ZH2X0pK%+Ak|rWyjwNHB!B;JhG8B6P5mEL=&M_F}yAu_&z+u$K(Ur;Hyr&0?m^ zp}&twX4fc_-Kwg@GuEA!vGL%@dn0>8cK1NO{oZSmP0kM#&I_NZtvB~y{63`2U2pky zbZos=ED`r^gV6O7mtr*fRkey5&_ws`tP$UFuxF-oaN3i!jq04L@b=5p?&E*-Ats+ZLeVZo`9dFJCRfDsI`Iz+|c#SV-0~*sl{7>>O%vw9XF@bie(; zpLM?2(>w34@Asa$Q+G&Xz>u%DxAIZ&b3l8)(qF{NI(B96xXY?1-5~Ojnm+FA^pkLL zhN4rAMU8p%z%rM+k+eU%h6+noxfi4CEL3E=Ik=?tnxoxuZlcM9j&!4fo_|JYT{dRF z!&SvNH9q~MrR9J=uKN_E4~eLE`@yD8;r2Ky-4jSyP|xb5@oBL63i|V`%?r6Vu)6?| zkyo&NzCNf4ti1eiVqkxKD15DwoQI&{A!1wK%+7CO5Yk>CcAV7YoXkZP6ZBsm%j_z^ z?}*9qtJcD5mDV;ZohpTS;lA{@{fLwH9ynS(+0O0Obyl`QyZhyP4F)Vo7t2_8VQ-iZ 
zbr=k}BtgYM9W_0%9!$uzs;&Ec#6-?zvn0YKRCM(rL(f8CanxhAN>*h(J$Sp|Rlu5> z%(nz2E^TXt1^4e#3!QfgLR+{8me^$11}&@10s8g>V2t*MUl`d5}0l6m^v#l~6 zG8I|bfi46?nFFlf^*#gcBk0%T6oitflY;522MPiaN-*`d!a}nT6So#mRWQk!!LXik zezz1UK8_1!iPq6N7pM0!j*%*}Mb^G36R&HRkp7bLnu(Bu)e5fdbUnRhC+&Im!)|_$ zpNf$qPP+cdqa#m$AR73a6M=Z|ok!emn>b!1eVbJRL6CW!>%EX&dSSkPUj#|0EVGh0 zAvrSFS-H7x`(EBl;f?}n+vj+4L+M>)jFsd!w1s@_$2G2?AjQ2J-y8QMykO6V8JSQR z9BeMzG1=pW+M==(G*>rq+7X?OO`)`i{v=gQ1KJfvN#B|ULFk#QwpwT~qb)bOpVpUo ze^{CpY=7;dufBJBoB8b6mVXmq+$vkXFRnrV3~0Pb^fP#_ZD1%HK~Dv6L;pZ*RLsDg zmxW9Bg%xDuk2fKH^D9O~mI90Je_Gk#o>wV3id{}&LA(R{``)@7ih}2|G$I2ZZQ!SH z@}Mud4>&0390rLPR?lw!UeEo~7qZ=NR?M0sejHIS^Mr3>RD~W(_$@ee;O|Yj-izLu zrQ@8>IQvCV8>nl)fC<6e#+1k@2vHvcirG~go+UBYJv6o1sIMdVbg=l>3(AJE@)^!5 zcsx*#Ainp_trP)CFeS;9lR>ejyXuwXMTi%b)O|Axdz?4 zy|)+UgavJkII5f4$Iy%o9o;!JWR13S-xO%6g7#>CKAO59j zZN$EB^IO>{j^DVX5l@Zdofw|#I2S*uv;S3dE~3mExj7LMrR>pB;ESiNxO7Vx3Z}=A zG(`mjP??zH-@(hon%rK}38YnO*eh3-Wz=9nWeZ%|_nr-WHOz18b+i$0^6)K+9u=lJ zD0>9w@3GWMXf)n$5k-DHxjPMJfA!DoB4uZR=(VMWb;Z4vuBd%x04C#Ve_Y?f-R?0D zY|geGCG}E~K%h!up&>I^q`rvxokE3RVRi~^opiaJr#~xpLsi4am2V=TjK#pJ6v~f9sSFWZxUh03JqE;yfAm`fzd)*3 ziRC#Z>B8iB`B;1dwwAPO;nksVqC-~06#vbL7CAN>n7&FBfoZ_sriR9S?O?TRp!RN9 zZcKSk>6a`&me|d9pYqoRwzq#yeQGNM!Zm}b+cDEn zwPo0VS@-%edmq3aI+keS;q!k65?6y_4Ta8%6wt z7n8AqJS>*qyqyOy4=BFYuv<^L49-o5*!c!1nk*)X`k+i$e+?O*r+A7I5YJRC(nooH zy{dg@NA<_8qP|?5f#7~5vr8-|=Lvz@DEJ&;G)$s0ur4gq9nj|8W~$;;(<1z1jL zeJ13w0&q~7tR|b<8Di`j;)2Ue`VMK<4@=o%l&=n75Ria@xE0fWlGy)Y0eB6`UxFb7 zrU7R^fECzC0jHKo^%<4{(OoxL8Z$=-&zosb*1m6W^-qkLxEdP0UpG}=9c_}5;vw3y zOejDB<~^hcbk+yzC1vBFD9lPfWpj zuZ!!nE?)KoJDUdu*5gI;1DND@br`q>bq11n;^QquY^U{0kvH~@MPpmtFfH2{Hn*E$ zn;@&Q?(1#TxF0Pkwnq>GGo3HlXr(hNTLgaKTFz{lUSzreU95r6B2=j;D7wdlLqBcU zK?!B{qn`!X$c?C5tY^Zw$7nNfha&7?z?_c!JooKbSY^|rTe4iJ=z1M5#S)|sNQEwG z6Wcu5Lw|>pW#~Yk!Y87EtP_V>qsRH6%vV{6+NBbx*DE4VozF3&p21wyjX*uJx^PDPR8es_;L8QNfT))jXP;B5k!J z7hlStO(?nxYcH}0Y}AzIli^1!*F{+4Q^w8%`!(sD-)QAC3+eXv%o^KfY7ccV_p6_! 
zM^0<9t7X)OS#L)>Y2-OL_58xV&<~+xoOAlk*>a(Bz}mx$b{nB(fM=w3OYOw0znH#1 zc#2whDhL7Rzc@#rdPr+p!6b~clX}-C#$W>ac}E8>(zZk{$-nI}+A!F;*wPDR2LwYM zxTF+1?WcT7iwX=;%coD{hMK)RN-SJGIE|BONw(blfhL|wbeO|R9?D7Z>&Ps%jWtqz zpM^uUKC}^==9ccmhWO44pq$DrT=k@ZSr0z>3zU;!{>0M|ytZ`bbIqUK;GHQ!aeq-AiN{561 zR5^v)oeLN+R%Fw79^*$7VCWI|MY8eRxDx?`q+@@Rp+qMzoxpgtw9vQTl7f5Sk;Dpj z>76;iikKph^w4N$g8s}sfN-TwsXn4A82=mu=7#*h*Dwrxql_eFB`@`Zw+{Ei)8z(g z1d``wOTmhmarAJ|24I{rpinh1BX3=b%m8s%zBBSJ3D$?9x z2*JvJ9q2^=EHuIPD!y&7`fGTqTR5p#Z|J*u@FU+lN_(Eu@6j^C`h1%rGdq*-A?a#A zR+#MEf*=3MoAuCmmQ4>x{QQ~H!UropJF?32AxTF5f(V2^4)MC_miG>Th=PTSK)9*- zo1;GnMMX&syEL*@p$AYE37C+miq(I~+?)R3ZJvEXU7m>rqG|AqpN(csmaj^jdGT5{ zf!j6-%K_)sPV5}VsyhdD7(5INn-;RkzCtNt==XaYzS8xphnX}xnFwg;MOL*9jpI1p zo=Ufy(t}5WA@&bm5RK{CjRjZqUqDs-RqXDAvj%B{n;fB)dvD3pCz%Lk^*m6g0qO4w z|G29xk$rFMSwzOPTTa4&*JIHFv6T>(BWt&mW!`y6lU8BlLvD#<_aY`5fP^Cfj#K9B z=uNa$mdM3#p}l>U4k60?KC|a!9&=L*Cb-MQ>D-f>K}BUPVGHFc&tda(O=-`;AgKce zSpKE9LuulhJbCYbgzna@7N3@8_6^JG`IcPmh1Sd(2#d-i@|`y-bIDBLwY6-R<6 ztUYA{2}FJ*YF)bza4T@b`Y9;3n+B{2A|~6CG(oN zvLe>mp&-ji7Iq9oc~D8-^voNtO-{3A0Fo1uwZ;~0! zvO~==!_|s4vMTtG6ES=1#-+m^G!mC#9~CP)WSLB|%uYZ5XzvK4+O_Q*N`vo7VCR_Ds;v*`%%cE_S1d;R4l0?s?1_;R~#NC>Rz3TBN&BIy@KTp>WV|zMiP8; zi(Uo=qzihYAt9K9A%ep?2!BQ3&;dL!Ty^K0rT$3J;N&P=iM{H22WCKgz z2*dCXRMZ^;uVJ%5jim!Evs~)Y!gDi&Fy8{>_CK3#wiVaMZT2y;!@HI8UCKsp7M*96a!9EI!H~;ak97+zoM`_N~>oZ*qvQBcW!Uf|DQG$2tYqX4S zBI?s1u>F9P`rb>hmHn|M&A;KSK!fndGPRO0D+5R8R_e{|Yu>hpji+?_4Xc_I*hJ7b zP;H1mcF>LBwZFg;?^qe35MMkF4^Jf!Cb?sFt0mdR0o#3H&jPdu%c6d>*UQhFSb#aH zlwc4*o!7%q_!jW>+Q*zq64)%? 
z`UL}PkiYENcgNHI0-Oi(tfLEN+H8w%OM7G0xfa8x`K(z`7S|<9%!a8t*8k&;r0mED z1MPrsPy5p<1au_ty7Z0i&N%oh|(~ty#Mr`m5SN_GcxWTBbL2o-mYFy&EOaC4u+>4m=O&k)KsfO?g z7TgM5v!t+S+z<)FZWf)|B2`%Jh7b|?W`8~4=s(q9IXw`v4yx`l*4o|nGJ0&mPDkN( z@2v?^?FNcD?mnI#>s@JQ7-uQhR%JZHF1Pmn1M~6$b3F_F7&tkEVdKlmLjbtFzdt&b zBVI%N+cf}%87zbfE22%r+0#F$qY@eBplnj?;Doi^41+F0%=H^RJR0r$n7otejtORJ zJQ+f+=2Z-5?-pp&b>%=aXj0G@QP4v>XnDEdtgOzbhv91;kGBu@3ji9Y1vRHEyJtbO z4(rfcLR;m!=i}xkP>S!8RQaE2`dH*7$##>YP0u#JjJ3D#@pw`j*H{^OCL4T`h2Kh! zE>|n3kemr_e)7=PnV7(Md<;g-^7CKu5LnGbPWcwVY-DQRckC>gdES&QBHVYzUSl*7_qvoFOBjOxh6yx`Wftvh1tZPiT@`8*2k|Q29LiDu* zwtfF^MtU(=UHAfFo-kj>3*z~TXizHi(S+MYRs>GO?ToTlR<+9zwzuEyPNW(X!oo9Gh`I-FLTr^Dci* z{J@jHf^_5OHKm|`QSeisfYnaVoVHX52Spo?VMn0n@rjRcDejFi1hk`9XKeyKtzR%9 zd2lz*>0NA7T_37uwt6wGVj4(p?2Fj%od+&Ts301!t$(MfOLl2PrDbKQ>QO#bQ`ivf z8%5B-4q$pwnCK}1@r3Aj7j`s!Zu0&2MK`{`WnhZ>oZ<~SV9IrcO#)$cd!bgeENu0# z4Ny6|A#^=puA%#Uw}frq8>2ybJA z_j%Z&m+q0%qG?6n*HL^s#cLpSr2K#ng#Q-~i4cM@e=|>pJHF zhUA$**cSVC5m?lw8dvp)P?u;PM{4B%18B$NGP-O?nI*o%76VT%t;mC{g*zUo9~jMU z->+dJ3Dn#3s5=~{Itt=OihgLfcUWEEKE%?{+mvBtpzPl!a?Sfo&U<`(^bm>q4T7UO z)_e7_n+Q?qN%GupWEzR1UCl$cTCb42FZ^exD%L-Z`0NB$f=;tGQ{0u5&tUA{BLd(O z%)VV$En@D?IOw&6afz+2F*WJYoxj7w2_w!Ag!IzL^`C?AR>vCd^0Xhh*SGoJv$`AE zQXY*5{|$3xXRQhadm|)h;ebckJMsrf-E!E8fK`XDf&|_CHnD4RCZU$xtNClwti2U5 zC$r?WUv0Z{#q@8oZ|kceh=cE7&XUjQcnuSH;Z8#+S{EITr8CrNQ~>ZU9xxQ}eL z=+?aeloa6RV!JW2yRN`)jV2;iTRL;SE5!+V0-y6Ul(g(ccu6*($5iybQJ z^CWlVko3r0+sO=iL;F(|va`~)7sV6D8U#%X))Mg%LfybXRbQq7OI`Z%kAYQh8%}VZ z_YmyG%Ezm(Ri8Dt0(R3k_W4)Z8_Xig74Dx$TN-aRI~H0C^7R_U^gx+nis~CPb#B6A zdB>V$7PkMih6-Dp$o9DzJQ`k%=;!O7joo3{46;c=Ho_Ro@5c$G(LHA@!8TEvk^p3| zI3Zg)m@bzD{I$5%@!BHT$t+RdeO&`W$9LcV>229&8(G9Vi1eJqF-X&XvKW1_{WLF&4`1wUQ0Y+UiDjc|OCG>KF`WTEycS$wUU8KB}Pv z{Ia2qU3^MeeJtOoD8Dr}6Jk1gFxP{dsFcz#BpzUvc_E5|{_KyT;}F#7eF&AkST^!{ zw;Yf07j?@cQU06be3Hrdm(Y@;8z25G76{gp$`xF;Wv#895RVD8z+`qgk#2Q%Gy8y*6o4I03;Bj@ch>D;( z&3Un;KuwO0u58eQ++OFl6S?^MbTd4kd9jv~DWe%24Ar0P$^@0L|GhN>+9frq3n>ca zS@Jk2v_9ntAt~Ja&mT&UO9Rxmi(RD?hPu%KciJp8Hj51moOobl0u2@u)pacPX002Y 
zylu~*4$=D?KY*i*uC>uHG$B7)(11evWg7mWy?C|hN{$<`09X2aC30YWl0)EZviVmV5)Kac6}EljZ?~|l z8D-uh=G(?EC5EUAUcv#k2$#V!Iq6?;OH3b5SJJAp&xoseh2Fp8vHm5>VIP5~HIw0C z+?RI@xKFO+_&xBLAQGs>6M?OBP}v^C_uzdsWBMo4DIpmho{+oYLXlX{HR~bJTu1G?+dMOn>&PAkA6lVgd4xkkM zDZoNtG@I2Ir?C>QLlm+_vx=3)aWqrX&ue}LOwJ4$&RrP=2%x~Zb4uJ*cG_Ll05e&Gpkxn`hh#l$+O?^Y7swEY+f~B z*Xgct^jP%Lk*=RjSLc5K_nvP%-)=p|qr@E+ zHe}exBJ*=?I^_WolQpAxtqz40JE3^MkHNhXZwfZMIFK)P9h1nq*hrdrk0&WDSf`{%^tc( zJX*?9w#;3Q{`2YI`9W6<_MNlV5vT6gZsA?`eL9uec;d?mKIfpspxbT+NLiso!R-4C ztFDyFr*&H<0lVJ62|**ea07IR1lz{?#t!<{B-=oHVo)#-3TB1_o&1=yjA!yj&J3S= zLSPMlfwQTPy+NL)VIWFu>23jJ)*gW%PPt_DDNwM#)(8aMv1Ob=R2QoD2kuKnB)2a3 zBtCV*6auUTzL9=S#_MLt7#ABmZ>bNWKW=EftmDBzlQ;0sdyCdl>Zw>bQ||7_+RMEu^)d#UKDCr zL1*~g&6(e>{rrBseDaxXIjS7CKUiqh8l9Rp&}zksI)Nl!%HSN3uHC&u!IaJ zuKBa+!95JOTV=Y%39Rt7TLBg+Wm7j^Siu#j-(U?Sx@{%9g-7(mO}<->`jJc57!znL zd|%%5T;u{w^J%?=ub8ZT-0|?a0qPeYSH(9>+D{2u=u6A!t)ZhaB^mL7w9ri60(OX} zfe|szJPf8*Ut|!JgJZqVmN4m)$Zx=?w3*NF)trJG6Dr}J^ z&~kdLXx5i7p1A0)j{5JgdIKE=TJ}MOVkROy(ap71)P?3zPJ}2E^4-G&W`(Rdvcy&% z=c?tRRez>AtjaVgmpOpUaz@xbXU&>);R@C`_xeARCNh{;L~I42TZK@hZsaP-VG2hh zSJEVsv+lnGcJJWo5jvm^(zfC*tg4>Ksepvjk%_!dKDdzXdSluNc(_GrPI$h)>a$4B z05uh)dNVY&R;R}Y45|w?r{{7?6sNnM{pwn?t7hU~qNRdkc;uTG)Sdgl&FQ?v4))+K z{D`lXh8E zd>CwT^MIcyO=mpIJM16O`)FNfd&|R;m3QCC?wj>xb4ZAB!R#2F`}U#_?o;xT7; z?-u(?UFzV#h@%C8l!g-9L1qzuh=t>cC<7o@23Y=Pd7s(>x8XGNQ-DLkcP}zB$^gkek*!{4O@y`uDjwJc=+y)V*WxFF=lAW*k}4Z^ zRcm;Kazs_Erwhui9*Wvt0lQn%!@Nq;#aw+maxkyIAieT}b;rk1b<3cOa2UplX*@SQ zx}N+YAp&(CDWjX(^%_t842M_E(BdNMqz{d6N15@*McPE>vtEfk0@vw$HO6KIz^RiFN(d^>exsX-lGmlP*NZHRpIRQaqm<|$iFZG(&LgHT ziUPOL_@}ZU(0S}Ws=bW}h4m_2DRNcI!8dD-EBv=`FrLoie%+z1OquiJobx|$b(15| zrS^3&Y=^~frd(K+QP@}?P627@{P`oVN-7DEaA&VkXfHIuEWD0KR3nRBwJ*4@Dx_C1 zT>+vB`iI=zo9g~lJ6OG%+Wuq*fPqPVo)UbS5B)^8_^O4F2iA`ZVK5*Yq>nA|OrXW7 zB_ENfp`!KJ<17=I@)2 zSzW^yB*E7G&`O$r#Y2*$meXn;bxx*0AfrCoJ<5L#pPEWL7@L(5E^d4I*1RQxk8@qo zUbMFO=Kw05lN)T0rE}@B!!ob8W8KjaW#^~Kpy%_g5ZYUm-iK6ti`f&ZBm6#E*6Nh@ z7P%0iBg!C>>fNEyyEV_vG}YIrs2v5xIv^moX3@YEP)|U>YJp6u-QPh6N}rcIK<95y 
zV9$w%D~LkYzuP&N|59(C;Lc)&z1*pu0aCR$g>@k~YDT=eXCk(qR4o5&5b{26;#B!7 z`<#0;#Cxj90&A`3PT&ur%F%=g@MDjr@}ta$80<^yb!=nd zZWh?%ZOV z@V2JPpcgzw6911ec#e`7F1M$rxC$H?EVHX_%CD+z5Hyr&!999>87ESPHd0$E=P}wq zz49i&2_0!K+LFe(iD5$rxj=dJFp*CP!2ddyER8P-gnAVLx8S{T zBo~5rcv7GEwN1a0DG!)mfygl+iNQ6Rr9v-Me-@v1KNV?o&kMlOwiJrXG zH->rG{~d8llZlDX75j!1JmOJ=0LKWfUkhS)(0KYoTH;SD7h=(il}Ww%M<^HVE2^?I zN~i`N3Q3@4i#mYYquow2-`su@r(BfS7)qYszA*H* z$W|&y{ufo-SWn(>GsZwC*%MSVi1^-}bNM+^qGWtuJvXaW&5bJ&EB=XZ0{HH&iDnV& zeMCo!fZ)BzT!@*ECc{ zn{bdadN~+OzG=p{&;J%6*7!7&m`}uS_7d4hn!t_+xTOG=y%NJo=YCRJiK@7^%(GL4 z_A&OJoMgQpf-Pcfz<2Nq+7_}=TiBT7oE7t*mLyGNFsRjAH-laws~*fe>>4t1;)#I4 z&=nPgWndA&RSc6kh-eRH#E@q$FyQRs#=6ro+TY%^Dfir?ze3*@QU^OdYd#*XsOXZZ zefb%b_WOj|!x;p^D_$6y1-Gps5op>@dlRH-Ukfi(KiW&f5C zq)!!*U;>4PjeP|vTko{rAwFMIE~?q@H>B-dBrWb_QIj4xA4vnubBm>x6!NOA#tRby zH*tU#jck<6uRO+p?$gcqrc0e%>1-ez4`j(sq6J;h-Rb9`4-}>y=+BFFeo<4QRan2C zXiXOu(mey*b28#zVHVeHws|t!QHxbBp2xqJ zL@ny=@(=!FsLn$LE>h{ptSZyThUbP)R%R^!*Z$M($sz!Wi))%%UAsUQPNZJ`YnA3k z)g%ci{w_4er(Yz{)eg~@D0}5A*;%#R&XShed?qIy<`kyYd>FWUnTOat@{ZRxM-iKD zuY2UM)8A(KSHgq2aXlUCuYmU`z-GZ9=TvQeO4zZn#4sXmMI+0UDd{w2k^(IF(Uob_ z;aPi(rfJ=6u@ZXfngtHFkKb?hE%zT&cyPeywNr4&iY)4sRDK1wUX6NB78{^f))~ic zzj*`)rAMWq61RJ0{&VbW1c~1|*!DgIlZ_7z`J)FCC|@zOqOz&hVW$Eq99}yaIh1Gu z)q5+l9^4qW_ThR)s6(!C{ z_I6ZMrwH#Z_>Tl-ne%jqAIb*-?+NzpTe_MB9>~E9-nYY64kbf3(c$Hu`%29Ojk%^j zs?dPo`SDB5}s|_Qa-S+kkRh{?L`!+z&%R+@O ziG4mDYcrqwFB?mvYc1Hi=6GFuyVHC=;v5o&%L14%)puW*IEe-vRD=K0G^>Mf-g?6c;;e>5D6&W_c4A&3QPD&qW7_TBm1i1%qH*ZPy)nb@ayo;09~4*WDFc12I( zwHKMRCi>ShV`c2-LE0MbY+KSFPvfR4+wW|$*Q64KkO?QJi$rfBU@Yr=G!9VSX-WD+ zXgjI>3@ah+fhe97_ar3jZ3~Rkn2oKZAngU!tMh-U(C%M`kpvpM^k~nif7Xse6vP*- zP(o}v@()Bu!7KBven0qEd_%!^bPNn#%84+R5S_h&ea(Q2`rN`-6@D{;leZ8R|Z0CqizvcKHe+``WIhKWuEFq!9j^SYX8xk@V zK*mm6N&RD4xYywbh_}huHGET{d^pF;O#;C!itdFn{`SmGe_q?r(^SoVV-|FutG^%? 
z&2+^T`Xt!4BHsFGEVfvHvQ4AJ6?8Ks8>H{^D7+h$>PZkacj{UX7V{Zo7PT}PZ1Cp) zxTLl_>c4`qGvXkKrSi?rhW_^?s&_WYj9<*koj-B=6^0fs1HmR*(;Dc~jvdzvIBEp+ zRU!INfX)D0TzDQM3i@T>$g(s-$;eW_(28aUUDmTT3i^@=j6T{Il$EL|aLK_oley!3 z(6^7QbT;3={V4xr;tlQVOYROwuGGDOmF?5kIWH=ZWq_!*TcHUeECjoCyeCI)uCr~%? z#1H-1Bo^lOT&tz*M)!B7-M!IjsYWYiWD=vzD+$f2SW#LN?JXM;>o>uurrQGcyAceLCx`aeV5w4iQf39aZ=_QaZu0mNE z5vuXZwaSujND_1C>V%45#p+^)L_K%C9xFSHgGKw`rQ?F9*aP))tsfvz)zF`9VBq6P zXb4NXl6J^d9|Fy$G_#%R&j?%=c^n7}EUJtiFhU^J({;CGtEy}AztFT7{*gAjPS5uuzp22Fb%VwSSSZKE6KFk*@h~!D zP`QU3(>l7LnD`93d&c&W803*9Y_e)+5Rh&(Stggb7RibWZNb^aVwYIGNAw#fr!~=W ziyLkCrVZnBO&7mmlF#t_;6n3#Dn9Y}r`Wcj7A%+6?F>!0uWQ0sd7_eNg7BAx0eWi{%*^z96h6U;^|*=qyN|g%NqQ%P(8e~S z37%Z-g4|;ctpCa#5*r2E_?n@Jw_$eofcbgm*0KjeUr)vPV#PzMW#Ed@_NIEqj+dHF zm40@Sm<1O6?bVkNknsa&wJJL@qR+)LMSPy{t4NfdsVXmBk}~S_DBaqn5tbhTw!Q1w z0K)s%%ub=wvnOU-FyN&(2a9*1sL1HlqT19yE4WAMYv?^yHJ+Z(^0&=Z7CYl5a}lpI zLxM9~>LwB=vllj;1xMsx;>$Y3iy#x;u3e}NCoG%iOI_mG8r53(N+c7s+Ik$K=2T*6 zSu;$T*sUa55me)W3#lFWhW6WOA{R;iuJ~)VKYaI!2;V=0{BNy~eNRx}R1K{!O~qR2 zwagg`^xP`czERndZ%VqYhMM=v8bv>m#T5udfxFWd%Br#rZE>qDDyC=XXp$}fjF{Cl zK7`o4Szqo!>1j8kMheFz*!HV^A1;(%p_?id$2)#6Rh@l6`uM@g@IDb(8Ow3vP&E42 zGEkP}<)uCNWgNo5NSt9>)45r+_Ci+a;^x)@&)sjl*F1aP4s z-wLNA<5J`*G)>EN6?OY1nV+j{+zkhZ@a28Sj*R?v-e46oLYi)AE;e_>>!D$CW}Dxg z#Zm|T--3_@{`C2KLK!hm(pN5quYZw~3p_Na?09*J1g6Sh=AFJb_>_?TyK6vQ@%8Sj z7bj19|rJ zUg15t%Pia-wibrFE~Rh7P*R`bEjTp;JzN2;5+Z=$~t93aWwRUW}Y6=frz#B}} zy0vKYhCv+i&tv{?W5np7n52^n#yme>nhIgi$7VZ-mXbGf7j=r$0IUz;3Z3*a`dk+1 zwnc*no&>TqX;t24zwh;w6pUL@a)t-qHPo7dKRs5pvJBe*uiz!gY|2k{UqbWF>dFY) zZO(Zgj)d<&^X`2eV+#=jUCpW?_U)yx*7|c-#)@IgO*aSisanVqe4x5br2Uy7LD#$F zcwjXswl#{Y0}pxCV%-%3n5^uPEL|Z1kWn8y1di74`ZrRM-@7D7QUFxXX^^?EsljoN z4pO(+Q$(MCoyQC+!YeT~i}nE#fCh&!>>kI25FE6)PP=b`#zOWR{8L=S(I-IS*p>CT z(m6Kg)sBa?|rRmd%~; z{*m;Rz0nbXd?fDE7sbuM&DRue1%dfzKZ&`(rl^A>)yDEQ-r{CoGLIMnAFQU>!Wr33 zONKOIV;iuIq5fqski!M!=sw!0g!Pj<;1cy6ggs5cGTdnypmkuKe^MwUc$J+~Z(aJPJvzgss=S_UugyctVC8RtKC;VFB$e(iR zK>;oXKN}nHaW~;{JEsMhy+3rqo`$YF++Fk0#Kk6XUON*ogzP;d9ta9BC*C3jt4Xq( 
z{;R{S0;9&xS9V@ULM^eT=97-4swS`CIe~9}k(LuC>=3MSGfUfk0=I8D@7re6E5(Y@ zOn-wK;cLXr^!I8D`kx$-xj`SPgVS}UQcT?1r6jr_GnSphH>|9)EGj5Y08{3fmaRud zD;?L?J6``wH4zg-kZZ5cp>rp-ie!K#5kjZ_!PhXIj1167_5Rq$zD{<88$H@hAjECq zP}|Q>T_6e)-7~^KpQ|iUe6r7^kIckVaw+|WRYdkbTQrjje7ANkUo-wZ>Ev^EmAKqE z)E;Rs{#t1+9I|oJ_t+jVfHp<_6q7G1wQiO$x&0Dz-WV0}_Q}q+0Q=3cD4#GRxY0N$ zCbq4nj$%RN*&h8mslsIeR4O&?&^YC(b5NxP&&|BYlh>YQ=DnM}PC1*j0@@(d=k4$o zSOG>rUEAUhBinJeD^!K^VR!SDoAO@QM2kvJ3lzQ9HW{ddwnJIJeyv1KZ&ys?fBs5D z1W0p&b%Pis`hze4xGqwvsdKo1Ax}Tmgv652vpIzvBGC;cd+83>JVgZJr310IlKhj7 zN17vc&qG@&4Dmp7hR6R*c5pH<*}#~asu9!v!FM&%dNL5_ye4q=Sa^3Wx#Dz#TweBF z=}C@@F^v;!@85Dw!2&cc-w9F_Zk;C)8)lV*JcZ;lU3s5J?8>HpEe#hcX}@sDu~MWM z1D?}7*BvDYmeB=Yg@T189m!<*oSw!IPt?qr!B49ItxDoHJC`A6b<3w1g5AH#DJ=;8 zFLjOtj0f=DRq^+|=WmzxwnD75f(az;;7>*KHEyTTqEIc+AmYZjIpA3!KQ%}YW)71U z-W7@6kgN;Qg1>!A#HQX0TMOK-q#yB)+~wNwH-7T~<2%HHYl18sY~aDuILgjuEi>=y z%v9ta`k8asBhA)jh`4J?aqA-1mA+&?Gq}LFPoo3^CJ9X2`UpQRCq(^(b$IT^vQZq= zNq91ivpv{01pa?>sx^={u>x{XVFmsk9e^2 zrVV=l3Hwt$R{&oEm^wg?zasQgKCi{0pxW4qt_ogg;MD6O=X>yBQ>LZ^;*hn~^HqFi zpg%?yPv5zs86pI6zFXX{GI?YDH!K+q6#Mf z+8tfpr+SKgg*@`KNq2VY$+UE^{xupJRw`_F1qqi*lg-NxBrtRh^#cG3gZF35jWSE7 zMECV157QHJmNME-I;k%a>ExOf!>0$8CW!1mC4C0khpucBN)q2b<0$8N*8!;Dbew!G z4)UPrES!H21|FE6%LPKp&5M^L>21XCt%U%+S`4mzvq0R)gt!9EWqjik$IR-zlGN?0=lj;L+!HvJ%s;AF=* zQ(?(6kUI z9>>Ii)3aY)>Df+^Lx2^oyb*T-;bkA|DTvRnaR439tyuHIVwh(`wx{zq^>!g3F!V2F=3)?WqSE&kwiJ z?^@!H{~W9g0Uu~s_mlgHhiFe)#Be#h=uW5GA0lY_K3zlkj1(braQcPw;t*#Z$i`B4`Br{B;MzEB!W4+|n{DlAD#Q646+- zRl_9)q`9DEuMc3bl6q%HB>QI-O@NUggLGu&GrnH+34sNgyKAl5qgDqq-thp*1&TUxS4HvpU7{K<80|hm4K;ai6MC z1%lB#GX}9 z;t6)Db?&f#|0;j4puOHHf{5LeH7=ku92BtbcT7mAv}_@TH47+v%HcZPWXGa98>jT$ zU#*Ye`8;G`0T5V9>HeNEUm?~GSMODh;cGrT0=4klv(G_ zX>W6bwFh?ax&1%3-U6tqsCypp8 zQMR85?H`h;(;}GYdlQIfP+E{j!(Z=@wvY?Q3qozmctp;d*e7P)dYQe+@mc`KN;^(L)Rwy zaq;Ur!wGs(>hCXs{FAxlzJJ!$U%EWTl(kDQ;2UQZn!uxyKxR=4j3n2>xK${`Vf@c z)4N}C=gvZ6Fhr2bDx@9Kn9w)jFLnCsW_QOQn^yVAs2l#GMzQY%X*uWM@kj1*!~LZJ5R4C3=+8s~yV zLZFM`kMd;`u`X0p$?@Nv3I>To5sH%<@wtLASA@OZc7g=3$ho1SaZkS_v 
zcgm+%J1I&4QWxUlk9#Dsnkcs;IhrhNtmAN%d>$c||6D8>91;w-VvmLweDYG!#v^Ji zF!vlW3tf$2)ItXBuu=Bs$xim`)N3pvN0jZs1*g%r2hyfn(Iw?6e4_|XtKq~Y(ZFX& zLf5xAY?_MAGM7Vk6^f=KkSrT|V)FE5cGJ#%(wQj2L$|s$n-DxxAK_RKpQ#uZd~8;! z9c)+;ayS0-y`yJu1w8iw11re2C;uicxWJ(m`4Dfa&?wU#6wd*n4Z$D|ri0yr;kwkbhqP2)JKd2A z*WAMzC>i@ac83v#2%U)IsP{Z?`oWCK4nl5J+%2YkmuVdMrz2KiAeD`-Igk_iCx1ww z)#?;kORi5l09qMR7uys$=2{k-bN+M z>gVx;XIV7O=Xl;P-StSRSW7l+8JB9mr08LuzH9eqX}vf{NWqaj_C4xay8S{J&q`hS zy-8tc`fpZIjk<=%?iPZ(b@!i_cqIZ-u_(~}N+}M?#KGP0wyr6NGWM2TaPTBukPSbE z)lS7%>y3JLcJQ5iC53f#2R*-sMCZAZ%lPbK>GB!;%{Wa)q`<{EO0Oj{Aspk2wcsh` z$A_zdNl#-%zBb}+5>M>#a8`&FP_iI~BJ-!kB5|2;8Oybq$16~xG)sOJpbk!1dwW@| zLsr4Q?7QCQJb~ScxpB+OY$SiV=$#aPQdEn@=L45PN$Pl581i)CFbD^b)xkpjQNCEFyHsRgQ*`R z%U*J|FW}18;ghv&tOuaQG@!_n0S~5X`+YG3jrAq3l0whDg54L_mZ}D6DlR$T^{|Y=+6M84hE=Hm7iAz z5{{ijSH73*@#M~?(b;+gAP3p$^wo~C&`iEiP&~rdqP{C7JgaAWVD`TcFWZ5jD>JB84bwbdSmO^UI|5jx za#I*NKVnYt{~XDj{~=xSia8bvb&CN6J<>_~v~2Ngc{%p1jAg4Tn#q|j?-J`wnnhaa zLdSr;){rjtCePE7J`WZLKI9(u$c&#XlRo;W!#SMTiSBZ#4;W)`|S}WM;BSIpXblG63@$J3IA0^!hlYf4HKN=fNkgOY6}Wg zo+q_E`o+7VsMnq+u-*EDUK`l?I{u3)hnr<%oQ4DbIauLrJ~or1s;DBq=%p(T+35a2Gbpi**=^J zO#Ie1pJtZgDth5BJ<0;zNVq|hey>@jq8t9B1@HxzTA1)*%%e{FmM)Q9mw`_S6^e*y zeC&YK=VomIM1td!0en^BaC(7Bh-5Nn*1+5+a=Gr1Z>+7a*>*RQB&c4#39Qr zD}ospJQkClw}+kaSAV33BNLHsSC>Uw8vWW!Z$Qw+-$ibumw2c%s%}ii9wcRFVeN(f z>yp0izHI=(ld;w zT?GdaHzHT5j#Tt@RX;K^h#pZFhf_oM+Zc%W`S8`&45uT7a1g<&;QTVpu`1x^QqVFk@lBfT*5Xe4cFDDCEdw|cdwHk@lb z?;WJ29HR@5&{Qp0)zgg#Zg0@a(6hWq?$`t70JxxhEgaz%vy-wKslh|%@Zl&Y~ zXgo)#YA6vYWO_bl^+xwK$B*nK1Z#(82i(}k2LcWa! 
zc-&6vv+%|LOrpy0bvEma+GECujW22O@(V@2B}{l2wKNAIyPl1FJBX1?(w0*z`AN>g z{(5grL=bzDr3vDV0ZjZ291$D6*V)Qrn%+70^d!6U$Dh4X&(XFdGtFfwr!Ul?Y4ocw zFe38FrtP`EzFfvuZ{h0}#1*$fZHo*^q$-^tzJ&j+(+ zZb2mWK9pD(=sdfKNYvZxsTiKD#PECj#>+Gj{su^$yWQy{{*YI-YcA&Xg|j30SSll8 zYm?#s7QJqC_A5NR9U~*DJ)SZUF+z@2&Y)O)+(@1d6YdrKy!1x4-L&2-=`qpB)qY(p17U(_HLHjAiE2k%V#O-vJoNe0U1@cN;zRs@8@L zg3CH8NA@YorV~}RfByb9Vr;xg{5OeAMH50!PpR0AUt@WSzZc2U|5Y5G!4a|0n>xVA z=NMb(i_Sj2t0_1a{uRC5CyLtKBii}?X!$FREiMG@6r+p+F`g=x6sxgm@nMTI%X_Z$ zK94SGvC(K+2c^x7jf;WPGZRk`8$%3M*HmONQ($!VQ2 z3w^`8C1Oohm8Xz-QHinu9=@vVm|D$VDEhhwvtNS{63??3dX@r zkFT|t$pz%_@naq6jth*q^8aHnbo>FmGEh4z$wTX~aX5Z;!}d2Y>*noX}&b$U10@N`qv+_d2K+_DYsQaPECt$LoQau^f|omL~sWkKH#9`4)8k zf4kZt- z?TCu9D_qaHr8IWM*InLUUQ6K*e!o#bw6@15AZ1KSPSZ-u!2TL>U_3_KFz+Y0?uM`B zwtktscSd3RH@YF^ z8f6uFK#XUKd4BY?CUoeA{@n~?U5`A;#oZ}hqnLOhDcg83J zj&wi)1lIF^*?od2b>H4>a%;0H-Q2=t?zrhG{a5^?&cydiri@2<-^3=u(%a?Y-8|<_ z%*0{}YWG(oN_V=>*-MwBhSR@JCCLQfx}uTq@7=2$YM{V7WnvrXm5v&v?RPuj0s9>E zg*E@n(h7#SXFSrCZ7J_+T8p%#s%2`MtnV4G=qll(lz>s!yy%GWQ~i#QD@EcW_RwBF zRCYXapZC>A_Q3=rN7I|SoQz@|9kt?XNg)XnZ>evy0LInd<0!t11@Eetl%mE2Rg@&; zQ>RwvcK{18fKmeFwgPw`?{|)Rq#tl3-2}t*4bpzRdGWhMy~E#!ZF}ZchHs5qG`NO8 z)#yPAwW0Aacz2T%&v`Zzad{gW-HwNe{X|CIcuCUFfni6m*tp?yUT$pbGxIp8$8t6_ zjWKzy*1n9EOyTh$!3Pb>xoFiA@tV`WBbsf+ZYKur?Kv%+;Ayp(z7OwZ8)J%4C04l| z*0i(3C$pdAxoTlDX?L!srhRNjZfkn=^*sfKum4Wh0UsaF*9RkA`d~P+l1{^Adr!}i zCcMqp>RB`RELwk>(ZqwU(2vmVa^TguaSJO?)J^$PH*l5B!`X+{>m)%}hoPGEooRRB zF2w`6LzSBtk3+7d*%)!bxw+2JF&WBHzJ-T36Mg;n_LyLAZdu>I3-xO-LB@*qJgDV% zYWB;^{qwTIcf-N8VSW%>bbbbV{iuyUBV@JAVxm(vgc;D_D|)}f)VtH#8fJ@?4?-%l zi-fP=;|(Xf`7WmO9?|Ugn+i7nSu$03^kRD*1OgKt&xG;Pu=O z8^DPRK2Wb%;qB@uJ`E23_$Jg4^^*C$cRRaP_HJ9C(qmiV_ehxdiV*45#3~gs{nOsR zQ@?|vzG!NVp+k-_bAbATiyLXcTsB=bEmqupNm1IYjpw~NNwV9^v@q&wkX$Q1w^9{h zx7Sbq0#eU%=7&8N?ufqzstFFTIzv`e9aRFYlI^$dOj)+AN_)@NDh|qumf9aHbJDNi z*cWzUsyn2RkoN{ryujluPddv^P-}%uly$%grLy$B2 z*ftL-9!eO8=RqvOK1B#T*=vaF00%`(0K0E^jInpD_^#dD+R(-9kxkj(N7fksCMG(% z0h!m6xgBkIaO@ 
zr|xu?r7G$u^#*&-FNP>TTmK{=c#h6t=Sum7Jg9~AehG#5)#-{{L)G_Xt08!o^2il| z?;282%5k3(^u3W9vuL`E;#kbL)^0nSUQgoS*G@#(I#JCnVb{(M-XWXn0E2XVsYX4F zN`pFf3E(&LEAI?HVfb0}7;#p9*L`eCN4gnufOr3WPU9{;XO{75z9Pu`J9G9bPGhr( zyL$V>;O;b@w~xJ!IBdJ-bOlL)L#>_FAYD2thg0H`najFYC=aKwTt!@*Ui)`(vB$U$<;5TZ=g}BKIoN(8fBfoz36~MkZuCkI8eAw z+`hnPEtey>7Ly=nf?@V^@I;^VUCu^&#-$N*d8qQ)x;PHbQeTq!K_~6%% z*Wa%7lFEe3Uf+ZIh66O6azl9CjOd1YdVgeMk=bz^0hI|zWY&dfZZYdO>8>a7u+~ZKQ|MOIWO+UeaL9^odCIBL-OgtOFUOLd+GH#I zT@8@1&3R){4n>BBO9fJvb^%t5{G~~)L2VGUx4@YR*4VIp>LD+^ZEizM=wcwaX1u70 zEz^9BWyY_3kWD+aUlshlj1}F=yR&UeiD_S8O2RDK^xM|t^-;pg_O~ql^^1UEmQ!N=Gv7uXZv33o;S~^wyE!=JCsM2QF^30%i>CO>_mwZu`G{^u0 z1=+yb+P=R|^SYS>rQ9B6_*Js9Eu~iaODQd=#h}tACItucE*qE%Rh8t%_2RApjKFIDK+14(4jKY11V7q(&2E;Gvha8Cz zsK0t7JjcJ~X1DHJZaLJvt5NhV<#C1fBtDcprk16sLZ7k&g$p0^XjbC7CF+Y4&&X<{ z9c-wlBcY;T=dL>u7G9!8ju=4kMN1f@xlpAF6x~#h>4*+ z7t7gsJlto8<)=SBk(Dp$lUci(>P8Km6VoE+i#X^OvMWXqZAkA6n80!f^WmB?dph)_ z^9lsa)iEYyNNz;a1{DWZkB}54?_I_GHl&+B4A1RAigj@cDX>N*384FP8Ijfy2_9mr zd=+YQjEP$b6I|dP@HIeS+dHqo^ zx%DQ^+usGs@L&;85SwIfQcRv-c>kU>i>t*ocAWddh3CjRTIf)vhsu3meGnIu02iY# z&E=45?}vnouToMOifuMIPx9F6f-U~x{K3A01mJJdwzM+WkZ4=7S3BUGKcxt-Vjyq+ za5wK{KCSt~9x|McH*i&y=riTTRjWoXVzL3|rBBErBs|=5O?$aYHt}Q0Qsu2S7 z$nsqVw#YOX4vRj@bYLgx^&@&WRmsGCF4WHhQRmCVBz27OOBd4(sQWYkna`Mb@?{Yk zNjpZ_T6$>+;-~O98MH}d(e~eo-PDrLiDXX1PhU=PqS3V5dAU+jfVeY!zS}N7w(U~tU-OUc z^#JJ(d9;cof&w8*AXnG(Z-r89k%d_CDjg*OQ{&d9B~|%9oz8bz1^24FqMO#}8P8B{ zSGdND8cU`45~b6b4~8r>&V`IicS{6T+{ttyW$86BX*aP@4g1Zm2N6DbIaRtXMv11F zx5=gYz{9IB{u8t=Lmqy(6#)S*!D53T1tA|@&3uAc?iVQ0*8aq^wHCwGuR&36?Ow>p z=Dn@H&Jt*{p65evt8G>L4!$)G+7C;=4I-O$xiVG}RJH9Ku%BgT94nSdhSnPe#mBz+#LVZNO|9r=UwnKlWXMa>OZ)c&DALsyuHzMNbN~R+l9j zobhBv7GHQN!A@&%xs7bI%6vF4YNom*@C_;vluY#y zyJ`7JhxH#W%*eO}cQ&}Va@!-YU0(bLy%3}11c(=jpGNSip^c-7AV$~P(~iV2iADR9 z5~bdzK1@72EGcK*^D^7Wcf9&tol#^Lm|Ym&1LfkBmWWhLQr?<1oU`hm(?kY6k9#FC zn@{N@Le`OJDFc1^_2HxPuwX@bwPt{Yv`GF$^%eTXG0M2SkB^ z&rQKxnJxl}22lLIDbFMO(lRGv2qJbJi7ME`t@aGDR_CLlziQah1YjqgbtSQfi|Cjl 
zE!A=cN;uByP4qyFcHgO)l*-&ZUjch-W)bN@&G&M&OTq#tvdjT8gZpbxocI5oqDVw@ zD;rVxmNJ{v?zLP#%9K|T3+F#9e!o}v6}?N3LS4qt0rZWQd1f!oh*|k&U9PUYm4YXv zIGsC6wY75beD-+JB)ct0(x&jbe|mmNrBO(S?vRLree##v)NJ>qct0BMvlAR3QVjH6 z#=316W~KiTod4@pV5^#l^j(-lX5UKY9EeuRWf8JoKnTnLaHkA!RyiCX|1yQkHfz<3 z;(4*wQG5~l6A&nSp?;X!2RHtFVJgr&#yIoQbSB`J=U!jTjvO|M%&czE!~@cxe|25B zHSVBJ*4=D}{?yreX3y^S=fh{GX)Xt(LD<&|b{W(7m3CR}Ata~u&ll6zkE76sLJXOi?u2ebrRDk?~S zoD>=4A$^bzZSx1tpQuo1fEJFlEnxyc^S=bXd3I>rKnDQfG;ALQ7Q?XZj6kcgc8%OOLlQw?p zJ@LAq+C7PA&f*kJtaR3Q)7YA#qe7Jo`^D+@U69}rRG#OO@2`g!HT5pk=(f)-8yt%8 z=Oc}a3cZLpBU6ntm5z9j_2JPw={7g!G-xX~CTR((`exRmF(H?-)!pgQ_=>z3>qPhCe(g2gwPX$H)fkTawzT+Jfwl!1|^Q6a^V;1T@>ma691HrFHvg!Dll|Aa*Mx4na z1S$dKyj-oyOLwyTM~@Q=gUU)-?$(M{hG|y&zj5()bdGhrIEgkMiS)FjX5(1<0Eaf{ zoL_nYy`L_-qmBu`L}g*V>Rv>!ydd$ghnxG479d=Z!d^b~yxgM8NYT3cfqXwytIc<9 zcMVGO2ZL_`F3)P4RXh>Y+bduKaOYKTrzK`RTboMQ3qa9UP;1XCfo=Hi+fSW9emWHa zpYw0__x;3l>54ui)bx6~EzCP_J7Mx^D!+U))ZX6xB-`nEEzRCbEcEhaei){JhD4>h zoVApyih3n=-xHJx11n95Li!ZAh*1*5<@>Y0jtd6*`CpPYG}>~y@JYy*+c*OZsO^!T zo}<3L31dK$TPn-tTjA*X%SN|NBIRv(-ExPaI&u^lpE$L+Jrgk{zI=|y#D_Waw;SVx zF1~}BKbS3}F{L&?*~ngubgIBt7#J59PJ4<7ZsSU;Ir}@HW%b#p{prS~vi?HJ`+{pd z43KZ8@Y4suhS@)DO+L=V=>E@E2#;Y1(fvV1RP=EJ8^t0s!9068`)P#2zINlRz%f=QK55*X(39E{tp&Suq za*D-g=_`tgYIJX#F{Y$lLAlP>O4)HWdyhF(EUiZysBvV0g(jQ=@e8o;u)sS(&Dp$` z(7Ij#oFyzwuWLQ6z80?KAB4r|Zbe_QFhaIE)(S0d!P{CR&)A{C;^}1ZH*3rG{w6~e z0c)1K6y#xaw?wLf=bR$VO-g7dh9q`PgGxjG1X`9x`lc!G^|;htaOF#pewrh3{kJY+ zK=lP?1q7K5XNI8Q@)oe(L6Vs%+kR%~9b9_)w;dum2-Qpy_&O4H!?^1(PC8R&HOj^RA4PB&jo?QN1nc+sUWk`88t~T*1 zLMAjyfnKBG2_oo#Y?VvsRuSv?2u>rH;}czwa2ed_aT0I4cBltHP07$Kt!ZpzV zVyl+&cm7Zlwb8P7@ihCAHAv*;&C=TZlhY;fm4XQ=rnV^yO{G$3pBt^DY{JrHJu0;H)IZI-%j9GY72a``%xq!>agun_-GV zSa|r`YT?T2toi#tUx*;@9ejCB9`Vhd=s=6=ybeg9(N=5+vK`%AC%k(0WJ6tI2y7gJ zft35XDP|xgwbFVQ!IK@K)3tPDp_7P+(Q#}wc?jK##Hn{yde=v(!sF9az z5+Qk%GITcM5#Av`*RW{xRBHg_13LEpGzCm)d>Dzj%QR#W!s-10izi_S>`Bkh);v~- z>hSeXOKzwHFggiD{gj~myO8?B+2jNdPZ$5b*UD$SgFOHz%ncIy1{nl;LqKaU` 
zUZ=C9)AyqS^2sEBAp50B&7Rid-KUQ@tkvy@4gIavH_so9^zXiu9U=lR<@p2UYS)&9 z-fMrm1UwhBf*b`X8s1?iOw(H*P@vM297xm z6W(*DXY-x*McfCHdrK;8MyIZP*Jj2(MHe{fuvG1|?w~Oj>~@fl`Pk~5o)I2<$jSBG z?z|ySE_f(RierajGVrwIv-Vp~L-VJy(F+I{Ts{D1n;c>)<5A#H^%SyUuLNOUSvwd* z!l@+W{2eFN87d#3FcXM}gtj#zmxmLjg*kkXS^Gld;94v0E^q$z9j9CkOz#SZ3Moyc zg?^ar-<{T+s|BIOZ}75a+0?Ws7SaZ4lp?5ictGrODQfkjK;h-B>8NuICHrT`By7YK^SVUoaB|E``ja4ys_k`5XhrRT1>l%2{=BfIai z8HlV?%2B>Z*+`}Jnv0~$VPoq{w2ajIMBMdAX)ory$?S8}|Eq5~OzUE9t*wqh9~zTF zWg75sLN?oG!RZe)b}@E#aNfp`og6-f5LYa54xQS}M~165L`4yyqon2fW5?q|>sgTC z2LT?wYtr(n&A@+QzsAvqd#NUVf9EGdM=4iXCJzbQkH5T5xkI&Y-A^RYmyQFM#0`*! z=|vLW`Gni_x`}>yy#5jKlN|dWAvz!N3?t&vL>JUVo^uHHO8AwuOlfhQSR`!=DUD+^!rXvY)#y3 zWuv)ZqU2J>DHV}-+}Rl}Bk9LLID`d2x=r3|me^|&Rbdv>F6;liV=W>dJw1J{Z`(1U zuzOGLn*iVo9M|!l`7fD@!eC%e$qNGg1K9q2YYpQeP+95AC-=GCvD!8@!4tk4_2Ak^ z@M%IFesVUMRBG(1k8I7KC2nEa7kUw1CkjlsZG6?j|I2}%V5fSRG&A|q4!IOi+ee2m zb7D^xH)LGc6HKTT+W`@500!ZW4O+kpLjssWLjF00vlbxC4 zL`gKMdj2~ z6R+g-lFYj0D}EpA+h;XtEoTo=D4dqQ8zHnbo6`Wn@2+|BYX4*N=XhJ!-koz@^ugk4 zziZd3Tiyc7S5GUpE(6Xgb75qViybB?o7GL(>1o}|lzqjI#L`!H*CMvQG^wRRO`k@X z6ToswC;_s2-=dIA(^V*PpN)445A?1S(ZW_cgW(N4qaKI#x;j|DCrhg6BwQB|9_i4JWz@>^P#lO2@eJ=G9 zDr!;On?kIQpRat3VtY4^`1f_|4$9dVSspBpy)OjuL%8f$(d$qEfpV1+HbJ&md&Q%) zXJz;3^KuXFDd;hdOZd%)zd$9eaB{n61U%I(Hu%!3(y}_VI^<60C3Z4AuxI+vzh_Md zGK9R{#X_VdMda1+e)x<3p(Ow0PCsUqP1HO7p>z%2EC^eWmCFuWu6gL*0Oln+*Gq~g zjUre~b{uCFH8GU!Y~Sf|^RCxtMS=I1XA|`q*2jelz!#|XC%w(t{GFv1BF3!+Yl&!z zb*L%|BYyP5C%+A^E?xh--C29O6ZrS?GLcV`!7wml?d8 zLq?u$YyQp&NJ#=*(FgWOONVFp^v>(I@1?vne}Bqhcf;Hvk=;U!G^J0+ub8^@x!2S_luWB8cwRabNtat*Cka8l*Itn?k)o%xM-D4IIlj z?BidZmQ0q9cskV7dJ3O)53PlaqcRk#0CGCOaJpsdEQID+WL; z&km$};0jpiT!=}5T4)3;o4vLZir+sA)A3@|W-DQCuz@=UFeau#5V5X)s9YV-k7<@J0zA?Z8jgVqVPg^;wM*-*%WKBNlBI|xo}bYg3em9M}7brZVtH9 zhhpXlD5RtWo`s1G)p-CbBH`lg!k2nU68KvEJ@|aZE_Wv3J7-45_V61YshZ?L{5;ln zA2vQcu~hyiEiHV?Td4<*@zEnv>^ERUgh4mcqS#Dh;rZhDg$kpJ_$xURxHbH%%}-Hk zLG8Tx>Z7BjwGJEEjY7juK(-hqJ`linOQ3l`hZzRyZsQ*VBoOgMNoBJ&>3bc21%$rh 
zxzKU)pk(*~LsWq1G^K$Xvz9Dgn$w8W^u!~x>9$c0jn&mP-l11SkruJ* zu^f0bpvq_RF7x#1B-FTkg0lxX-)3vueWdJg7Mla$e^B4-e4dHA+aL0r%-0^B;$|y1 z9i(_!V1SZoxxl;o^eA&yAQXRfIcH`GnPSY3U}hn^0TeBLW4Xv@!ha<;otx2f(zc5c zgxDdxg~*(sN}Otj#OUS{Z#TC|Iq!F~#QqZ5Lr0UvqIX$oA(3;P{<=S+@`egjVwlHJ zS39Fm`{kaq8ay*I*3|q~`;0c+y`fufUJTkb2Urgx1o?Fib%p{dh5(e4_y)Acm!O$X zwxzxCp*Vo^STdAD&|oEpL-^CX8Ea8ARTVp*v>|lNebeW4u_KttcpMdtr8BmDA;IbIMAElH2oeR!zPM zQWw2+>o+Fr*12>~cQ1mVEx7VeIKPdO>HfF`w-0&_t?^aQsmMwqJffhAqD=p<>EcwY6W{2*wcy``;He#|R84yCXOeZG=nP!ivr)lAzWw!@MEiENRm=m&h zY-oc|7;FavV2gax*!vI*5evsOWxLB9%)ad570ag~*Gn3fQ8RU5!u!^<1t5)FaHWn~oCY9vd9k|WStif1V6ZPjaKXI2>jbh|QAufuDt^cFr> zEz;4mgZw+kJ9X~K<|~VZ^sxS8oZuW_QUh?Ij1X8r@`=uNoiHwI*9Du-w?r(oS8we# zV${rq7NM@tPv<}R$WzQHmdKlbO5Db1SFNZ8e=@(IXBfTw&4FG+6bQ#0`TOt!;9y!! z5k2nf=-5_&SSqTSjx`FwUY<-&IE*8s*mz3MKoF^aISe z0&R74k~jC;L?=@E?CC&!i;}simYsqZvIcLs-^C&`(YHWYhIx<>#iJWN%SCEeR5Mbg zW*7Fz%P$r`L=d}``j)iVA&Ds9%jSTmnAU;RpAyy?E9NPB}QN{Bm=c1hSua^Q*qK7grr+c zmjIofFLLEyi?x1ZRqQixH|V3vkm9@Ikp2<#6N_Yf{#|Sm z3q;2So3ya!p72*~hzgj?!|{o?C-H!$NwQ^8+FjOa;wPXd>=ziruZaNKq_ONTh0e?(Tth$gTeIRI)$c)LU) zXBN(U;wr$Y-hxG#E*a|D|4t_2$o;H`f{b3f1E1jS zw>JDTT8xqP{mJI$RRo*gONo%;wCWIf4q2cGk$^%c0%ysRD_hb09$>zI5Jjo}glF+&Xq>c_q!PlYI{Pnk7Tu`{!(g_dcr0!0Oq5r-UjEElREirTa8}cctO$m}NXJhKG0a*B!*Y1V@%kW~CYg8|GS1$l9D zZsBX?o4Ky~vc#zO3IC~%NGZS+A|$<+;D?xJpBfyb$AY{1FzfmrvcBvL; zON_%L+V9r{6fjBCiZc9T^s^4wxLoIG#d(zL;;*vhY%hM%BDDT~W*#5=hMWNfW!41t z5uj3k0pQ2?+OPWz$ssft;QAzKI-JSxe(h@QHtuXeN#B=w_{_KU8qqzx33rdHb-k5=UWl{P!H4s+j~0QM zp8b{yk%tY77}T5qrOd$u4Zfs=gXNcbrOApi5h5p-hYH#K4PdazpH>%xKUqaAF(>A% z8ce5(Q#gGCGgNS2H?dB%Y%ri#@kIy(F9G+Ifd!?|si_~};MUAX z$dP8BNDKA#pN?3}Fzn+x12xD%I@S~5&Z4RPG|_G9IPg^1 zT&fq3s9pfL?Qs}b7#b%WZ`A(Xc9jrTKUZ*im6;?s*YqQdIuvgOc=3A;1M6Cw9)0z` zIeWzjdrPgwYJ!_Ygk-wt^X9R6@g=Vw)ZW%Czs+!say|lRLM02X4lFTtP_G8r{DYL3 zDted)ZJ&Z0%QFYm`}Hec<|JU^rrE4QV**1ZFhS8y;*UTmYwg&r$#5>ESEp5Jr_NE2V-YI>tUO{l|}$Lc#z4u6Sw8f 
zg=R-r$c{L&$cv}uAmGhN0nK60hzzv3%rgdAR#b6uIr9c2$o@H?9ThDnmIFK%El_~(epY(mi_q0bYZ1VRpZ$Xo-p&{Z-r4PvsBw96+fLn^%L=iJ5--~jboTV*@lU!R zw>;Kmx`l#NUL`b*n+1My7br2&FWB?}Sa3UB_OVaoGsqdF0y0lJbU&T;tj;`u+5guL z!3L|ZHUM`U(R^9<>+^iP5DF__?C;T@#!1dyG~||&%it!uQ)B~nX(#*FMv~70kG-Pf z(Yv1N>A+5qlZI>A20rBwk;0Rc(aU)4n&Rt<76(Ta{7)ML2G$eQ0CWz|j|H?mq=S8b zwPAKjzf;9OkG0!JXy*9N9+aHMH2XoE{GUw#X7L#Z)_AZtU1I8kuU}{`rOuCHXmN;0 zt|K|8%<>NzQ9;qNk7`zRB6#XvlOZa+W3~s5n^`a-eBC4n<@5ZNX4K1{P^Sjjafw7A z^SqI2Bu~fel9Z%^YjP!wK`v*g3>7OKWgFsy&h3)a|gXY&b8ANj}QjljIeg02c2XdQ^m!QFPxJ|dyv;7l(z=Szp zW=8;4220-n>YEkm}|E9em2<(0A zB(l|*QSrwx9;imGEd=pH1HlxKh7k||YPP$Uq-*Pr2nht_eF7+9lOFmi-vxLscSa+D zIM21a_<6*CfCj)Y5G4TGnYd%wo|;8|qZKRD7Gy1P{THDiUn;hohXY&y^SalVVD9}1 z4_cP4Y8mxNK|CN-H#JuNGVc{c`yP5`0Y4d3zyyD*oD(dHH=8!yn(OusB-Ko1k-Z^* zuxbX^{%&G3=y6f6XG$)i;zD z!@qxJb6R`7=#z6Ou12Zl3=wRF#Z&_+AdmMJnvfEEUYe_)N;~+drF~FsR>{(ID={hQ zqF*2T7tBL4Gg|D**GCy!Hmy!=JUNPFzd~}wBLC?=C3=GH(9xQ!i!B;79Us@m z*|E+3BxW#N*fWSY?bBtW($NCO;QgRxv45a{x?zBjgFf@R`||&itf-AaIlG8y|Lp%Y zM5^(Fd+$B4-qM<9>j^mpEh|^ZOVzJ<*O%3xN?pK?;?dC^2fhPPTxLbCt)-%Ixe9X8 z>sD4Qg+YLBTj=ru`g+W(Y$k0&W!KJv(>&$ge*6EojKrAq0GEi*n)m#>Wl4;O>1Sti z?^%6Lb5^u1XnPC$1cfXa;uQZa2BLCqz=wGIRkFVPIv$qHa2|1M%YV(dz?c)WK~koe zwr6YovrF}x`t`Sx=LDO}?6v1uU)%OQ4eGg4zY%`R$4KwB88@^$VQyU!KbwZgdbBge2x^Ax&-;~so1yW(@zwBx(!`Q*!hNfm+1=U*jDlXckVXC!!o zb$kx2&c7>Y6{N?GsZiY^E-ntcbQ;$in^bw-IS_sw^oFKsgZ3n?mxreigg_zmh;bIg z7I^*@V-&CLl)2zwLA6~RfsOgge)}`3AR@AVU{^3sLt$a2h~rM)fw46%s3=tMzsu@K zWlD!wIlS~Oc7vau{>a0w7&C=UU!kn``HuNtECJ!hcpW*m`4QX?M%Ugqn_If-r)?&q zZig-My*iJy7DV5WRNC&JXXOFX3E!Jb40f;5h21t|J&ga81HekJ-r%8p$OW@avoee8 zN%^rseCX3f2?xhaxN1Jlx+=8ghZ|cr`hV*h?5rs(j}Z`_ummyuIj*1gCflzrmKdL zkWYlT=EtNeTHJKv3VzRu*zykluV8_p{swPq6!(5N>vunB?d+TX&kqs(s1CvMl6q?Y zzskiY5NgsOs>Rc|3>C#liC@87(K+wWz~KCcLxWj~X$lM0Ml@wuZ|6#_rN6oz=ytAx zN(J0+n5^WBI*d*c{F4cYc=TuUXaA&FU;bM5a>ecC{eJtwXHlow66CKBak42kaDOpl z>2}`pPSt1ryczMm+Q-x#YAw$mxupnXz?#5Vi`z^(kIZ!@O$6Vxa8-$9! 
zL0+=M0pum_#DfP+0yT5w=WJ|3LU=2J$T3T;f`YdxA5s-NzhEZ2yI5zQAfrg6e7BjU zjqxX_l!tpNYeGZ8{~l%yQDWr-*Usm`6@RS6c}Fm@Lsj&(vLLHff&5#5 zL@JQk80%_D#00zZu&j7E_$Sx@Oya+`(J=%7!_KD@y^H_g+W-C!P|8mSTO)M8iZb;8 z99S6b?tfq9f4_)DW3IvnR!Rr{f8Bj$SXA2^uOJ}Z(lJO&r!aJa@D5<^OdNQW?V-SMdBod3~J_j&G@JJ0^GpS@>htvA-Ye(Sg1{q6>unTb$% zEkM4Xi3;~8=ahe@1m9=vuVDgpWr823+Szt1dw1gH$+ z6+`GDh%9)&7PSA?b8s}AIapeWh%fp@DE|uwM2pkuBOjCXR5$E zu+n*l44M5l#PqM{O#Wk&b|mTGI}!*God5aZuP+6{Ii@r99}nyOk80p$c^?PYqQeb! z>T{?1Z%_Y6$KX5gpCT=_!wdQUTbRGj8@`u<+Y7vTE%Jw(NbpKxfIIit|87oFrQYez z9R>^qX$h^rFN(L%fA^Bpqu!n0y+j-k3~vBI!1Z6a^7sOVS_r z5x(z#w1adOv7eb+;Bf2jA zRgCQ);r{aYxySG25tnQ{%1TEpte-B^0 ztlV~tdp^H6Cl9DW8&Sudxu`z*y*auincz4?4vION-m=c^7$5hD1A!l@|)nZ&js`So#c?0s2Oc zq8dCMQ2AA(>(Uyg(O75esCr0*68T*upr_&uEO9E#aiqP910#hz%en?B*qI*P%Z&du z&-b3UH{hY>F55X+@S@uQP^b zOdrv2UA(Nk4gjJFkGiG(p}!f(U;q&PZTBl0Bs7o27iIyGUxkisD^nA`_RFkC612Hu z1vXlXI!`=}gN|w2_MIthSlNYqB{0dkZ#E5EV4!P+3L}S62PR}=VgBwL1c~i6c4G{d zjf_g!y(1K-5~g-?=h-N%%nQNv@4tM!MLt`NSZkObkLT5lp{<(7X7QXpI6B`7RCLDn zT#vUHVQ*eBh>EnA}K`xmZvYu5RuIXMNM9rQ)sag$A_q*XCTq;Tc~b?4WOdmrMz z7Uo+G0D@{@WoWg>^xT!FMeALY9BP|IP5_|rqYR{)*+YvyKEt<2;7}^{7psY0OOBxA zKX{Hj000K4h@ig4QBC}Pbl*W!zJs{ReQb(2o+d;0duTwwKadKhMZXeXrulD_g2ydX z?eBrA_^|ZRA4EQ*$-1P*zoJ;D!5^VBZT7i)c|zSYTCJ&hN3NCK%W3n{|ESm4zWUg*x zmgm&1(IoPuuN_pei!~P|)z?VI>MmL*AHFMIqqHsQ-V3@$G)ccj`81DWNk(wJzHi~e zy^msGfVt34Eh-o~EyFpf;=BR_z zP5I}P;j;$*Btj0s^gT$_xz|j$>l!sZsirW-@MNRoH^m7KTZrZS-hihnKx>(c?VtpQ z_d9p6-!{=r8Q;sHUr^>VUOK!WaocDy)l=V{LI0gL>@Pe%Le?F&Um*hp@tVe$kG&C_fjVqZfJRo7_1ubh88t?5U~|HN&l znq$;4JvQe9pZu1K)MntCz{qE<=qjjDVbhV?$tzl_jzcz4!BG^4w$GUD_8`Hny=P`p%d%g(U^(_dg8GhRWWveIn)WQiQT@Ylx%ZFMG^+5JPxyCI z*1P)Pk&c^G6v~WslRG7p(CAFdZzY#|1=ZRhg-hj1e*T>NN4fG!! 
zPa!f^-3V8i)s!R$AS4zY^0*zjY7pvOoo!bJDubKd8P|7OSw!d~tM0>@lC$Y##plP2DA;#uXXItZiwWVVruCvo=GL_8j3xrcpA~7X7MTH(Dz?a2&9@6$q27b-Y28YA z9CaA^qv}7Qco0Ejl+;@|^&wqpfW@e6 z@{r;(UPMT-cy-|OgYk~{nc`BjM05bgNB20^Eom6LpMMRsn}bv%xIy}DZA+xbAYY%x zKI?~&vOz=ATAlB@Ypk0aeREe|quDzZ`xGFA;?&K>1>k7^rWgH!-+_J7dN^TSI)26$ zrV%pZw|p?aY2SEn&@k%hm?5j56)iWNq;3$0{d+OV99~glmCR^ugD&uMW71>#_Fbt% z$LJoVF|U!!xfn&-rD!6`nxOINQnOR+H`I6ZkhjegGa{}M`P?6SWvM^^N;%kYc{kx( zF+K;{z~d-)KY0j7&CZ1#4T${hxc{(@fyCzN3`rS3+yI}`$Dy%>bK<8?g8bz44$6la zFCs#CDKDC+bBa!cOfQDj-tyqQo102tNk54_Ry1VU!$D-W>#t9rzYO^3O(4WFi(3?s ziy}|mUY_S3R$h0HGq0=*Xo0T`VmMASFit^CntH_z6RoY82&~7<>L95(zHK2n$DDxj zFYyukRaNx_qGHA?z&sKoG4{7GSWFD&~aiq#l zus9FugUj@w4%(F%x+2CJF7P^+%1MbU?Xc?jLXpf!1bg>RY@vm?N3BfC; z3n7nX9hkD0OIQxc$l|4Y_Xy_eEynm!l)9MfZ+AqmAkOG0m(*q_&z<(xBMdO@w`G@I zEYz20<*NntG4DQJA7OprUZC0$aL!9&82YL}(k}7z#2|Z^JKHxJt&U-{tn}Cyb(P(l z%3ji2oogqezq@Qnj0t<;g?DGxT(VJmqJ#Sb$3>qa?2dA0P~Xr2iEJz?;nj*rR1v27 zdvT+JFWfebTW3cux}c9((`1Q9FCPgzI2~7CnULp>$r>2gPN%o@$Cm0P+0owcPwe?Z zumYoBa?daZR<<~JDj9aV-cv{vIIy?WqK6%+RmcE^9UmPu2iYHT+IFImMuqLfPLBlP z-;=AmOX+s3a5LsWdD0e_`h-9sd? 
z{p2@fF^>&yhcVWPn%YU+7BiuW+G>$rv2q)q>UCmW)a(OhHp)=1SlblP z{-kiNv^^xlAUjYBNy@+uXu97aYe{&;2Q_yoZhZh8br_!kYFEmu+O&G1Ki3)7jE|9j zpIt`Jg2QeTzs|Q#BqaR^wv0D2+oaZ5fOrfgc*_#|iEzS<|Kd_d$-pW-wDoc*Th+7f zbFNj_i^HbPVds=V^%Oi=yy_RJ_c=~iygUsd!3)YS1;T`(F8)}uc%!F~{JNoNU8)3i zKNfqdaTRR~4r`8uNUv%bQ_2|ayK`URa5m#d&#;J<;!OcgB8T1T`sJdTYiJuKYjw57 zN?v*MOspmp@zA6DXPLET6$9hQd{=T2rCjcoXZo)8MZ&1~U*$WcWMA_wzBr%ZcM}kM z*Mc2NbK)ah`-*CtXYIZ%p-J3f*cEh(?eKcjR?F9f9Q)CQ{HF-IfX{s}+L~8Xp6e+^ z@8?G}G~)~9MfN5$J2J+g2bmeMIK~Q+N?~ehOx%li zKqofJx`?sn>TgvguNJ}&Hb3?$^W>JV@w4|w<`H&_XB{^2pyp%U&dj{oLi%tJUEwfr z?Yx-}7BIWAXURQ*h{h{d0y3;tMJfuXBq-Me416wdc zts4w)0Wj6IFV2oZ;MaWkESedwAL`A{*Y~AdFKgVw-XqutxrV*(zTM$b%vz+8V6m zP_BE?ch7xsGN#3)sBHVep2TOlwGV7mdA@g(TM`*>iI?K3lUL2hq>j(Ghs&#ZX)d_pX0{f^b+&}3lLWoJ$rH^D zb5&(?PTn^nC}eDQHcQWv2iEM$z4_>&^-=yZ+dWT5Rzqi=Ofp%(3cV`n6uVoF7DwW= zk2-6Z)kTR`2%H`{DBxpE%Hm0VyF?|S7rRLPf;lTgA87O{F#9+k zsVykT*?;>C8t+#1^&L!5OaT;7mD<&OQ$wkiUYQ$EKMvKAwIExkA-LY+;kMVw^3e<$ z#sIh{+xTrNkB#t~yu;;55Ce3j_2io593|7%`(J9HW^7!sp9ZQnoDeGqU+*NyrS;XaaV8`YwGrFQ#4?RLD${ClL20 z^*WTm=%gTk3BltM8Ah9tc#T=ZYDM?u8c;pljq zHf8At&dJx4HIr!e!rRPOL?#y*KQ+!r5noHJOoef*?}Rc!#9suR zXI-4*RwOj28K@P|MElPlz{_lgQ_&t+ZnPepct-TVOHuL!v@^`zu#p;|pP@Eev_Y zDFVpB?cf(otit9LK527J^Xf*sZRnq7n876uWPD0qXQYXY>^(ADb;1#C<1%8QTG1AqHR~Oo+ zwqauf)uD`;%c(h@x++R>gZRP>ZxL+Oi%-ct>_xerSv!kZQ;H(}=HRx^EkupShddFP zfiWC(;buds>HtbA;|{(W73`i*@}3-Dg#?_?%TE}6uVFR1z^OPnzbTqy&FzM_gC9*o z9Wv7s*CSSC`34OmO(2O(V~^z2jo&LX+{V?_z3Xi=@U!LQce*Rgp}a8lGP%xJ5tKBr zE#M>hrd>5K2^Wg=Bl7S-qg?|(Eheyn){yL>vDx@$ zt47#HTsVrwJ1(7OvYEI>!)!~_QZn00Xw~+JGRLXK(mGP@!~^n;h8YiLTFg0&FqG$4 zNb6U#Ubdw%w!VgeM5G^AR6TCJGqX%-2#@uu$(#c9XTlSBRSmV%V)|<23lzs!o!>In z3D+Ux^upU&4m;jjnw>dd++UWY&o?Cpe92yMObakFww@gw^l{(-s`uRy)htH2!n zGXdAdC@$C0HI=~kI}r{5XTjNSfGv|<W56Mfd~zgcJ+s-X)|Iq<*J6 z={Uc^d2-Hge3n>$BGGNXuTaSp$sYX9hJmA0gX2P?-qSezM(4wuk)VaM=p$-S+`qch zj-k*#8ShYg+%PkOou(l^P=D4LL2c11r%mFEg1qO{3@%9wQp$XD$VziIfG)ZoBDBHLd(3&ysHl!ZWl1aCUp0bXLib7FEH|9QR-htWJO2z zHhp00OQrd=8Y8JTu-DYX4p== 
zLZ>FV9W_4P36!@~F3)F)i0%ibJEps=r_by7icu8m~d;PhEG zr#pF+^L0{cqTKG7f$;T%yt#WL0wiExW*@mzS}tB>y{$$hKCx5kfyY;FrwXv{E?WBo z(WXJ~qA^TIrl(lZc`D-ZN$Ubz`|=>VwrN3G!u#*rm5Ip*WhXs(?8)|NXm8mF$!X`_ zarqKhT@Dw%#WWWiVpwxUHb`0YBT{aTQjO!46tiItY@}Cu%O}FL=t08v)t0d3IE7QD z3G?9PFV9>8;Vq4auFGVc|KM3;#1PVWH?Yk1p2EUWpKX_JYy~|NwJT(~UI>M5)QfJY zr6U2;DJsOI=RdPR&%AgKcE4U00#9OLiS-ui5c>%Q)5xgM4$)^P+XV-`Z>XNUQCZpW z$~U8qh#9op*o_}~dV+f6)CYL%qKhhJ`3ymKfJJSJIo7=s(=xBS(^-3ks+TXifqc+? zZYJ8=4SyO_5!n32t$UeH?WD{3I*O-(el}F&UP)3}uD`sjygd6>Bf4R0LEaIj&K&W6 zx9O>fr(P#6i}&tVe>kC}D_vBzt!tJi8TZZ$Ul4!&u@2CVy3 z$SP6}L?dLBY0!3)#O#F@+h{5yI18{mUrK2iOgK>V|7!+m^WmkyW3APf${R!q|E zg&vonR?;Z@xrhd_4P`p&MwxxZ)`jl=R7Fdnt+b|aRbV~EaZE4yt&>pT@eR?S_O5qU zWi8qsb% zYSf$&?)gPon^jCYAnm?SnsdqLE*onFW|F^ylra)w#=bmr^k%EY7K@~*lS)OQ%lp7Mr0^9F-~*Kb&_=j-<*9vu~&SuG>lEuCaF7E!h?M^)5`h$~KNqz+`>xeiS$ z6g6S&JaAbx_YiY8tb-|9wB3t%_`Zaez_@=l@`K}D>;NtB%xa%( zZCTJ`ek0Xthl4T!f&fS$(w*HCW7X3d+lFz>X_70 zO%%n?qpuHnSww0kpR`iP^ujX=yCAA+7Y{auKR!yV(@YxRF06R7s6FYd)`^3#O=>kL z*&=|xH^3WVO&_QXtPszrhlXSAPOfzVC z1UEhdd}_Nz;z7ywI^nGPzBg1m>0SaL5b82_Jp`gjHs@iU5JoNlGG<_q&RaRY^7I*Q z!-T&+zGCh|o3#J3I$h+9dso6nZft5jT_psM3Vpcrt|Lx%G0bD`@kDO+=rWT$*U$;I z$yQr+DY`=VCGKt5Gh0jcHxUaSi&^sv;H|OR!vRdA2D)e>Nxk-i^%YF@P>ZB651TVu zZVAfzkvM-Qg7Q?Z)P0+B5MKgO(XL4Q^#4etQQ z1NdUcNcrIc)8L1}x#PYJX3*=|gYFJZd#O225dxOST%2FO0w?7Yx={?A$t$@1!}Wcv z5IW(h#Vk0Yl2bUH?C7ofB+Sv=sAkd!o)48JO0P4NEjHBV@xIxys3!oqU)T#iAgdeV zPbE80W8F%qbQXca(5e5phNN#2$u3NNiQ&Rox#F;WdPZ zrTDN_bhGk!u-{vdH>Z+rU_FJ*5Onm9w>RRReT>J?P#PLc`Z?t=&>@hcpuAeU#)^Cn zvr8HPBHa!3E^I5uwap0as+Qk2;>fu3$=g?)-#RG>mWny6+I?@|<<7;$nlAnW+_mO} zVDnD*yKR>ZWby`~i;fxb2vH^RF{G7WgJrhzl9<`Ozl?_k%e-(Sy&LeM6PNb7{==YArXI3(DZ0;%dj zb{k+DSGxN`XxT$~<=kQHvJ1VXq+HTAqDtJtuv8XAh`}8qNvdvI4-7w-v+Kf&Ps)tb zS`l-@r6S|+wpc01upXbP?WpAaLg@J9F=Gb-tqtN&ry#LGq)_J^3bxDmshXb9U>RAI zCze`LhNZ)W;drT^M`jtfi8oepiOWlAC^_ha^&nWh9Y{@zL9jcDrw>wdHRDk*b3{QM56X9WXwtjsF{!x%okuH}s4f#J7saMhf literal 0 HcmV?d00001 diff --git a/docs/reference/images/reducers_movavg/triple_prediction.png 
b/docs/reference/images/reducers_movavg/triple_prediction.png new file mode 100644 index 0000000000000000000000000000000000000000..fb34881d1e3238d4abee430c503848abd7b12b8c GIT binary patch literal 93252 zcmZ^KWmsIxvi3lR;KAK35Zv88xVyVUaCZyt?(XhRfIx5y1a}zR-9B>f+54Qcb3dN- zV`i;RDf91Vh{;`v`r*6VHL3+tOAw)UPJ;Ndr8v~bUBu*40 z944wL7@S}Ye3vvRBnPx0u~|yXcYaL1nsCDfJRW^&t*6GJn_upUmK^4*0uouUECOrAoX;LWh+M zB$q0?1XguHrHIRj+a@-CdhS5J^Xp%8Yu`wCH-eqJulJUPoMJfMjRJCfAwSOpdxy00 zQu>~c8@vtGk=>Tir^HepgRvG~zT0f;9j?8|BW2~fv-;FT7su|~X8#ZX+x0mkqto>w z0g}W9w_zL`9@g}s{6ycdw2jiU0Nw^aETufn5rp4NF}&sreP~|Yr1o8hpsVY;j`}c) zjrm}zRa3xap8k%?pQX>!3f{#^Uu;57Y=&={o7?HynY{-;OTf9Cqfgw?T=TU_8V0)N zsS-i=0*=0BHP!S^#Vl})VCjXPw8wcD%==V~v1xe&fi5UeeeuLjDBthXBv_j-Mj**g zc;Gv}^(7uvrV!61^C?JD{`adrDB@2^PqlWxY@h|RSP8K`MPtaJo-KiyIMDkT4~+|AV7T(uFt>C`#D)Oq=-^lXJ*_mOu(g+#@A_ZUDKu}RKcrqMTg)L z=mb#s(R159@>1LH*>k18YWSXED@C;jXnrmHy<>nLbUJj@yWNz$wubQVJ^pnd10%)n zOTWzPEod3>aDk80j3}@XTmz>v>P)O9KXvMeADkvSLKwEvFG3nD_JzFfteg5Jb;Rek zdEk_9@J>^PMzr!QJ!nbyh0~|;GsPCLDzFYa6fIbn zUWDeeHSv_?s6f6>bz6r^v8TJ2so1KRtVEqx+s=4zCPLe4HmFN(+@?k%M}MKbp{`Y4 z{SJ+xuSY*DQMYMZ(^XY8M8SM3FTM8e?({poY=lO6UWdeo?klNb0RoQ%>h$&>luStU zy*3C^5$o6%lmIP;$M`Z)zhZNn@iF=NZ9a(hyl(;HLlpQ<3XAH8JqdL0!Bhiit^+?1 z)}OosN1BUWr(pjO$B)epTh}Wh2NfoO)dGFui#-Xz_ZwWt0zm|Cj_E1u1Uewm?+{9>z9$gGKi_oWWp+oA6)e zk;U9G3oqraOodF=f)qjFAVx=)Huz`o=MOy5FF`~G>Z zrZ^jkN$V%os8KViV3r3SNt0I)+(6x+HUTt)tiRvrlh=f^<3_=gM=1`fuZy?PYayS2 zpS-Vvo{vud_S~2_A<>ea!-3q=q;(W^Viw)uEIbH@Us0V4pT3;5JnN^7=|3? z9)=(GDU1aiPaq{nriSbeDGJ3NIx*1s8_qWb35j9t1j+=Jgsy}y39Sju6xniJ`C7So z#mM>7x%Rm*#hgM;k!7g|Xg?8%V=07ZNjXvaLQO+=`c*bD&&gTdHIgo1IKwO^!|}Ou|eZOvafRnqQh7nuVBS>|=tAK-3`ceVHlk zVh;&6q3C>!y!rgw9N7Z8BHO}jJzuOlEJw^`EM!ax%tfps%-V==5uy>^5#|wWSnXJQ zsi4&D)T7i7OgjubM%bposX-}`DeqEdQcvj-7^>^+YRzh0>k4X=>aOak>+NeS>fGy! 
z>Qrh!)K%4;*X`Hd*BjUTtck44Ul{mR`HN(MuC}>;;tRcf$(I&~0{i6+%nji$VP7;H z@HeP;k2bZoH+LD=VK&-&eh#w?j*V9SI3CO#SQ+9O>FcRa9!w?=FaEkmLWfd~ZVRxb zwZ%E*CLqry(q&&m>$2%;)c)3fyE(CGyOE8tjzo{bPANx3Pb^MU zLJcBu)u|D(Qx{ho5&#v*MNUT0m)FGFHP{86;UMLO1`iR%632qcSCrJ1bes8^Rbxs> z35<$S%T|ctl2uWhQ0Wk^HsjQ58oNxM44pR*{<1W|ej3kD@51bCIf2IZ5wm=_r08yHAFJ zgE4_2N7F!MMNLHg5tbNMF99x5C2=p&HykmXG+dmJU81Mdpd3}oTvAa&Sr%NnD*s&e zF?YAxS=-tq`V4=9hs@*StV?P9Y;{TW>CVbw+wH~b7kFOyIMLYe9@5^@2xAIk<}~s& ze3dvFx($VmKNfErlbw{D3>?#(^tIouD6QD=xbtvzhPxX+v|n{zQ(f)BqrydDGUFJ) zNy9P0g+#VQEJqGU?nWj?7)C5fl}K4h(WZ5LE*MD9$kTdZN4QQzrMdb!El>9z1!q=l*#rxknyd&<}Gx({~q4h#|> zzkoxW({&zg>tNk|#VONBfRdP|4XbsheXRYkCEMk7J9j^JhvBgF#BJPRQF7%lV?Ls} z&86tJB05+$Y9gaL>QrRS);ric$a~}k{bl}%?Hw+78Th$hm7kNqmB5RDb?>VP0lgR-)u!!Q4k7%RbCl)yyip*fW%kQ{o%ZZIP#lhUgBDloPwjjVg-T zl7y0dvu9Ab*reEUbOY`Vm-`c_9kVips#dN-(`tm_#qjH3(I(vHa1=_^BY_V-9i9)Z zH&HK9E^jOq1MQd1z}~N6qz9z-(`TO>bsfLbp}1m=nC|Ju2q7;0W4- z-s_u)-b>q^(?M*pyZQDYk{ur4w&l*KLg;s@%LiQsAzxRJ1D*_b_?8fZ2imgU})cDZUVdG?lXI5`< zXv1dibD{S7#WF-iYzF0;99~vAd(Mf{WsY0XQ)Q)a(&@GJ+vVd**vd~H37*DI%Fb&q zj|c2WmIs2)Nw0I)9<%j@oO`{sn3c&9{p5}FWPCbP9f*zxR%C{{8_pB>UFidgW3zG0 zJ+&##sb=M^uq}kO{jNiI+sl{hz{mdLACo_>OZyS}5jbYJXLgwCnBeOl>N^Q;y)qt9 zFU+lJ_nB7qp6G~mPrQCUEwY@l^Ib^bAb&OoRGzRn}@*6nB+s$9p+;D?-_N6as&sVkv4-xL8A(^ zxhs7-V=! 
zufl@bmePjSHDM)g(Y;};dS$oP0Mc@BIR3ntCo?*`(V546byE9h{1VP$@r>+|&pEMe zkENVG6caPI)(zuk;x3eP!Mm??L2NVmScVpcD7LcE<&i$_s&}jI;8`VU4e45`!w)r& zLNg85SKFgs)k_zCL>p)O?{4p+^Qn4$d6l@me(BhG3b7UIuJ`nLBJmYv)S zcnsd&-Mx9Z6l#|9uA$}*=!*Q>$GJIsj*R*Zs6VW&G%c)bL51zIsMaRsoa19q5>2GqqY&+g%NFuatfI^abnTg95hs&z>H=$Tn<@EZA)UR{Q1o4O_=DpQ^K z821yO#jWxAItB6q9+Du-jN015%*NBPbH~b3O4G<=dFv|5pL{@;&MC_cebx9ID8H=<$c)_@2Xha!fVRCDGc@8i}_sjWF#4Ef)0*_1bgk?d3w)ClaH$atQmdnL)CU!LR#H}nM@+cS8If9v%LCkQ9UPwO?bG-1Kw{u$LHBPR-}pq@ zgkc8+2jE>UKj)B|kcAXxiO-K{C66VB$Ga%Y$@f#PFdqgM+dALlIIG-Y<3uVL5rqL_GEdb7iTx_1VTxh% z7;%N}4fjiOS4w-wrb5~{D|tMR?i06>oiTXqJa@*wc8yn{)((&4OPgm;iOFY7@_5sIbTw)h zHC{8`CmQ%7XVn3W7BSqHU;t_$fch)oR~caThlDR<2{7jMYxgZ=AZeB> z>7LPhZhq7ulGLJ4cA|`XeS{D{C~SR2ClPpkokH=EgP3EV=utT%w*!tRfiQySxefC8 zCGp}(^U)eX_xeoi*t7v5Q?3n^oQT>H-a)y8DTG++ROsI<47O@$*x|-8UC?ilCqfW< z=H=HZa4Ci`)sRE599EKSwnGE<6;ECV9eP8=j z4RHcf1PcL`9vT{sEV4K7et_Vpixi1!pSX~CjDm;mQtPHoE36~hzc#M`Iexn6!ZeZL zr+4Ex&+&n^2N!c2i&%q6L$aeEK03$VS+FY!%v+ogRpe!Ib8rYJ9HB0MfA~o=FSDaF zUMOPAxR`a%9Ao%-8uADM--(9Esr~s1@vPyTb7N(_{-Aivhe(fXh^OT>hPRbSinoJO zVe{-5#3SZz=pX&%JN$qtSz4AFXKG>{SL1WS9#+v+Gu8x?SUsDA8(rCUx(=r+M;XZe z#}LccixIX0szF=bR+pUMw@ORjC*BoSw*4hOkG2dywyV%DLO)j3%a*p2=e6rK;P7bJ z^r4!qD*$4jEOBQnarWn8qu(_o&Oo;yYhu#`g4rV*qHOeD9su7*5tmxUSjZI4z-P$OKRl{2W-@T&#}gJXB>%1S z2gi@ZUh7ezF{WXUibu&gT-z`J-X?ZN)t#oqN@tk@%dB{Fg+%r6m zK>T_y3e<+P`EW080{s&q9I6|g!9Im`n40r>XaqC&SBfFZU?++h3RnEMxY|$TGn2(} z6Ltm7CjF+ICWjfS?Kw%sDme-i?hx8nEu?t~TZ?`6%`inc`H9EP?kCi%$)y|`w-tUx z^3AEs3X7~uxzATJ$x<%fr)IC;dYW_hlTk>RY|&Q*PoYpE6v1bL>p2`GbQOIPXdx>c ztI4XGZK$6#7Ib`2>mK(3Y%u^2KeRJ&213YXaP4&xVX&`4CWu(WL20?s6ebC@lyJ&n z*nazZBMFfhtzErieU`q1;ruS*meID> z7WPin4#yzQkZGG_TDCHPs)qPZ8dQFU5DU*>@x^Z?4Y?}rivq-SBO{%W&rm%T=BMve zjb(DBZ6zxTgG8rgks9p{OiroSl<*mmpom8)x->}5MfH)zh+iB_E*#jLoxk>WE-v}~ zsf1y^;9SXFjNTTVah!;q(S2ZseNV))y-t5}HaB!e5ly0NAi0?rg)mS}o+q;&ZzelT z7E%OPu#lO?6Yr_QxTS1j-=s0#ynGQ3c>xVE50gmCM?Fnp(^d91oryZHoW4hXvqE>- zNz+i~o&Cf>oCF58Hazh!fW{=BrfKd86Axok8#K7|#Lwec$QW 
z#B4fhF*Dt8c)QMdy^*uRvPH*8J%xQZS#Nj3U*=2h{cVf8LtZDJYdh?`raaHD<0hu3 z?fWl|WRtUbs5q(6%KLxD{Cr^vYFuu#Jrq3@Ikd!S!eR62?Z)+4`4z}|%vx<*=~I%N zaddMHCU5@E7i($xi3kWd!3E%8rKekpq^IKyh}WIJ_uci)e06R*#nu2^tl8ws)b_Uc zybfOIJ_?PxYVeZ;0svrH=1OXgYSL1ihPKwU`bM?}#%v59Yi~-&z`?;mN6$#d$Vl_{3mOME8%KRt8XE_a z{~hG7afFQ>4DHSB9L;TQ2!D^OZ(!@>$U{v0d!oPp{&$_muIB%q$;RPdYP~5)_xlMQ z11&w>-($ata{s=|DQE6#Y^5e_Zf$Ji@HPi813eQ9_x}j|@#x=E{)edgzeQQ;+5S`V zKc4(clAG?g2LGYa|C;N6?!MWJ7lxbeZ``4?5MVZa;6UF$A{DphD5Bdm5#P zp+cvjYZJzWI18+I3qH#P$20F4$-F+QA6CSB$&6<>*attMXB(Y%emC`U>05nNpK#CO z*lQFPCj@~1c_B&m%oMlt8IL=yXir3{AtS;8`2l}iz8QR=YjN(x{loKC2W^-63`PE`A{5dbIN%f;+mUon1_YO1UcZwe{Pf%jA+%$NZ{1*dId!fChoS3syzR zy56b7!^39J57$U6R36|>sm$KDtjBv)HNA7*?e_DG9PL|D()xA(KA%VA=j5Z9Dl4600j&JP{Bsw zatJOp+ZLanpXXd*RmeCWFN#1S;1^6yO?}etdU-g!D&~9ss-dA#Qcw^$JTfwHPgUI^ z__X!u69iy>Dw|8UoRK$pp^jhi19xlW!}l$-A?N7t!Gk}njEx15Mw zA0(ut3OF2grS_B2pa!Gx^0Tt=hkyLYJvgxN=&Y)ul}}1blW=o$Gob2uy1)Fy!=p`s zuhn>XbX4&2;{9f)JQQ-7&~udvHGyTw;)e%!3kwV4CS&pDT|aPilpQK zIc%y?Q+3`vn(nJDE41C-NSc>{1*b0%S(RRCuy;40{L1X+t;2&|Co z&Id5Uovng;qTp!I!{y*=SOB)>>kU>5j2k_2S|IPSbT(NEOtmk$6|gM?&i+G7xN7G) zO)Q|j?Gvr}#+5~W-Q|7-Hh$1fij!9dC}*&wIz8YXKyb{`@yojFOA+QO~%*F;i7gyb_M-w^&4RhJ@x1i1?qcald@{gOCt~fb0@}5)OOgXfIGzoi`c} zhq|v8(OPy}k!)e&?{KYs6Je}A^~fW;;($@1-ECzzBeYly#v-$SS)=@93?S$}3!IX=Dn5BEj%G=Mj~L3jO%t`Gtv^~J%b#J z)t58J^vH|##(rGo0pc+TjDMm<7!vFSiU1!Uo$B?dCkBNIkv*(;jBQQI>f-gNegZ}p z!pbQoJDcvH#ey8B<7u-J-pV-V`{qFv@6QEn|7Vg6PA7?k`d8qG>oD5vrg zWC50^#CZP?X?CQc;bHsXuMv{hxf9c%39s03<>$|Iuw7Taod3si0pRkoJ?a8ERX>r2 z{Y$zBQvMN;-@fu!Aj%OOp6qT_YOTrWs%Dnwm+g60UgT2Ck=g;l5CQ+V{OFPGZa45h z#O6>#K{`c1{{a~QI45`)1&n8D{^9hg;!i$;jrB=ePqRPM3&bBl(TQkzYh4uh2zZ6Y zTL0Yv9Afol^N}Lni&m27*7m5(Lv~#UE(+M?DWN}Y!LNjb$;T%ko8_LmdcrN7k76iz z!@(SaUYIQE^2>G5g%|hh>jzcb7nZU@8 z=Y{ot3DGk*@c%F$lC&=x#H*J9thM!;sVZts&tBuJ#bY|?C7`<~EB2*lH-_J+P`=uF z$%R)))49pDk_`pTQy$K>?A?feYKtW1yFa*rumIR(DdF!TEw&Oh#V{+>&2bIm<+V?~ zGM7qSGaWS+J(;9^&0K`Ftx7|aD(A)?7lCc!OgOAF2rOmTS1`N``FUa>FbdQbJV(S4Rz>x`n#z=L!Bpy 
zUCC|3d>G3vrvWU`w)A8Gavw`tzUHp29aMg6m3Nco?h&6a>#3`V8Zlkz;+5FtKJO$< z(+=8iZre}Ov@Jwi&}ckDYuJBCOt8CxerPPBP;)=jz8b4u6$mfQ%5rlr?#veF^Q$AA zK1JMq_YWX}A)ZOBx42ydtZMJ{p6grSvL9CcO0q4?TvYMDLvxu-W~}r#z z!a>79f}vd!ZeG2VZZjD(pe!xPL#{07tV*{>*Oi%9DwOja;FM32v%}JMIqaUqs8$zB zOjK6V%EP*0o0rc{o+2s4TtXz?k>UUFC!&3!w~LIXTOCxYj~(J7bq*sePM}q)G&$65 z&lMzcwKq9Q$TKbQ?`f^wlsM(KM;({O{RNyY=`BVkrZRBnxCOJ>e7#o*ln-H z(Nl)e&lz)tW)EN(T?o;<=Oho3(yS{EuRiA2`1#d3MIr`gRU;|5Orxf9=z;g|^<4i{ zY9FPA(+90^z*6@Fe;Qsy)s;e7E@S0nhk%L!IZA z9=^^ypG9MVva)&kKeglMhBVC%HJvXgDNl+u(m*@T%Muj%Yj+nASaAp^*ke6+(cnfIB(m^YHbT1u96aB;f2KhVN+BXl~Mcw@a!%}`i>GUJp) zjz>%MjZ=0`pN1n)1zXEZi@&_Cti)a?_EizPG0utE5AMCG?7Ore-=;$T9nH%88;3oC z3XK6^fWRFkW@9GT{D$TATQ@^$r?otPLUr$;|>f zYUBMc)5~T)Is{ID%esIY|G7FsdRS#{d0@I;1*6lVjLnqjIU^i|HC+ z69qr`KYZR#ocv9wV2x-9;~Xf}XVM-4|1N6Szx(H0c-`fXPG)*ev+59(e}XyG2wVk#A@=kS3q#@i)qaI;d-Qn~OkHyiWPN!;aJ;nDDeAC&&8Tv{ zGq*U#vE!Qr2|Nt%bqjh_oRqgv*5Z5l^S&06i!&c`O4xQfC5UP(Us2g6WObdwUJc>f z!bWH}I2_8PEkhm86Y5Y2sjs6UQTqCtf?MEx!!AUu;ow--UL(TRk=K#1tSkTIW<%@* zYNB>MK`Z<(T>;=PhKszHh7OPp7B38ci}SaeG+&S_*E)aZc+H`<8pGv-#CE+~>h_ zaa5eL#yc7LBX5#}4(iAfl41lc?aNBfkC3__Mc(}bQKvxf%M+wUdwz|Jwlc4_5h-2w zlj^I%cHV=s6lCUf-%MFz7*sxyLXM zkS2kylo5{v-$V1-RGO`KbV*@W+k>dN@ml!R@tV_2Se}jhqa9TyU$)fY%l*>TSO2~% zTfSxD?;aPrCxwe1K2q6g7KJnjJW`sL?F~<;8s3+t`>;64(`LsGR|FI4`)y(P?kC62 z52NpNa~}qMWR_&F&7KG3??ni5+fTJ*_w$;6yk!I_Mt)3)P@%qIzBSDGf@%mg@qK(h zGZ4LYcBAB-L<`wflOVz1{YC$=gZR)LZh5_i92hp`*2GQxc8SPl5f(R1Gi~iJK_|b| zM6S8Ala`Rv*78nY+^Mb;EArb4opUMH z8R{h!%xb(bpc6%PE75hlVh(Ok&@?eBShz~{sl&@8LJy)?3=8zvS_x*kN&CMyWf+iK-rW6Dyo+T?{_z&Bjyo|<`M$L%N4ElW1*Dhq~Wzd}5p z$Yq1Jv<_|DUrxswJfFotPp}AC%mj6SdKjWSi8BL(wP$=JO^X=^{|0z z96g`t$ij!M=OBDIND?7})Si)w-y+k!zj`j0V4?8@O@DqzTI|-TPD%uYx0$-b4L7>tJG((?zpBL^CtA8ZFeG}8D|%+&YDhJyMS&h zYxAdV-?qAJh0`9T5ZRa%#^&u0IpQC+1%b0kIqdqiOe4*r5t!Z-gBJN0qXEPmJgnuJQi+~$+Km`lb` zx4C+wP;t9&^Nim1wVCd{@>*L4x=Xiky6&Q2 zzTv`EP~lVOdP5DxC_;uuYKM&8n4d|@Xyxi_B;VKB%YM*W7k@PG9V3t9s~BzuwgJvJQo{^C-!)*xmJoB^lKk04 
z?nn3QNVmM~h${vIzV{DGvE;9Vv%hvqf@bG?|Isb=eFdJVwMW#!|FFTSQ||S`p>@_g zsmie!laSV|3CDl#6R3;2nc`#R6Aoz@KCDtN+BwvKt`ubZ$?DRo#95NYVL2y1tjE(B zVLX`@2Q1T2EWBqZDXz_nY#6Ql>~B%Y6?O~^P|}={Qjz{tBUtb3dRc2)(j{9y;}n|p zH1Ilk`RMKlzrBCH=Oc>XeQT;TQ|YFr){qfa*YK18*Ul)w;#kL6gqnN7Sxx=^!T?{jxr& zv^6X&0B@`&kz7mHe}wjp&o9>^vI)$;#C@f}%ED^OD6G-Ha#oICS);GA`+u}_1=kzIzGmN(I(WW{s|KOqJY9>2ZhuL?#beFI+(Qh>>w@Xf zm}lkmt&?BS9Ca$suMu6JgZ z-!kBr(1AR4R!j;q;|oRIL@wM$Ys93>bEC-{2KH@Bwn7pc>y zLs56RqtHZMK=+vnB&_j=66k;rj$Y52$8k1a!**&S7kt08Atn89=LbU!wfdd5-aPv` zGKl7ITYE>0LCyRl;S3dd`$_+%_CSV7f^Vs&2D(+rg^hZT|IQ*SrFFS=nmh*s=6@mc z4W)MyfMIam&$;+eErfP?-BTj8b^ojb-kQMt>WC}12-6{{{MG6&&@|cEC?=ojx&Jv9 zFdC2m(QEiM%Y?`;c7wxU?t8+vI=hDy)_>cUQ04b{7kX5&WJ|kdi_05#qn=7<#VX1rFCKmkAyzIC}YVuv}Im zQUU%H{6Ci%nEkfIjc&*oLJ8f-HWy0lWef(HrIV7dJ zSDn|9P_Yp8?h+CV@vS>{M*~Yu4%&;h%8eJI?o7;j+}XMsHun^Uu%nwY3Wio23?8yb zOoAv6yEJumgoFIImM73hW^1x^%+~p7UGBGua^Sxo;X?)f=XsF0hRMvYGxpTV3hFcK z!nwX>IUqO@nO#S_@>O;cDcXA?Jh;q%AEUjQT1u!BtU>U9DkMUfa^v3A?hf(B^G}Oj zp(N>5{HyWobNXugjca_L`P^=_kwqa}Gzk&)fs5@s-HT-RsYoBO`u|2?$}tcdauEXD zDdU)%-scUxuh4hyCR0WY^|xGOh5sF-oUA(g=f6frtm?>}))#>+OZ*?%H?hEz=xX7)1M2WIV-v3=5hRT2kpxA3x8(YzHk^@TLO#DrTP-+R06XcV{C#p=) zm5y#SJChPjG;Z<(ulks`#OrcGN$l%Yt_QEMhz#=F&!0hf*gyc-N4qu${>MzIc{+6f z;lKS6F&3z&ju_hvA?U}iglPAGnvInTX|Y3oy2?2DVF^+$L)j8T#;v=u=2;(Hp?TR6tj^|1-XlZ_d?h80Ll1R@i z&8w#h`kqLF7UKKA++WWbg&sZ~55NqUQP{{KiFaWK_8Y~kP=SMWJ?TmmkbY3)lcnM6 z8Rq;QQBd95hzS3ZG+_Z#V;VnLivAJ6-}6=)Lt)}!kDGv>vNXQE^lmI9PoMsZr+4zN zZAF9!{;K<5K7S$u82fei%MrYtq41z%)w}B06TAL9H1qc(fmkLA+h#s7>!k{53q>s< zOQbrmt4@5Db^SX^Sueh z$^w!gVtt|s^?}GBOm^}DjfD7}X2;9=!l)CY!R>&hQ<=%XXywgeml5grJ*lo8C8@;G zh&G}_WLwRHtbT*dj`U4a5(F(jUs>QHXQ5LMn(s%-v;&lJxIpab!=b<;#-W3Yo(K!g)z$3wW)jc{?HK` zXz=fa*h%K(e)@X%9F&QD5btKE=JVCze>5NjK!$u^e}lv-bjQ2~9p%ob9z3sNE#o3ee7FA@dKN)!gvnEt9v4cO089;Cc#S8ZU;m@s@GKQ6#i%%Z zJt-#sU-$z6i;&q@S11{0;x^6tHt#n*eYE*HJ+~X!yUp+ON+V<5{f48=8^G+q{Us64 zMAwDeBa}V$<(rzzkd$9m=3itSUar9&hEV84{}b1QEpMDSC=w!h!L*-*h7o!>zg91| 
zHVO3>CxyP2qQ|;?rxp@`baA|T4^D_k^)`HHBG*^9QB1!Fe5{Ka(v{bmuM$JcM+{%u zD(AkO=1?LGoUhDrP91YaM&^Rv`YC@oxiF#Mk_ru)LS)qN{EH;zMTuLU%62AqIQ=-1 z>j)t-lrY@eCJ^tc@W+@D($kl5urW*S1)o-zMZAwaX2d^?DTiFy`yR+<PEJhKLzwgj%KLiNwM|Mr7gX+7)2tNMgk&XC}E=~|uz#L*};h`0C^ zP)7)~nld&*@vS4QJ4Gl6rw7jz>NIMU81YvgxJUY!>G9=dPHju>?%*rRe>Mt3@xVit z3Ix|LE)*C_Z|P{z2Rp#A$cVns5_8@=G%GuPH8jCO8p&=dfd!wE0u1|`b$P^ieSd=z zX(w#5F*zjnje?XT!MD_n7Y!q7YDam&(enNgl=lQ-V6qon*tdh3 zY9d}F?_*={SNJ7|htx1f5iwNhcg}Z^>8FMCdEGIcd9A&H(N=V4X|7B5Ei|vadnw{s z85}Z5^hU4JR+51}M`32R&pK$YO;q49pMZ)R?$HleD+Ah)EiEON7O*iMKq)N8Y5jVOO7d_-+tV(~HS+X{t+? zea?Imm-->y)hBa+_?gqBihCp$?REVOe6JqwfgK z+c({&A#RQtbE5T(mfi@EyfsKC5yeMBsB@)_7kM3paX`#xc{p))mb``}xVySwt$>p!m<>flgO}j;IR@ zXktmV0$vQ`0)$1s^Wd3gjCk0OU^+ilSA(ePB72V(4RzfIIq|95D3rvynt%zJ^NkBxRtOve6uD9=_~H6e&Dqi8D7i`TWUma3RNtB z9wpfZIxnhh(osmk-m5);T~PsF+&6ft?y2aVq-_M&z3Hel0OTR=J?wu$MqQK;S3iRU z@BBt_?Ct;GtUo>+w~O_2h$WQqkW|932Vg|5a*^TThr!%IfHTOvIU8noZ;H+tR0e&j zuqnjlu5`JgkXD`UF&E1vTjCRo*CFqu5ZbG4q_i-Tj`=074{~1ROyfmUXEQYNkd_=`1M48mjxs# z4C2&??3PKfh_uwBS#yq0*TSP7JK&!y;P;r#YSqy}8I0e4=o1Q0{gr|cX9MUM)hZcb z;)<=~Uqns9o>FCHzkOeI$O~{v9O&Oeuw9en^E!+fYWMuiY2Cpu8fRukkiN=q1`jST z{H7;Gn>r#(2T;R9^!E#0VZ}7!WdiPmDvw*-PG)%Nj!LtIZrj%^EQ{Z$ogGIo-bV%I zAZOrGBD4RC!uhR`&YWU<*M_8OMSgr?9WK@P-qm_og*2KC%|8AmCZ_Q)FgKPypEP|9 zTmB=4QN}Cj(lKYCd2&7~e;o#x_q)8Ba+x6fk^&VGHN=;EQ4wJ-sV6t=f8t&3wU4)d z)&#DLVyli>BhDm@1})GS^k!!8zswBqTae+WS?wo7kk?toe4a_OIKYXUDkwvbnk0iz z{@vjg37Rg`yFAIS$HUs?ll*O9NgL)O4j%Bg6J16pE>4&Xo#>awRUi7)rW1O(>V;cA zbrnW-F+sWfx_BWOA(A~LU3G!pzfY_5RFL16HW~rFxZ1U5qaB0klU0TeuV@n&;jgTn zMse5Sc(+}#9r$^I7re{IZIqWag#oICG{*dFLLhfU=4ogf3JP8tT0ImPdO3>~STqxE7P|2u%>rSM0KaI$A5Q>*r_ zNRzRN(PEb%7`t4X2z7-8#-x4plHi$dNvzUL6p}0tDV>e66^p^1&_pUgC_gSvJGy5F z^HU?EztrWel|a4>GY2pBC?H9%K<$eduXm^karY<;5X5hpA#E1CqYNA{aIh%}56oFK zDIV!MaQAdcD_Pux`kQ`Jfs2ssL3%&FWF4m`HT~g2(nJfwJ1uIMy-Y~Y zFvlpPuJjGtl_VQcJpK9W@-1txeQO2yt990=sLDaB1@8n9vACJM8oiyX(LBlP8D@cO zNgcYa%A*5wdN5~<)YDBQLjL2G!}?g?H#Ohl=8pO0epyE%c;D-6Cv8*(;bifq!M%)Y 
z$ymX}cMhn2Qoo)qK?7unw+j9Q8kcK{3I=1RxV?H??6Z0B@}o)J2Un{BBzS60V=yjt zBh&D%qAB@%@{{2SK~aM=(0A@l_4m4if;@jwpx+sa-rhjQZsE{z@J2F49huxXyrLp` z$)cJ78>;pRVa;kCpbH;&a@_zO(YH#_RSGqE5pjmNVk*gyu6 zCB#$yd~eb1T`*2QYyZg@OiWO31l$?CleBY`*DA{f$A6f5Bz}oIQQ3d^?IhPA^38{9 zoy8ckV-U^QsieozF))RuyNhMS1`Fc9B?!A!m~-xxwo%kaAuM>|RXA=`_gF%f{Qb7e zdK};p#r$W)5PU)q{d~<@XA%UQ+w44c5a}>hT z$nUNFoe`;ZwWnxKLcv9j7(c|h#mm_E&$Wg`vFG`<%34$>Z;#Y2H729lex*U~C>(I( z$deH3zDhf)DIynwXzxh`%_(-$J)u?Os*^S^C=%DnVu56}vxghboIw zFE@{fkktix<{%cnM$KWFLw^DnvM6g1?A(mQPydgqt6-~Z36ek{T-@EApa~G%U4pw4EVygX zi@UqKySpd2ySr;}XD=`B?SA_YX6E$tbXQf^iG}x4%J^Ems1gy%9d3^6Lj)n~E(faG z|M>tw)CJXb>DIeEIVp2)o}XJ#xY*%zaP4cn4?jC=#2J_%ImZ_#WFGK0!~rKOa+z}= zX6Gx%>?57+7$y!34B8exNIl>Zg<|ktI9B6@9Nvq06QH-xT_XN@6>>f#`XSpy3+w9n zqjE+|x3$S+hd#vT+BQp-`Eacsj?T^zwX|mO{--w_x;9XHipbWU~!nH5CM-IMDd;*r&jy4o+-N`z`P27O}<=a z1Rel$I}1-wp9;&6)3|9>599YWJE38!z$=vw`0klw4Hra@i`@BLj~b+E>8jN7zWQ+) z3Ft^-QnVn(ZA{$kt%{lUCu4Fhgh1zf!2Xxl8Ignt=XudzGnKIIBMEVyN1Ql<_QaBh z^;LH@fJmzQ*@zO2Y%vDc?znAc?$gK$0t-yu3%0s7jsXI62(xt(=LQg={!(%)(qcZ3 zM1cO$mvuit07q^5vq!POSC;yf(KVc(Ls;|c_ zq))}L#gM1X1Tg=0N5-GPRd(*wcWSSqrRS%{UrjkM#u8SK7(m_(GE8*{VxyRr2r1=eByl45I5;6 zOD( z+VzTVRPhP|pK!EhN9&vQTneivm8%C*Pf6led<4M%Y16yP$TrFb-Y!MvC#lvG85(ZB z-XKnRFj^tdqBSI4eP00$R{$Z7mHh5_!gg_ly?#kzqSUdp z*Is%wjfc>zb8m4S--;Yt3t;4AUe=)+SXb-abrIx1?6MuAu=JQ8IsGgCvfC@i!)~`j zUGvjM+@Wu7mL943N!(ieu)V_RDkaq*wBp-D5zqL8UoYit%5LWEHD+})YKK>8N~?W_4-9h?^d4&WE8sLf{OBm4E;z&rQy764 zXh4rSd8k^^7N+*0(^fvYiFd}V25IhlLg`sz;zOllgEMMZ@MW1+G#N;>D*`;-el=`$ zqj&1X7`yEBo&wb2FjHzerDF6NRI^?$=%O`R}Tjztlr zB!lBDy1yF zy2^A!mqbM+nA`P^5)=@+EKZV{P(*E6G=?=zM@@_5MEa^Kn2Vvq_PfIpzLudr?flMm zBU+XIT5(=Ww4wL}WJe{z)A{*RKLiXC&5ZLtpFu({YgqY?I z;oWf}olTNs^6j62!OyZ9WC6ocp3;F16KTxjZWcR>gb16nXj6R3j(>KXWCK%=HhsuQ ziGf`fAdOSEMbmInt`x+RW?ry|=iW{xI2Y=-MtV$cFZWELR_Gez#$IT#Hi~vP{@CI;o_P^i>+eCm*Id{k<`spCx|~fJPkVT<9WnqK6zx3%1O}pzz}N;G z;H;}16hBOTo19tF9ouybtUNXwRg)LZbChjj#gL|i?aQy6`k-f9IwT0!0}J+Fki5%?bhZ%au%s8o+oF!ueuQ#U7JiC 
z1ZJsWS-T|+m;A&jFpzg})F~4Dx9VbDOyrH2O2!--m|6;y(k~Arc6rh%Me*ZE6-Gd+ zg;J)3$+7LJ)IWC>&2!l$Bn-(4iuo%QI)ZVBTUC~cKAv#CLShYY5f~?9spmSWu4@M{utHVL`)?X404G#Pkw0@M-%c&zSJP29ex7u#NXyGAi((@q01d zN{+P(ZpSOBZ%6BRze;;vu-@KsIAdJRA88+kY>9Y|&#oW9cp`a!Bt|BI6wISL$eGVr z{l@ovn0uW|rCT7BuE_WKH>d2x8}~z=%D(O6LoHX4vQqVweW!bx4hMpL2H@ciWm>;3 zDeqx{N~OXnY2OiJ*uR*MPgurr5ZRw&Faj1T@S{MB93wQBdEOvNznRnPLS>R_5ywol zU8@4-$swmGnSwk6KAp4}%e42&K}d|qb@FUtNCoH5U8!FVk0P4?eJxQCEV_H^jo}}G z8EnQd%i0OGZF*_IH-Yh^9mIWPBL|A@a0&jbxibN@ab_gY9JAdHQ6rf#RC{tSF`T1V zpTb}FdP)4msn(^ESxys_?&stivA#tH1lQ=j*^f_xD^jjHT!q)M*81vrJXW%7bk)VP z^0kL^G9^w~61Dp%EJsf6?IVWb8uV`Ov^g(je`4S#20&gT!Kkhp84U#PXE572W^uD! zli0ark`?N{Xus00G&?2JPM)keYkz{fbs{KS(`nwy#;0pO5l$>_dpLSB_z^nPy@j!B z)UqV}Avbm}8WHM0y(kcyqO0RWSwmGHoShaUag`w7yFf7fTHZF`T65!sdjX64DV6I0 ziP12KXgaEn5ysWdZTp&m}X;-^=OU5&TG8)^(-g%Xj2$D2}A4G+Pf90F`CR5Z3_sPk3!YLHDQ5>8#9F`4 ztx+9Pjdw^bP?UFVVADaY5u!?k7(Mq1qz3RVZ+OA$Sl6^Xid&loQ_YB*riVEs-mfS!qHsqx(Nw2zU{s6G*tas@%llu=A&(!VU;mRn)pRg5n|n# zribe10xqXE&LkluDe!%tA0IFF&$Bf#Gp8F!xuy6#rR|$k+M=#G7w#RL&YU7}Eyr|P zr7g}Z3J^nuQDwES_x1`8_+@!lJ%{t-@L}?Zg}$LIERzy|mwXN$i`9&Z-HocJ9M7u@AfiHWvxj)4(y3?NU8jFwY3SFAA?z{qNr+MX3U8W(qN}{`kQO(#=bZ+ zCY|&C1llk2S1f+n^fp3Fw~FQk@2Gjzk6cxXlRyecUFXuTY|QoOWFHS3#uPIMx$WN( zq8Ui=a&t?~+orV2Nn_Vm(a=99;ZiO+jU$5oKeC4SH_U~T_aw^s5QG!{$R`eItHou{ zp-EV|$is1j*2C)b*l}BBX*;QH5SrLNlSVf-n!tGY2o*Fq zwZL5)ArXqOCW&lviM~iP!8?fvNgB?75f3FKeZ5j!sYtdvu~b@-r+Ky_?4L5*#V0cq z$qYjr!O;1D_&9@bY;u}{uw+}EU`hVHI{)R3rno6hGFJ3pHgTsvl2F6Qh@$y%@?F~H zLS|E<&3V~eMAvULYmP~0H!vmdJnLa)*3XW*26jvP$D*U{|8TEr z8E<;Hk(bN*#n@d!E`%KxY^w5$6%m|`A9wAYUXcw%lbu9p)keTyr1AGF<58Ae5l6x9_sjC;&Y{xe*$mDxOZ^u#iphhnD4Zqu`H~I6YhWvA_C4OPp@_y@YwT)&M|p^ zf@3r!Yths#v)FB_tRY)bgW3<)wwUC8nNEnp{PPn5Zel3I!{8Jyb5zHVTHgVFmuzVV zR6s|K_tar8QAI(ufpySCux!U7zkg{(VYo)xV?=MO>HK%gb7glZ4;N*^*^bLNm3^3{ zxeRGG^9~sHKW~Kn0p-FtiBqGqSA7YPf%Nb>n4Z9{Ye3OLODU`tEu0!hl$pH-b9;1K zBk#WFw=<^WrFH_J$DK}c4UCJ^TG{s@A}v=jaG7^*1AXb! 
zue(y8PNI=jFl0M~-Ax8jC!f_qy(pv4ehdFsnhC=J18NoIJKQ_WLxo?s<6?W2DeSVl z7u(*();mSraH$$!dE8tw+aI`6&}?L&4lmW$r}>&Gx^+GUQ5~g|25F0msD>OH?E*D1 zFNEh4Fhd>)O)MirT!qAuFA}@*j;7WV2#@fvbfk4V&&@zHvjz7%SWfhr42 z^g3@}9&h7&S+e{kzsY=EIKnF-)8Qb1?@N zANJ{zOh>zwvnI9|#KW;Lp0-i*E-Zc51%QP2if(3pBDz**pwSg+@?DEa+SGB0U61N7 z;Y&|}eqTRmzPQ#HPYi!H#bH0pQqw&jGxmgnq2%|Enk$kv|eZ$MMO7vQp1;2{1>C{{RQ7**zxwlIQ}v=?5GDb zX?Q6uJC93`B5SHS@u<9C)EPdqTwjM;db08S$#EIOMctnYD5Y0YW?27lJ147wb7Fpi z>AP;zO-eHsuL4y$|A<+Wl&Pvx>fTs9aJxD`WpQ=jHAL?EBTr`C%(s_{vQG*MiPkZS zpLh=7r=*T-UiCS1f!L;In4D4}VcXTwn+PoXTe9eb&G*0nq9#%S_}$PS+RFQ05%eHZJS_ioFaaSV(ifuR;~es5}oJUpLo- zGSxC^_x=jYHZs7(1hH}Ai4r>4k;bHBLf({e9dhn2qZO5G-wduVCz<0i;`oV{KFd5K z+N@KYqxoi>re8zlk&@WB{629g&MG?IEc!0qO=`1YQF>7F*|!{`AkhU}tTW#JXM6Tn zfH2{EkA9xPpM#9%vLCjhsuF*`C~WQVYW6sGIX;DjhZE=HU;T}sk|P`w@0iE)(q4q!i@>OvL@|?7tX+x1kXF2W6`kGw zEqQC6ZV-<9SE*r41>Qi1O;D?jd4KP&9it-}tUk@%6qqNRYdi=pc;+5a6aMhKb5?7t4Pla+`ou;rK~sVS$CS$>TD3Au~os6=Cnn0 z;BDK;X|S{6TSYfvGl7Ock>X!s6De~6`E&S4dEdn*kA;WG%qQ4YjSYxvlPL_BtB^g% zhvZ^jjF0m=A+$RbCAiymyv24tx7&{k7+mR35<7Pb(^0QGA#s^F5T9d;d8E=o33kH;BoYwBtxsFc@^X62A1uwdj^{54G z@r`AkFOf&CCozKu@{Apc>CR!*s& z5$-vecf;;|6Z2t2z#HAlZwMyS2vBKDe+>^yis-ZtN{;eOU4C)Ss6#9pU=FJJ&9J_P z_9&Vc=?9srugmhg@)Kl>g*iBpR*j3KIx!AKvO=+a70+!s!-q{@)=6m)T5-_f6}2V{ zac;8YCgbmlmbYGd{)0`RxZ>Me#LaB*YuqI2$I_V$h^S~+Ir@hQ>eIWQe;mv*B>>JAWTq8!d! 
z`>0@Rb;Kjvdd$`L@F?8(S)*g`xaHWWoh*`^o;AmW$X z=1{z1dKNu|>-`a^Mhb$v=~ksje$fIu)1rv>%%wsgV3`jby}@XTkmPUlQTVY0EVss* zNsw~d_-U<21qj%CIH$QnrZUAq9tY5W-yuK9`)xZV5y6W|UGUJ{hO0lAA7$KHA}2v} zbX9ALMmhXX$_Z5QC~h+9A}X$Pc@XP_b5)vFs!En=k@!;!nHLd{x5cJRoQejO`(Lgt zb8fWGvGk4p%nZl^N2J<2Z{Qz|!BGdWO+G+uzAv|Fz+eR=fjf;u>d{QB1PtFta~3fr z8a|XT)2xORxAAMzUHA=^2087t_K;rLfb#0IC5BZkOn^)LGal48lCRc{^}(OEqjAY_nOFzGwweLw`~VFMRfGP= zKST)9`XLXjoygU^x+6%C1yZ2!JVX_KDk8al%=DmWI`1gFaKY?E-&N#GM_XGI=nx*Ys)inq^~YszC}OnA4C(St#q8SdHBeb zX(*Kf1U|vF7&Y<5pSvq{ps=rJ28o+xlS+KlBaXnu7%uK!p(s#I_mgUH1ntf`xVIvOvnTI!!YqG`6m zhIa9@eaiVfpFm@LZBODOo#La^Ch-68ekk9(Goem?=sAr1`LL!MebWK6P~x3gUon-E zV<#DuhzY4~+ED4FDS~J@V){cnl)nE)z|Z+4vGN9Z@{4gG4mR4CKBJMViI9iW?!hzT zG}aR;VP_Xo`9OHtl3CM$ug*cPJG(aMeU7znofLo#4`IQyK1oZ|heuoLGfD9n^c~`a zIwTgB%ynr+;I6$cp7&}5@A~4l{R_AKH-vv~2|tS5IK*x(zxsT!owb<94281_R7s?W zh#lO<(dD;ziilzRp_355N2~eop|cHLAU<7#sI2`S(WkEqYF#!|nTZA|#|S)ELOkv~ zX|H>sVI&CX^)bmO6GT&9pkbGYQ2{|j-zj3qv&TZ<&);U&ZJto1GX^q+Zx0Q#&=~6a zoE{=l%X4#(M|d?*OA(maBvgNChdRIoYj?Eib7iInShWavpr}r)_NjcrC{bs$r+vTj z^=Rq5bn%aoD54w$s|HdBGf#}4qnGg=Gt}v)(ua^ZeRs8uvUZpKmA-5aW#~h7t7O&@ z)a^I^cE~|2@2a>Z0w6aD$%W)7%;fnJ(|Y==pD?m^;pCChKoY4c-=nDhyg5^+i|tl> z+H;Poo9J)3y*7~0;=%i`W&wE{l6^$Sx{_sFS5+Nk2|T6)ZO?dzfa6# zYd>m|x{(y$;&cLQaePs?d!TBC2IU~c^}+oR-xf5H>(*KA)H4KuVcSF0iMYn@(%QJT zl#35Znhaw2qurf}HRS+Rt~yeM@Nf>M>Uu~|S%9sXU=m(-61TLLUq;A)sylO;69>DI z7~@7s7{U1ih`%{c5@17~Ca+2E$1kE2t0)bUF`$^(|G}617Ipr4PhkFOb62d&J)dzr z;XAemft+Rh=ScYbC33UlY6Xf3dWtnlPzy_duP@Mz_bE;c4|D-?P9p?#I$^46h|!II zc*1Sv3)EQ%iAyKEEmy%0L+2)%ig^4v!+o%G0#RmPu&iYYay)zf_4V*3p2+tnQzA+{ z#)^j&-aUGr)hUNA((t$@IYIq=y~x_+h}G|-=5>Z=trohvdK^(y5R6%=P*}jLflRg* z`%oH{hi2WPAoE8Dvao(JbI6cTQ|qw1As;l|#ie66aq!#< zJ8ww2oky;U%9N_rW}fu)!nnh`qWeFklyZIMxfH2hiW!YjC z7~qG&KR2s&V?vxS@#b1wG>w*goQmAu-=z`G8$PUbZ*%mIz4Z2Y@PV)$Wi%Y( z+|{F1L4ZX62X6slQ%+2hsOHQoV+ML81S`n$i9}GELrbg5=wKi@>Gy}W>E+^o{@Cgz z`%=v;^eP{0B=<@<6^lv@0>U(e>aX?(&K*Q^?NAptYd%@aE_jv%Bi1!BI4=T{iII&# zC-d&maX*2&pv~C6R7oGZx1CKe+PQWE>tnJMP{mX!_{~92xWew9?RcPaR8y55;W{s5 
z`!NJ+YXG#+A3zRK0{-COt-pLGzc7k;Myhym@ug5*_7A&M??+_Wv}4iOjeRv6cnuFWyq&UeazY5$1C~m$8N1nl-Tg z)G#oY?cJ*?zfKEN`>ME(H3YwKKSBbdkfTza1*g7pLIE#OH!!wjVlBNi(Gpu&G#;rZHYO z-R6tp_T^Keyah%`blGoJJISitiM{Dc1G+~HDES_Rd#Vm%FbC)5|$<1e4yS*j#K z{)f_Jh0j7blHX6c(_mBCd1=b2Kg7qzqohe*U8=}QVtV+3>B`oIhli<3zf#MOCkt|!KATvA>qKMkM+6!Q!cf1) zbAo`zZS{vg#`Iect!Be+mgv3{KCv7*j4PxV#ELC=%{X*Gi?(#d=+jRpGgK^PF-rw> zf)&3+Kx=M}vXfd9`@K700EtmPi-;HT=+uvmq&WY4?&?(;er zJP|I$$x2nd81MC*viSK+HqIyZ_Pu;Vs6w)9CQAs5Ia5$OjBF&5m7{EvJY>J$WwHd3 z_1A6!ScN7>v_&UQEqMCC++CwII_YjC^Zg zS&$@+L|mN?((p5b+kDcwqCN2 z1S;>;ZT8P~b5k>1qSI7SPY5>t+U*gS0K(vhPfdW2;DB4hn@zNbhuj{@0_lNSyBmv? zu_o6;g+Ezd9S_TyOLkrIehp`dD1K@52+ybXEjRI(LYDMtR$d)-s<5yXrOy}_Tk%UY zIY_`mDX*gU#*%pbzV|EVZzxbuEzEU95|(-lt}PEPL7uMgo738zx<)&J_FzTtrJEPL z8_UfxXmM0jZULGo2FmtG*T#hWYHJ~1HWARO%{S7NA9*1eUCFF3BhV9|bF2bcj9T)NRoNVlCyg2wtTT%an zdrjn=F-@wV?Sg@h+DPCtI!%UJl^_*kWBS58ROAB)1Off$L$xa!J1&NwfafNV2{{Pz zJzRRUz2T=xFLYE9cw9CjulLysQWCW_P#?w+3ctFtAku)}I)1PI9n5>fh!MbkNRo0V zKqG~L0dcr_>UPRh1#POTiFJYjazgkA1Hyoe6XJfUB~J6X3N$hP4OwrxzV%Gf!XlJ1 zhlDWnW6nWkG``t$-EW0qf(_jT(pPj8<#S;}Ei0}68ar&Gk$^$uD7fL=%w3<}n&R&I zK(p*_C5Q!LJx_o`7<05ifQXg^1Nn5_jPWyM0;-yqbE$Ol0zN*-*;!p@o8jora}do> zlV#n0O?eb6n3wmfH`c{6u66%2)vsTXq)JV$JeD`JCf7jY@-SffMA!2oXov+#^*6(V zqsbLqgF7Sp1T;&4gr-L+WOp_pnNdR)1|ipMSwk-zG-BSOxejB6&tLDS14~Mup-y;B zQPkT#Y-hf*)Fwa;b(7FOatLRqjDMrP3df1b2EpYZFeVfs1S8^owg{B*5u9O|TFASQ zuHM`ZB(|_SerU3%bg^5$O!ZgS9LK2GFwb^~D(aqauwPRsVOFwyO&&+l#HgAkji*c5 z1xNj-2jh1@(>s5m@w3WP!>pR(WD^tRaVc$xUQ=bKCML3oA=;u1)v`{%+LbA~7*zl# ziAUPY77b17qzPM&VH2?igEH_&$!35_N*8RuIzmk6D|&)J8G07bZx=Y7bZZLr=>k zIUT0FS`%n2FJPCZ)t$u7hoV^PSt#(+hP7ZU9hD|D{j04$(u^6>@%b%2n(L6z*FY8F zzurVao)DPBGxVmL8Oo+5N&xuCNMtd5Iig*V#%WrET7EOt^^Xx}aAxF+nACP&kAMi4 z!X#e7v=_0Zs~d4)Y62sCUbC*{C5w@OevA_4h03p|^?T@c_v^5O?E*NghgmEdU;aE~ zZ;-3hO0YT|v>2lv++&XJ5SKX5NCdV1piT8FNHL{L0s zE~Q1d^NJZ>@o{`Mjnm1zl7kIdV@!(ilPYJ%*Wkb}g}WI28V|`V@mQLf{(wT0FXdXt0CYW z#84pS9S;Q(N@Hc*yvSoxD>hnEDWOZh&NWb(qmDdRORImW0se^`q?89;@7f?RM*XmeA=MpmJIQ|I 
zGmFvR!F)qpKX+X}tCTPPi~y0V*P+A=&(FWk?Qd8Er^`SFvbdaIJE53&!PG3BK^1k; z15}hIa$F}EU*Wd11YJyTnFbdxaL(mn$GwUCe2UwKBSWEX zkv>MVPVPSt>C%Tev<$&mh*ee^bdAa>+%L&Zt~^fGEI&$r0NR| z(TngB=>bFfFn6OW8KSwj_%raVNDWT+r_jJ&%7%>iOiX8;ni0gX7tADMYElSm=Z(Hi z;=eskUw^27;+hH`T(Utd1Snb~8yp727XA{%lLi;%m8w>0JJdFg- z%s53po%+bSzJjR!C%ir+H+)_PCF4h9G1C4PkB|-x53befi;waRE*&iE z7U$uFM2Lv7J%3fX-s#?s;;)@6QRuE8o3IiJ;5cSkgZ0?ib`ayQx~s2%JpH)z73Huc za5esI;mjw~g_~t{lsi$u$pOiVk5#$iG9$g(8oYdB=+&}NSiI#n|O$3qE=S!o=PDrt(}ywba_U z`#?p}5Ef{HgjSU6S)0EWjL1fiBV->0xBQx;3+X=h$$H*1O!M~nK9GzMwSYP?@9Bx@ z{dsKzKBS=~fljE+M+*E#;DJM-;gM?Lbdh_F1aH(2Qp221eO$#I9v01)+O-R3{26GV z9%vwk7#n@EzYg&2py#;F!|t+oOtY}S>H%3-o%=9(O%pF}91+68wdat{diW&-M8J2K z2~s!b*nkw4107XzX3yHQwu$?)=t#~U#2|ZxcjB4}kYQ@RyR5+Ao!atYhY zp4raQ^qRs>>I1@(pX+eBC+6tV1oy_JNZS<$Zs4T++tLeMA~nHJV^j8E#I!eNW=QDC zd#a(nrzrB>QkR)k1T5spL$MEY^u>4u} zTzWBkF+5G?ifPvJ1PxjR|SO$iO&5I3@vYJJBvePAvu9=-pGjGaz zvM)I!&JY|Dm;QPMJtPkd$(a5vh3RJF1LfdI)-3@Q)Uj@WWzPL3$DCq8Ic;^7vvIlG zmRlPYUh3A9@PA7-FbGfNCStzC*Q=J{gY48fuCk4%N&n>%3yQ=fatsO&p^nsnTISB_ zA4VSar~C*#aGbxD9-6B=^RjZ96YLGe%gu@DfNBopudZ}LM&l^gqmi6y>YAI$>A`ll0QT6x%k zVD7_!DJDF?GW26Ho7MEw#xSwP7rn_GMOzx@8oG_P-F!RSZq-hu6=Tj|p|5rt>VB3b z)0;sJ#P0?aU7fyFsO((CJcs59`VO;O?j*zO5yNt98IZ>(@hRu#B|9P2g)P)@2*5ny z2mJ}N1d;;5FR-+*ggg&E>reQzQlWMheXt*7df*l8BM#G);{3MKy5ePN(q`35&pp}F zcDug*E#U4!mIu2u50`0+Hxb#Sr1x&n3p;E2(L@IoaZtO>Q??h$I8Js4=iEN>o!pik z2cjGrh@zPXppNsuyykFO=0K{H>tEOEGD`eSn@?ggRH3} zux$dBdLV}Jqh)(G=yC54EE-%A7-#F)y)wLJq^}?~I(X48lEHJ!n<`;qQ#N|=2ume< zbLQm!vgNGF@{TdQcg^C&S%Pm5WN=>Cv~P-%9F^8@n`Vb>2M&SP2ziWfAWDAV<;kF` z-)>gdtW7D5T5P!Dh}u7-6NO6a#-AG*@D+r=2})zXJbm-N8^XXCl4JAYb|13jdi7>W zjSo()6SdLAfg&N(`<6!aNSGYeK`Hnp>eB34bh-5!^gF_Fp`t|0UA17m%LGUnx1+*} zXCkXpOrz3}Kj#wv4rwR-f}COQn+>!sGuR9Q3mg=E79k+dueA_ zgokmprY!X8T?fBJ2M1q8XY73pGq`E}5E644WZ-$GlzIkBz4;03Dl4RsOcFO+^hc@y|2K%Q2A%?oYaL!8 zzq*07GHaIyZ583PYU zCQLQ^f(V0sX+L=_;Dy_5?&+1f^ZvvIrsG}O$H)F0Yboi>(XzU7vzP~0{UtDpM-ZLK z_H+NY0KvxFk8|mb+?KJ{P3XFD6P#1pr6@IWOdvR#HK0(Z>o<5m%dI)p@8Ipe!_*n8 
z=~Jn74WUlee!sfdSw}yQ$M&^C)eb?8>z~2gJ3SeKZ6X_O(rX50&)sP|Y-_Ej}c_0Ik z+Fw0S%{3NOOCO&R@A~pb*<^AvpW@h;#KgPL|rCLbjzZF2{BJc3O!736$#$ z@UQz(fe46_&yY!W(;U?+mt}%Q+LK7(sxh0vZ=CS^ukMl4XEyy+z}Z2G(6;-23%xIw z(J3QuvZ)0e_IA{LmgUezS|9Y= zS8_(G)~eEo*ZtF6#LZnZZS2qoB3o&A)sMQtGM>>1`UU6xz)1K3$Pm%~T_8%k20VWE zk96<=uJ(bLuS!NROq#F;-{8JU#!n+|0YXxepcZHPO27F?U4P?raM66^-ZQ#4VOI(w z|Ii(Oosf*Yo&7TOML~V_iHpl&8>`P~PQ2G(7DF@LsZU;Tw{es_RnBkFS;J-ifzGS2 zN3WC%PnRMAhbJ@m2hvww9G`ZH{K_3S05RV}N&OE0V3LP3c- z&k<7DH*uq-)y?Ro|F8h%fei8QmcImnX}FdsyQ5_p1+|9!^Z}1Q&v@S=9$ZOQQ4|`O z+g3j6?h6kh{pap%wr$K1RNrJkbbEK`5FH%^i5zB9me`|}y3=^OTx%ZoS&>NXnrW0` z)~HjxH{z7uyl?7vQ&;+v4$TnXB`>I^KUGTHh|YALxSY_L}>F8=z= zVhQ%$?C}XxW<)7|90reI*v+k6W~)`M;8R3!<4qC%KVbkm8e|DT*}$k#Z!_n>pchX# zLj3s}vS7|sXI@n0M?`HqS^d*uM(eZ9^QRHIKN_yS81W>KZ34uQAfEH&erdYO?V)KW z#0Kp3tcnGNu^1xUg5}R)6ES`uqEyq>roc?up+NusJu;He#o3yNjTJc&;bJ?^L{nl6 z6H3!uFV@dS6-*{@5%;C;W1fq0F4gKbs@LNn0ynF1cBD{6Rk^Znayh~@@JM-s8*tlK ztKDq*LNXAs!CoRfUTxq%8W z0{qTWGWj(%Q$0Rve}V-k_wV*g*(8~jFQOUrOE@#Vzl$8}jXbOTao4AB zO`>MC2zEe52Ou_$narW%=c)ei+b!g%phZ*P0z3kO-BtKbz?!fivbAh@0R+v^L<)A-B*x}83GX+F-51VVw{Gz^P{7m^(jkW?h%qCqf&Fn2so zL~Xab<6nM((;!_SDaz_X)J52_1?AGG)5-dI=wsC&g-bGN$4u>&Inp88iBUDmQ71f8 ze+c#?KK7A;c|QeW$Q*3&Q!uK*a{50ODn!3X$G9BoE=NZ9o*^<;;BlI>v7(WM0|zwa zv__UaUKBzHj7g%9oHOfmp4>>Af}YD|Z$O9Hg#+B6g(D_#Iw=A!7Bp@KzTMlqY--b^ zm$zb_#kqGnV9EB1VNtwo-NP#FNVvt@FMHzaiX?`3Muo=k5I-Q=O}bx2Rwn-&7XVs_12@@0LSWu6Zf?g+iKQL;*w1a%Y@l;+Ok>kq zI1am3H~w_rPDTY*{q_()FC6V1VPeEyvM-hT)t>{r9^+^TjB$IlO7 z{|o^+lTcq%Um-0V zK`LCZ=YQPM{h}!3V1w3|@X8@nOPhXcs)Wg4fdQ3n5ajYlstJ7TS8wHEdcLHMmlqqaYE8*7 zbq>|0yF=#@r?xV(M+n$pW~t|2B(5~K;6hdx7`Xm-|K)fg#7pQ<$G(1Uu604*jq{`v z-?hSiG#w+@rDn7fm&ShR!5{Q(mmbipDsh7C`0310go-6(P}nDb>x8RyOkT=u{Uy)N zkDna{P+SAzIOKFE1G{Jw`M&#(rHi?C(zPO+4c$jvbVok;IJEm0F+Ap}tObRJHf2s^ zhjxdjx|hWB9c$7?#VZzN~vC+f2 z762uo_ZL1EkQEP3fd|o;s9F`e=b1|S%L$8I|N9_RH!6$(_15m)YC7CyU*Czq{R%7E zwkW@I&u`jjOPCEA!XYZixCk)NsMV_ABtSI<6rySK0YC`gjzc7osdT=vPF`osYxF7vSwjuhOu 
zg8kI%pSmYRk`q3MmDza{6%}>u1oLoqF%)tBElv2NkUx(tHNk#1%(*|qT;CNqqNGVJ zoTA9&->H+51g6voE>Ygz4m@k9a(Ow6Ty4RIWi<|L`DWDg+fE;S8P{@v~D<3 zJrHB|YV6;3jNI>bES%~$7l$k#UB~L$Nt1ZGcX9P+Sa*}o~ zS$?L^O*XPqF@ePMbW9k1)svjdewni(K&%{bv&Zyu#-N!#alEq5Wmt0}F&ugR^aoLs zJ|^_=wyxm-o+R@`l6emk1K`sXx4BjG7JBnY)kGj$VY8PQ_sDSQ8og5({E}BX(? zXUx2t`-piP6vPo(`FlaASMM=J;lKas3xZbGiuvA9C#LJ{`N}=LgviCH*G!e^LQd9t zO<2zZO*0srQOlg?C9o^8*ou_wry`gek{Z!O>5vYe4}sbsCK9y5%;}EiQmd*f8*NS# zNDV~qZn%UE+ejrX=Ws<0HGzjFhQl|KKfh6=xbnJdqCNU$IS>$QL>X=N2TF#6KT^?b>J`=hZUZv8?H;mS{ z;|TU~+px9ok9lfUsJ}`shanXI1_P)U1V-z30Ax8GV-yBn&{q#ngu0tDxK!8Rb+-3j zI2BYCyiMUb3T$NWela=|M#F{jMx5`5*f>cnv-xS)Qma!bFglNe7 zeQ)_*V@`%oOOEsNm(L`=-gZazu)fStH+jy%VHk@E@*4IgJodi@A2JTq@q!A9vXbyn z>oct~^6G4q?RMBjc>7b)A6S<*YcapO^jb33?TRwxwX=u3aEn}$sJ z3(#)6btfn7r*vRtcjNqCFTOfN6v&V zPx!uWvnSYTkq#TypwYnd@vN(mTTh*O-&$_kx$C||J{1WBps^r9{G)^pUqK#=isd#- zw&$PE)Oz}3x!i!^nN;L!x9`qpOHm~Y8*KtCf*NXs3qEhWh$`U(ft25?f(-teAqlo9 zmN0<2&#HQC_IhDieBoXv2*Cg!4VE-MC%;=0SVkz!*Vy9ab&BC_dtS(DbzaOoe`To` z&98gyP>uJom!= z!lx_fASf;m4UIv~x+oSxD5`ZVm|E$e>o-IOh~9+5>)uXgU)>Ef{nX6@g9rd;Z^&tK zD#@f_#~^c`vD2V))(zW<-iXFQy>78a7O0Yl{P0{ z#Jk}=oJjwXW?{!iLC5}t%zB;2F#MUCEhpdvtuwrMUNkU?SdID}sSV=$Y{myvy>_k} z1XL{NgF5Tsa%1dJT<6?`>1*jJ%todELYWDWf4yP#6-SdBCr>Vx)4J9|7L@#H>;S5K z%vHf75qSm`LaIk!lSIS}wO1#hw+3Waiy1E7nu?3>&K;%+p};(KrMcPT_A#H;=FKSo zfiadxMsVBSL-^%b(>N5)0aP?x!6q5S!2#%`Er%Lo+Z_A!eiQH{R0e(~u8!Covo*Im zz?=Jeorjv=mYug+pDV<+lxQ|;O2C^!&-4#q{tI}GfiLfND8DF{wAxdo=SyPlq`^ky zc{$%E2Q9jhbK`r$m%6!l=-)_jFF=Yne-z3w%U|C_V|n zP|3^B5~&RT!9sCh_!mEWXASsqrD68~j}eJxgF-=V2FJng8P9$BOOz%`6GLJ#j?Ug0 zk6U%CBjqER{(>Dww@?Ncaw$g;OqqJ=+Q})ab_D6@g!!(44}Mh!?!p27gK9Tk`D#?N z$82*C8{2bVcynjcUDLCcrn>LA`vgt)!v97WM20-d{gGrx@8Y6FSDp!hNe6DQy#Y}= zx^U@-@H`182bZC5g!%*bqg9F=Cslb?rC1Eh_Fy_J>Oa#$$fLXZ3w;8HVqXDKQ4eH! 
z9OX#1b2a?HttHI8k+3}37rJKkDd#?g^O+yboyW3|?oYem@e!1{@?p=#JInYztwU9+ zhqs<{?c6Fy{TJ&RAd|am2F+$BCU1RDDSw-jC9%$nMuMI}Wcwy)0%#xZ^mAX>Jf!bE z#qbQJ*j=DJHfb9F;Pq?Bzel7e$X`xTx{I<%85zpWBT&k58^?-p{;@8Ag1HU;A@Pp6 zH2)tDReSMChi{1Yk`zxjXN;WJ;{&bcLp^e_y>T6=w*B(eW|dRE0C~j5eT>CR-AzcdLSfv-u~pu{z*+e)(doe5dd&bbqi8pHtW>Y_RGJBj0ULY z$C@CbGL^aqB+9H>F}%*U*Zn=s&dVuh{{o7CUW6Qgq_xV1nMGcMW>i)o1g7m66En{6 zT*voxbgLNqj+=}TpS2PBTo{Mayw85qK66`PC?NgoTO`Qn;u40v8AgS8*^zi&sKjI= zHQe*E06)QAv!<1f4X4|w93>U)#KX23i#HfhW_7<^eVqMU`DA0D=-F$C2 zUVo^>mrtI-K%OMhPK+wiQDtBCdIK@O6jy7jyB&S^2&6@y1u+5MH(M(zZfu1AgoHF! z;alaOQ)$F`H|vr=|8KC+IjThLCNUu}CkVB(wpM_wYY1JgD6jYqHQ z?7>kk1JKNPLXxepZu^K8j2Ur7V;^{)#2cU2$^cA>`HR(WUn}uTn@d4pj}xjYpKw|l zx<$sga1yMb$RsC#T&hon5Rgf~_F)nDu9d*fRRNxP_FtHwhq&mPaA*&c6DmKq=z>6G zijpfrVT#BcE2X$vI=DzTIWw>bex>qlfut}19SZJ>3>v_tygrpr>?iO;k|0mFW_Z~q zaLvL$CN-G*85PM=t-98-ZPfu@q7Wkf^#C~d2!OziL9XlE$sjAujfs2*+J2rQgS0^e z6lqJa$>LU|O|C!?jiotSH>xZIsZn8SAD<4cy?e%Cc>nnNd9MkWJ9iqE-fvf5#)}CQ zs2jqkKeFw$RNoa7eWyju3i%)18`>QJnjsTzz2#ilUfn>DxwBTrJTNr9qRa#pRO_8ALczcU|@>qtSl>kM~W=%PH zC?%u$eXS!LW%{N>0w@LkJvL;4l?ozP6DtCQ_r?w~O3UZ}p@|$^oz5VMMt%+A8*>Iz z&eJb=Py0U+tC%k;((=BW-~oQtebL~z-6~&wKGjgL8o(d$yGy-yX*xZFqc_wQ60p!s z_G9^%7l*F30?)CHlc@e%+&HZ(mftevME^JnJs6>dmXATCFy1Sfvw$l>OGSD4*fedw z5t=&np3~r}9L4|z=0FN2qsB8d)yct=?@n^JQo|Nh#c@rw$#CGXR}H87s^vMEn8nK!m z)^IVur1}0*hx+6=v^eD5W=r*xH?)SNp=i!~A^%!Fi~?lO-yHM43mDY> zAQ_pP``45tYKnIQyxMzsnJu%gN@9odT_Z0wC6x+^?t9|*LNSnU0K%)Cd5;O?-u3&j zIBL=uq3xl<1|lv$9>%EgqZ#$;Mf`6U0Jfs@iWA6_bPP~BtmO7z+>s>%tXf;MuQmhb}axe&}szsxfs*mrTu#;Pd5KSYb70&v3QBxYKiu6>dDojWCDbP<|b>C7Aa%tGKSSY|mubF2w+|NIrC3SWDVOZa3 zShWz?10qQI?~v$;P_Y}yGA9P|weZw;EqaGdm6e4Y9t5R4|KM%2KgvHHQ?S# zEa0UlY36+x{^qbl%jc9w7qi1$_`vQ?L^|#-efE$qyl$FhiT|C*RWM4}&r|f8qqJEK z_3Ifr36*u=@+0TO=uss%$zPUu0+e8bunX960)ctSeThT;t6oV@iV9IUfQI|5@NwPl zOOD23KGkD5ijk8NCxeJ6tGz0z|A!G_m7oRPw6C(yD-X_;WDjKXH8v}Gkw)qhkBlVk zL($3@s0i*pR!4NJ-7@A9V4>VAdP?xH;{HFa-s=bexp!`42 zwZunsT0FFU;*7saF1laMW)xQV3n9RiNLUM|G^x}R2Hn**W-hXgD!b?eH>8H67?CoL 
z(s+maL9E{=r7|;*$RM}$0+uBoak?dyLt&rj%=6Y>#O;olv^;sUUVi)&tQl>CU=E8o zI-AAo3kT*x06hvwjM_7lVsr+FRlOWKw5=R@-;Jgk-B$X-{bK;}>si9hWC9Xvceg%{ z`zhK>LsuRKEMh?*vn})LNk-cJ>a%}QGeD}KDDn?y&cTM(#{dUMm&mqfbNW2TrAD%; zH5^+eFSpy+JS=6!GstOi*>EiH&_PXC>3$K0=^WUCtA|-P#xNkV#+zG8-zqhlNcBs~ z88*xu`YNDIlDI(+Ea&@f zzS}Mf@1*MhMn|J#qAvD}INB?ypNpQfw8~jhGBmhC} zq@>q8Ur0;tnu^FmuzFrgdk;})llpY0J3#;sP?!_wp+MgAPI_fp>W}K^-K!jSIYB^+ zQf3`}2d>jZRm_enhNH{nET@&4>ER~OsOMGsPdZ=`Iw~I`KJECM2r}ZBuR=73%3-z` z`lLC3j=AaKu4!#e?1&2O=6;V@O40|5zc~#DU~LHdApIR|zq4gQq)*D0k0<%MKwe3K z#6j+;qxFOQh&>uDv$_uwbkv8!hGe^Pn=JwHnL9crRh6IGG{}%%1_L!q!;*VsxRCt$ z=H6$WysJYc1FCzKEF=Z$VL8Ogg62(_#t%ZE!;pD^6y?m|K-+P})qS?#>q*rInCeI_ zc*0vjUTrB;dyrU!v6>#7&_<*BGQS~N>FvZJ1`;1a;Be?b47_j>Oa-2_cC+aTd%4Vs z!|nCkP{otU7oY1J6Y@U*gW<+DJ=Ic}U*G8$SedJy2sGsbf#2w6-|(Zp&f-4{c`S}BFP8;?0by(IPl?KI=;swx> zZcWAjwRD`G6mc0}`ThIhf+c`a=!?pTE^^TnXl1u*>u*3Hv8WGAaSxT&qP zvyo=$+h5R9pj0?uer^$NS-3<;3e?G6<=A3hg4Dm=; z5iumqX#8VLg8tnyKpvl1MeERGdnp3PTJ1O4ur3w~1061|+oEnDiJWwSxAYItN~i*_ zsG~w;h2js5=U%11sZ)u8GFbZr-WNa(RQ3qW@YntjYP1|Ur&pqO+{Op9R~Ef1wLD0U zQWs<|NV)`waT_(2?9HldB4D6HNoeS%)=tp?UB$zb8(`&fgsmdYkGn$_?}&rva&*z- zU-P7&l&!vs73YNkVA`L#ADL7ypoqwz@%UpYbuubMnE_Lq|Ne5Sta`6(7r8++ig zvUyqf#)C46f6QXO@0Ynl1J=ujA2A65m+N)K%KUZ(B_NB{3;J=d^8qm7D9QQcfTLG$pi86sOCr}cY!4}r4+7?8i4kji zVvj=2U`69tAf%%~U}nZT@V)@q1kAm0wWpnvk^4MYeCLsj+>im^A3Jk* zZb#Q=;wTvJyS4aZcqaZO|?AWxOk8^Y7K$LW8{R3zG)f1Wuy#MR%SVZnp5}ulQ_t1iwF_Q|}2aAsBx7 z5jBl8t=IH-2`@{r%+oMws#jS5PoQGRNwwgmC`?a|ddA{kbIQ8ovB16=H<|>Z;lg_d z2GIai_;n1eav3h(e_J_+aX|n$5h!n)ig!PVIow|H1j-EYj@7cQV}E?GI1J+CQQ4jf zLuvRH6ZV9ET*1GW+CkUFf`0mZI3R260G~{;yeuO+1N>qyh4s%$0UwmV!gr*{5s(h zr3kgE>6KEe+tn6#Hgl@%&?2sQK)#VmNwoH&_hl#g_F!6fp7U#rwMx7dEFPEHz?z63 zpgHRoJz6_vD^r@6|HNo8pa+H$7)Cu^%H2scx6o%wMq6o&{#d?F#H$LS$YXdhr{U@kxAShr(G;irpl&w<-V6R7>DQC_O3(j&%dA_(YmKr77sU zGfrK-%JUolAS@p02kM=qZx(mo_(aa4KUF-c6^D3siv=q`!F|T0Vs2Pg(sIxVs_`mA z7Zv%zc+F55U0|z5vZ3M^xunCwSO!$mg61}L+>+){sD<9<&8%9VnN?zOS6Quyb(q z%;xJzjT*QBA68nBN+N#;mlh_})-!q7x3n8}ej?)|Oc^xf0^(ANAJzG;mcax#OEXGW 
zkjdXI()*cQPYkvzh_B9nobHPW`;#IsB7)N8-O?0OJTKjmLP@ipoEP#R_r{?9>{! z0sVf!p1USQL-rkKC-~))2uA!+ zSvC;{m;y(*1JM2r?42jJd0m*HQj}) zv(V&`omp_)pCD?l8dFlPzH(PW04g1vQ4Lx2l+r_c8L_WNPny?OuGjPHsv7`rO;ZNe z^J!aqQ$a;y)C^>(vF2w@WJptVT(%17Pl%!}URPa>f4KJ?7?wj{aexeAPh!%oU;;K@ zAvmDJkS9PsmMY781jw?eIotE-`&DJOxup zMvMibG)6yOeZ*;^-L11y!29yF15fg9D8CQq&#OZF#SRxc3$Y#UyzqE#W$fUMS66Fk zZub6vgkEfwe(bB;I>an|LC*sFN7u2;_m8e)(1dW`lzhQURhS-x@czluFvU2y49`F4 zi~xnw@BEn5?2UlQi(YmSzD-^Io#{spl#W3m?8kcQyI6lqTnGXdQ6DdK`f3~VXxr|QV(E2jZ*8h9bbfL`xQgBVV5dCfBTno?rQA#vhJjD{ubD`L6Z6my3g#kur*~6twblf!kt7_T1mJ z!0`qpFa4pK%?Yd8tXFNK+Sf^4P|tYf*Mt-5M_=r<Dyj@)G#(SeLG=wlW4?2#|#(P;J?|jg_2=kGG@O_~q%R z4S27i?I*oQ<`4K2VD+BJM=4BE9ZF)m@nHN0i(hE@$$FV#KkDf+^tKIZfY!phDPR}m z;`0;d{!dG5zjk!t{2fM76GMDyOUYVTiG;%jdGjV?AwpmCke1gn6E0Tb@M~kn2Ix(ER()+A_p}rKW_WZH- zxjZlYd}C>o@X6>Wg#s($D7NGR!CZmfU(2N&|J9WC@&!Q|MBklQzt%_BJJnF6aphuo zvd6>W!nz|1`9Ahyerj_yfWeiK&iE&pGa3sXM1nn7S4~xzKZN9k_h#bJlD=pK+>!_I zNpreohHXsp3bUW50BcQNN*rtgBb(AsHDl;#2!%92{hWXo!l5$@Vv3_`A&hzw){Lbv zdB3cEjcDD1q(hDv8Ae;$)(^pte{_Nf2xuQ2U2sb`YI84+U-D93kGZ)!M73EW!7M8c zA{5wZUj(*_4w~WPfvO)O%LJ%oP#krbP0!qDDQofUGkbz=~hunU# z;=I??GoN-~Nng+lOCAkcZ|s|*s2>#z3>isElkVqFyn2b`K>h+kd$;Shozcw!R@XcW zWsNdB@~DY~!4fK9bsD;AOH6ub_$VTT(KW1lQji58JuSY=B;;lRXf)04y%nb0pTN(iVAMoQ->1Syb|C*7dsX5~;^j*k1Z>i2<|t2{LILpf&Qv}Z!*bYHo6V9nT)}7U(I5W48o{%0sT8>-ci3I#&=W*4Wt76c!^}e4du_O zI+QtQV=UQdz}vOhlgx-)g)=wECG7D*Qm3nj{bF!Gbze*(jOX*jr3=56BZBoA-TCdy%N4%w>9*xVu?uw9EXMo{@?8oYpdPnVM zF+ZZ?_F>n;(=eIty>Mta@6=D=+WCI!xW7&4P^D0#=8CS&Oi3{zB!L4Y%6!v7cr1Nc zpv#T*f^5rAFAp06P?m%0^szjnPTPShrzRswBQWYh>A?3LO9k-hq^U&J~`|4kG9Zh!eh@&UQr=Bi)h%d@9FpI!%L=#>=Skwt9B>rWQkb@|c+WkmYwgM% zvc%^$1j&%yg9P??wu;!Ub(HZAjiXCRY=r_3ElFz1z7=Cg+?V=B-&M*lJ$ndej`n`@ z?XyK`ex7;A*Sd^-g{5H!*-*F}*hQNZ-a`Gbxx5_gUem!V_7p#S%EI1J9FOO2AN0Kp za}*WMcpQt8m15b-=7|1$-0mi9G;}vpuA1|o8G-Mrb;W}H8P{8E}Lxqo# zuzFn2z%z6*rCJc7ua5^~UCMGy+G(bP#Jwkr)u-XYGuKhmtc^QzKNZ2a{((<1m1o}Y z4Pzf*P71nsIwgwdqig1-%VRC(NEYQOkiCJ+}0!f6)tVni4z>!)&Lc$iT&u^24V&b&O6<>$Zt>6&(+EPBMk=t>< 
z!bYRV7|JS7a2r@p+ap9*(I|o- zMaf8N!F(>VKAFg3$m*B*8(+Q@y&4419FIk@P*N?FKhG#bo=#QmwsrPssH+`!aq$?s z&Fdl*8;2ED=#|EaV`3rWTOWba&}(O#(B-u+?rWnG+uzj5n>e=UI_{}pS0QdAsg>=q z<7dx*TxsF!)p`)VAKQyJ+FdZ6Ko3>Kr+%K<(UYooGRzTYQ*>?=8r(kl99%;`*-R3S z{AUHPpRr?#Q9A(nd8xkWDR1Pn z_KEVMqdY__`IgMw{-^mSCsHS_(UM5X7jo9QsELfGvB-5KuxPjge4mN300ZPul<=$EeVWJ9RxMt11h&u5+ZMfV z2cbLgu5WQM{iP0(rk25!2T&c1bFaHhzg;Q6T@J6Z$`HVEK3pc?*$<+?Kf(P{QeqC2 zt==5j{ry+Mfpt7zSdSEG-X6bnX?)&)g}u|J*5eF}+9Ckds62)%75}E0zVu`vVOF*ehM)tuZs)KD|F_ zOe%)@9T@7(Z?Xi8i|Z5G9Gmk1=R{*55jGs#^(3hJO?pbo&I!REhoneNM$(s*3l=-O zkk#ZDed80{!AN;>cgFEV`%Nbi(BQ2{%5B+_meN~mbrdamRR(k3WLkaIE);CSCF?Dm z#|6PZq(+D2UrB&bma;u+!PkU0iB?&y5&~vZ1xsXeao=|`SI1BrYlM$lMq88a;6DpT z3l$3Fbu8wi?!@cbAjM=4Mi=HCGGreHSYI)I<^hS++~`ythIR#9sGI#PXuy_3xyF*^ zJh*w!n^6fJa-Xz8dN1 z&MuMdvUJ{v8{}8r{bOU$6UOK)E-(YUOkJRS2cFIdlr3N`PI{wDF}SwsGA+c*loji~>5g(9KBk`E`JWr4-onFp&9! zM$ZawJ}$Ci?EL~m&rv(De%DO@l0xveg-c|Y_WA5^pcJy~egjMaTXKHRhbsNSMm#{A z3^h}t3X@pb@KBw5v29sw6*p>ZcT&&fxBW59=YQJJMM$w$)-mAz7UL7Z+L=CdOln$p zG-aoK3p}E5OiJ$NeTr$s_G!D@gVxn*ga)nE>)`qHu-ldnx&6bMZ-P5rK}>yu4!1-(rW>sMuthnukdn)6v*B!0OT9IfCVI8B9s| zVA=Z7n#%3!I>Im`Hy7|xc68v;*6K+mc`{86@HI}*p*D1Fu}pNg?)J)`{xHKC`N8b2{MMU)&! 
zgbr%Z=NgO#Bhq1kvochJK++rsb*KOfgfL}`v6}CT@cDI~ufs{B>LKI9eFFZbiP(ey zQedc@O3TKa_WRKY^<;brnpdI7Nk$0^#vmwDfEQ!Io7uuLokOgObvFSHottK ze=6~AbiIga%^EZd2NI~7y3!b*#Y}F-&2>>TEZ|9*Q@lUcY#Wv7wlz%9$I*COOv;=U z-+db4zxsA0+MLS}!ulu5pG`~VHdL1%>(EroMjuVBhmh#TFb}HNgM*yvi33m768=vU z#K{WiH6y{Kdpx}G)T|(Xs{YM-QGcPR^iJ@S*P_HOZ%~YO6*+R=)7-@8&va+^3vv|E z1e1Js`UKuAH+SD)jvMt8LqnJ&G^I_!qypKpxs$8Y|+WWPI>~vKeN&Xvv zckeqS^|A5T9ctu{WO(J6LNt?Vay+dv+4+L*dRKms7`G zf{|xEoeI;AVlc#IS z`lgX3PJ(wu(<-{>5s(;JHp3u!!yMo}gfZu|UEmyex+91}Pi58}QjF0X3K9hcSG=RDJ|i|B{IvA1 zkL#-{+qzHrA>>cpN;O@x9P|74W#@nTNcnAkWn|!ID%bienCRYI(?OY2I8LrF{R-M{V*(i?WTeHE6W^h*-zVarBsazG_6C_awGd*nJ!igQs>LlF z9yZs?I%NG|Zb}g=Y(BG*-3ukQDtX!$gYs2qah)p#u=ZH$B2J_X)E7d}>&5QTqZo}* z`&+J5j?K$dmbB+SneENSTnHBC6La25=vOX#&xYTJ6*#-Y&R^MBiqUgpN zpb?h8mSE`2)FDm8`d3iW4e8EU$tXiScCEFXDSGG_ApmRV<|&8CL> z7TyQglXALrV0A~xCr)rQZar9fDRkj=Z77Mvk5kkg9G<>_H&gdV^ia}|MfzEe*o-;x zy}g$PDr7n(Sn~oJ8Cd8@HD}Io>^T*S4Ju3`;P^?~s(?xi<64&Bt_IrJi)GZHo9}N{ zmW;*cEOg`Uhy;>(1;+X;OZ(fH<143h;YZ>mWi{_i#fTGG%Jeo1WRt*vB?fry^N|=L zI=)oneULlsk8wd#D9juL%8R|*e-^2+_3-oWOBi*{jH$tYSE&S4VhrcyXLLWqxjXn( zj$i45l!Sd#ZXA6ks?91oh!=&BqUly)#i`2=etnpA^!C@RriaQKc`(3|?}?O5^!Gwg zIuJLVmDqv&08U7^LI6-NBr;?;e&(``mLGz=RWWf-RtP{nDB;KmJhoY91!pBQGoD}t zAL-=OCp}409)a*6Cs154YZ(x|%-^z+LmiQSc7MCbMBDZ;5K_HyPQ`G~oD`+Ko}_r~(9#m2yq;c-lg@k3_@tWFQ<_=YhwiL?U=la0INqoS7)!ZkU9? 
zvWJKB<<@VsMGD)p>zVl{6x)vRhYQ^Bl{vnOUjY+z-Q>Z8)_+1$XTsYeCmdKp^5dy$o`b0<5NtkdWvZA^CVDQuU|~ z3?+Ca-RpTJ&1IJ`k2lowxZJ#4Zt*Gp(r9sJqpfY}skYv*>94oH77pK#u#>HVnxo@3 zM6At}OuHU0l)5F6o)Dwp%Lw$n71p!ikZM-*GB`?58dG5ZrdpuzbPt@K5XVd0n&djS(zZ#s$>IwS|${=mh=CF58 z{k#Ah(xkpv5A{K&NGU6FLPGIaP~jScctCD$+4umw6r60Y69_|s|^udZ>0`9rJk>0WuYiP!s>$`E}n%|b;5 znDFRzWV;r|>Lq1a=+*C}fe}lj%pip0v!|Jt!iU}48ZdR++}Clf#8q-EDPrj*VB?J_ z`5^q5Au_;QVZ5d1kL~*{??#t-{LKhj)a&O;PjpV#<9$k5n7Y5u;6DF$DpDt7&e9# zt_Qz+wb8^vs@(g^kxrho*LHSe>xXB2|0l&)+ixiO+~PvoJswF7!yN9st!82wqf%0k zw{1vGS)wP`nfIJtOS?-PuP*oI=il4G=1F~D?R;j#F}iv4C#p&jWPlXkZ%`?pd^X8t zcrL~qy|95zJsR^A2=uL4*pucCaS;bAT#F+ice`A-gFkG3;J#(efMpjJgi1@H!Be;%eyHi{^IZ6hVQiA0{6igv*F@fp^vU4#ZG7AU7Yd>(LfRaxC<^ z$lm)tDPZ>#nGvQ>g=wP3P?!P?DIyh3TCm>WU0W60^xRx_y{a7{x)AaW#76S8!7m})_XmRLtt73#$hirHZHOuHo&&Sa#bsBq#P_1ul`q*N`uJ49ckz$ z{H%W0{VTnIgNYs*IZt`}%=dMLjhM$VrI>e0#At74=va)AC)#8ydNOm^ZSjP2)3=WMEPqH+mVV_q)n|Etb8a1iJPgK@)T z>|xZ`s@3nbk^r9^9xP!3i$DQ0-&We^e_yztEG?KatKfA1&x-G_02ygUd}Q@VT+K2L z80e&?y>toROOchZIAXNV6O9ND+}q-TjR!(TY0ZO+1dfi^*iFapco5rT0JjN!pPE zLCqgF=6n?d`Qt2z)>p@spD3-hr&^;=cI9Yq(3|J1+$wZlg%sOJ6>a*fac3?tp`IoN zXug(Sxn&J$|Itiae=|aL(UKf*(m%2ud-Mb7)Z#;*r>&{ zObh`!#wm>_Ppz8I@{5e7zLNaW`A+XQO*37~bGO>q_f+#*S)M8CZpm+*7O?0wN&kT8fzf<6uLvvWC3fcZF_@@`{DN%H& z(Szij16m2QY3MPni|cJ{T0h{A)>CUn6*?qbb7HUVchsu~=4;IK z>?xZ)LSD5tcXt9faGTT#P7g zE5iaqdkq=RreEe)xy`>8q>qb-XXV=wz^7^wlo2&1^#N zAW|+OU(pZiq?l31!rvd0zs!h`r|xiJgkPzrKJR+%5jH%wnHIOR*63K{I7a5tn3K8X zZ{2SQ)hxFXWPmON4r?=KelhgBP)TZCemcd5_W{Es0 zgz{Gw$uvunWx>n21n(MF4$_!0ru%0&_r<5X$J>td4V6972lhpl6|~rstbQa-hW>Qh ziA9>v2B><}w3l0fS?%Pts6G@B;jeB)QYqce5t0*SYU1xN%hX*z=j3y-a!cdAL14PZ zw>KLV;Cg=Fqyhh7InY2qwsfL$Rmzf~g+M>j(%_i6Uwr9xxVLU#{A7%KqMl9CB=t0< zJenb_fh8cp*>ZX4VvaR$6!VlOocL@gf0=}@Yyn%I(=v1xl^q8t42Uz6q_oq6=IrlV z(v#R82UgF@-7U^|xy9jmj{-bmhy!@(Zz4HfyAa76NNZ=<>4UUhexgI3QmCV0!8%zN z4#J=ZT@>Rzt?xfYFK5|Kc$_B<^l>Qctr<4A^@JU#=jE{&dCt=%V$3!r9?4VI|t5}Hd_A!;7)l8nO;(2QTrBPb(SF&V+DZan&c zWqqrzU+j0^PKmG!I>b@)0#-j+WATAb!f1ZaoVb?ebJU&+1*|f6Z6CC-jG;UK(sXMH 
zq!n(k`wS-Xj8!%NNO*QYp}*zIUv=8Ckg8=szMy4jX;VX1QoC~q#|9+EmJRTl*B88= zKD}wFZ12bw5-K8p-pbH8ruGfaoIH^_nKhIfgY%KIfOoi@((x*MGd3_WrfHjzIzf$k$JP* zAomfB_$h{5^5(Pr%TfQ!_0pW4+cT3D?CJaMxVMCQMcgAR{UNnW&K8F}3+f3UV9rsk zq0&XtYDc7k=f_bRYINJo6(6`8`=sg`f7vLL`LY=%yzfNaEY)#(3^LCh>uA3(!u=&5 zVP)*0hE#1K3sv*K>RRYCZ}bJvOVVb?v6_Z1uDbP)PYS<8t#)_vwPND~+%{2O-F(3R zL3+`%D;4eWW`$^k*x?dudUeo?!#B0*5P1FNF)N-`xG%Fma=B3BUll73f?<&*v%5EV z11Ed*ho=nL^0RYNyoJr62XZqJiOUr`i)E`zgoy>zt6(Z=)mY)B&DUC1mC(nJTVr*l zPgS(9+_!s*a-P7&1Wp*g^!cALBd3y_xHtmLH*JpF8`DoI7GLbfsu2R5uV zd-mulJ;#XsliLLz=_b3nhS+l+;(aMg2qTUl(&MD8+LR?IL6L7gsB?vm_0Qsx*fJ>S zD!6^7HK>@7B^>`ZM^r)|c?K5Uqym-Fes4>?k!p0M9;_ev1A$FV46;F{(sD&G3QxW4 zK(%uv0dzrf&2N5io!->x?u?&}8jc_@(Vt%wUuI6WX1-^Nhv|yjpWzlf?aW{j zAx}H&={352Lzk6fFj8t!v2oqnfZn=sbUM}z*h%>g%k9*zM~KYxN*S#iO`;$tt}u_X z`e0xD&Rjknm69g|3<>PfVOj+_BGMs--v!J%ehw6 zEy;Jxt?akxU$pE4d(&urj6Wo1lQ1)y($%)J;vrkkpc`=%gP>&~t;EtqTJ3*;W59x5 zuM%zOsq6wNS*pxk3$?O^T)G9jObwILlt7=4afT+w|G6L2bEVT0gU%4HvavPjhBF?l zAgenPJ;i%|7!p>1@#y~mmufn_%ZGP3!^_3dEup;9-u0B+hE;~*J1J$iNoDc-FK&bZ zy;~m8(v->|1Cc?eB>9WP|A8sRO!nay+^676pY>4CQa9$04w|K=3x z<#sU)sDl-n>BXn-=o(v;E?{qfYi%z`neUG5Tz!F9l1l+Q&0}OI`-%9O>c$I()%dnR zsw98J@uA%!kC7#{#7pK>uoZFM+A0$x$gXc|SaVSkF>^Z&0jeb(rTL9ki~%Mz#w2Pz zdQa{OHAosWmWN{SnPEkZO>&cPXmZ63+eXwrXj3dCaWA>g+G5 z%4RR?&Y2-vuBc6ocD`WgjHkeuN|g)+et!Di`nP z-RbDd{M^fB_v3@Vws(g$Joj__udUO_cOq`ecaL{!9iPgz1X^+WXW!*kek#;x=%|y~ z4ZYUi?Aoiq=!;*(o6DBq_uY;h6xVn>nn2OLmu|dtC_OnX+CC4y*+>k?aI5V+aB2*O zyyX`(G`MB}HtORM);l!t)ae(mppd*lQTh62q(~n^AiE(w~d_fD_v8bS2x$)^Q)tA&1ttC*w67O zuf)&?o5ADda@_v>Q4%URCPVX^hyZn3ntK;)pH$2ceBE5Ml1P+aTlbQ*X9knwrG#J8 z<;^ZEtr!yia=Smuuq!8R41GlRSM5YYDeFTzFR_S!Cy9nS`Un4P0bRoy94Ds~uI)1> z#5l(aKJKhcGk8ue{-C7scK{RB==y<%JP2$T>`5jiA1G)~(^HEP`1AV{UrD`7OW8cV z6qd_fzk7kncH$#b0pKNxpxJ+8$o%X?q;E5u?OWLk^V>IMon_GAO^3h*)q`|(8b^5= z50Dwo*@c{?g5BhkeGkU!ZKA$D>qb9Ix#5Bd`X#RKL zW!HjMiu^x&A_25X2r;z5{KQ^3`vnJyG+Nn?ha)|q#paC8X!ZK7iYNYUerxNU=xi{@ zG^efdi2QS)Ur|4i08v(hNsvjY3JR4S`8*2b#;dR%y)i}D3X;xR 
zgre6pOgbzi?Y8>20aD(8_;I&T*~5o#1)@ltwKJ5u`N2Qvb03fz##cdx4Kw7#0OcAS zDwrhthij;KTHn&BSBT9UcX4{h$ost$sqL4E8Uxb=2OI2+M`KU-00hA|Q4Qy(C!N|4 zMap8`q_hTg6KFQ5>^9wiBjEiQIq<7jywAupmuqM|tJ8m6UN4(hh+L_5vW zM7xG-2zkXJY=C^0dBUfe>N^aowfnqFedScSs%uABu9*SIO`PPTEx7m53-N%#c5W}1 zMbV=2y+}+U5=U&&@_^R7EA=GhLp#VnX+dWm_FQeBzjSoTq85DZ{}12O7AC-VOwK=o z8c6j}5SW6a)j*!K6OCtc`ODp=o}x51Uc?OWO9XU7cXK_`!q^b6cdS~9fEP#{Qv0#N zexVDa(iGq9!drLLQWTWN{F8UTh5y?=F=ij4*C#@3QmUehf0v!7;{oM3pC@aRx~zI>qtKGw40^Rq$PzGny-0h0^UWKS3wQzo zr;vkjiCm*K8_hFR0mf&IEgayZovYP+z(TTbI7mn75VhZhNfLsVd9@?aS{yie(7h-K zIk5zSI`nU1U{4ic1b}+9W44`aoat-jwQI9fT@5=T5u&@CGG1lTZgp^+1IGCh;jOI? zKcJXa;4{KKq3tZ(s4V8CacDnqWDk*|5|oA79Fk0Wl(txvzRqo&g4M z$j^_D<`YbLaUP1U$+~ek9?rM)=HdoOdEv{MO#KtQGMs{osatECDnHmP%hXH_-b~-x zoouQgr{kmeqtyC$sAUGo<3e1GarhyYE21AdLLxGpBq3(^9E&rkX_Np(n?RAiCn ztJ8$uvf-B*^fPvz_8=R^t$-&lpEZaiS67Z?R?C>DRPC1CPl)Hu5dH7#dY8*~Qx?lV zDDP%%yEhqs0}C2h&`mF%i>Xh(yh)ZU8&On{Lf`+Av>;Vc!i6nv>3rG{v^ig~m?Xqi zk9WEtYd=@PR5bkEUJJD9mDTPz@-q;!e(KVG+UEPf`{=~f4uk{KY&Bc-uEs|=^YDp( z9^N3C8d<~qq8k3d-!KW*3dgwkd35jI^Y7F2cBc7qtI*>iFVInSglwuif8>xpFeO#xPbonG4H&0=~ zYT{*L^nR6<&4CVd2jph3c4dCZbf7Pcy!hqgyO&w@{2=6fD=o3}mKV$dwSD~olu2;Z z3xvSidwTr*O=p@%_|QUDa}Zzgy%La;cmk>`G1oMThIWt7WdE!QTmDH{vtq&_(+84% zIQ0K#@XO~kMdVQ7YM`Sr8=h)wJMhx4hJ&{4Zidp)&QapVrPgFr_*j)wJ-Lon5RSQh zKsy!Jhs}A*e|NP$vPXr@Qoo6YBOZWp$7Lc$@Z^r}FO>WkL`2*F#RK18+O!%*J} zmCj~#uW!C}@Z+#>QuqEA4}ntS{qdVyyap8yLfO*_0>anbb~qMZsKMK5o-wY z@^ZNIT0%-Nup>=u+|9PVB_SMoWL2@v=UrIRwuLKQ;#eJs*jB|G+EnwxoXynwTUjPC z4y@{Ez%FnFIH1B2pjA~QwKn(@m+5JFLMZN8BlsK9`k}r#*lpDx$0Dg>f%iSOcI$K2 zF~I*{E&!J{hTJUeKj@&LHC^E4>n27?kdJ)#STqC{harAVwivwE)$n5eZ1Cgjf3&v@ ze7^_tSuOKC3ZBR4H|1h@ffsgfH0T0(DWByZQsArVs?aXf$=_AO5SGzkZJMH3y7eeM z?_?3u0JWIyRQW^RE@+T}&w}5_?^u;?2;}urKZER~})a_mgmPsnh`p*`^5xjxKIk@t21 zJ-~dFCP;EA#g~|sHJ1?~LOlCS``8&V#lbxh$XW$kX!9ETyMV5)We_G)!)xWYApZML zm7T;qWA*;xNu9|F`qb1VM!ZlkcVjT=C(Zp~hCkqIbx4}HwxJN&gk3kb0XW*DpOae- zS*E|nB^LY}Vl_-(O25I_<0)n$L8F20fp-5mc=2iYVmYuvuA+d;+iBtrAN{93++X^v 
zPhMI{rvrLqn0%(t^yB|Sr)*8nFOI(C##zu?FmKA>ZqCR3*rWY)Tn$RGVhfd5qisX+ z|E5$<4)3;=ybcr)S0j`?f=Y^>P@=t+E}L+&yZsTiOBZ?CUJ)ZJ|M|WfsW&wwjhhJB z{iw(*11#PHGd#itWTb4wLbB!_l@5kwP5I%iKNN&KwVdjSC(KFd@wZK2;1nLxI17us?1%tfVxMb6CVLK@cO|&D4_buXAEP3urEO zD<$?6es1%c(4zT}KWX-BLIf5LC;7P!m+=31{)A}u<+sKRvh5}#D63Q_PBHo=uw|4e zj_J>jMo9Rv8Pxpo{C!wUTw=$K3yV0Yp+|2TE$mtzcUzz!+g(=uN{E<-{*lN$=I7xsr5a$AiS+aJB2+gSOf7GwdzfrkSXv33#jJ!- z=cM&<4rWTyX*d720m#G5oEsby-}^2%c1WrGb7f_vYd*7{KQ1%P6;m8fuYg3g<2hWTY+f%bRS>jU{$0y(RdJSN~)N8$!ASVvlisD%1Uxdv^I_59fA zBG})OaG9kKY1^-BtNBKxBjDx+I2tGbani-dlq-|R;`J}4PVtx3KDY4iuRc)c@u2 z`p;WLEehPplG2#P0l&TLAtQEVTgH^HD^|WPTptxGppmu93zBBuKVF% zrHt`k@^Qdvbbe_6?>_L7P0bg>3m;QXqC7OvfCEYyXbK(hg}+n+61%;>a`o^XH6Hki7<&T8 zGxJwViH+h1pEHQ@bpj}fY6ysui#^g^f5?Y%7#_{Gx7^mCYCEsLWoZf8B*4C;;%>c* z21m2f0jcdfbK_H|>R5I0w$a35n2hu3LKoK|bleqZ${sWSfrT@X`VB_J+tkjZI2QdJ z{pYAe^l8I2KYjjtXaKb1X{i43*_#wQnee8-xM_J8-*A+tL4*xRg+TZdx+vBh^Sjy?VS#ZE)R2<* zW1$OyCT=H5KuWz}5IOa2)2vv_w94($!rh+GQPWLzKP%fXS>#b)-g-T!`U~|hVE3`) z>;(K&&{B&~EZ1+IwO$C?9}YBo2o(cj`y}y&2>i^b9AcEMm&DOb4E-$O3CdmaziGm^ zop2yabXq1<$ih0D6UIPq03whfx;6$KD7WZtBMEoramn(u?Y88KSJwQhR}_!7s@l&@ z3MA_>{}=dIeG7ejD}zCEl5dFkKYXKZvazKyaL$tbc9PHogHx=4wSa9aeSPuTJROQ zezCYpzps@~=4fSV&8(*+PuRT0azD%%f}md_5!w{UMXLvieUk6GjbHAcIm8N+z1D&p zvx^5#=%9;=mdxw+sh@SAj{_pwG!Slj{?QI;oYoseTpk3xi!%#vfG#Oau69 z7NrtsXthZ9s0CyArG8a9G*h9(aSN1`;T|Z{-z(*N0{I;BKs@2qi^l5JL|@ibk*IfG zxqvr{XL`tklH?V(fm-br&>u|K?OQ|Ps7FInBT(oAOOd$_npqYK??=L2E=t(K9RH0e zjt}&{8EP&n>eIa-=nqjz?MQ-7+US$2{^Jo5HX+y<@ofPp(k&H|FIX87p)oGb|5)hw zsr7$Y#hyaEZ5&bVAV2UgLXhO7PHnDLJ+RnEgD}|w8Tb2H@_o&;;lI9hr1x+n>bSgF?pID`O zXY$%7Rw@ape|P$28u&90NnsZ*$a~(w9CVOhiy9ZS`nb}ZGFO;0&17Osod(X}2e(N? 
zQj+yQ@octdE3&623v>o5WYY6yQ;#@8!QLO>ae-aZVOv!YbwXhh$}WI`D&ZqoR)!%j zi&+pGe9-s z%_*5X#)yz-15K?s_+O-OVRr3n0oENiA8WtNl8#M5V$o($$M3+1YWg6aAL8e)mP-Qm z*RPrP zZwEG^t09d^xo)9iSOvGqD%c!xLp5AGWc@|`4rEv%*@KCejb@p2Ur`b}bd-aXwRN7d z`xm!S#~+Lu9N(;|YOp$*D>C<-<(el7DZ7Vw2lLnpAf#@6);yAJS!gY`U&gTep}~a2 zvIqGI|7J2Y3n7)QrgaJ0r>w?rEk6DE+cve^!fK^$(aG(#UON5mzLnnMvV8XX!EAS_ z*dtw0`Yf;EjJ=+8wBAs%fn{lqMuwO&21MMOm>&f1Zej%&mm381N;l)<^|g=rTO;PEUkGyNYn|B}S=Lk=158scwkg9|mTOz}fa5N|V9b?%2xe4!fBL%Be?tmHJtL%2a5mx~Bw=LR*($`w>VNTt9GZ zlr))vYVX;J=@z&UPgU%dCk=hHYYjyRwL!3v)!2$ z1;&P3y*q$o`c%B9g@!#H(840eY#^e`q;S~${#zJWX3}4gJanUD88}3-{5K~+y1R!XtTbm zjNCkclf$NCuQ8OuNgi+ukRJ}9{cLBPaf3Q=D$_z^noTzMC?*bG=+LMxdLwEOj=~SPVG3 z9A0Oh&2OW$ZXBMDk6P1X*Q~^N9URp69g~VA^3kQ6w9$Rq9rQ7p3D>;C%7^DxL39#R zvPbw1p^+BDQ!>nUGd?M!*gA=C7!J9gu)blRt)_zavSTYPQ5GMKrC-13!LwcM*I^mWdIAhK?SL8lhf@ zaN++!`+*#5Rpe_ZU(K|gahEK(+UffBQ>h6DVCh zC!hJNkj$;Px`cm@kCnZpDBW&CB!co83rnWEzKBT18w5Jf-&-C7{Ph!$-bYbx73`31 zpj7fMcKd7LV{s|sAR?wpnGeOe?E#ZF>i zMg=QU|44^`Rh*&Zz&%ovm1i>SQhsBE35_`$#I@b^5A#=^*`UR#`f)}9uE^$_IdTO> ze?W)=as^$55AZn>yd|t&ZX@ZT1yImc$m(nRj*dk^MtpxZk))wM3e@Uq1U8)`C= zxvd^Q&r=|(t8}>(^Rs6sizW*vyL+5ydCg|}dFgS+Kx&b*3(+`6WT<1eGI998GBg(h zoiE52Bp7_DzR5egamb!z7(f1gC-|^(eO;H&>^xm4XvvbV&Sb`vNk&nffY-q&WHmEY zk^?@{Oe()=u@rXToba(nCULZ77nxR{V?%jxp4!9$tty??0bXr0`c{;cb5np%7h<2% zJiJdK2ELuHkRwNxYW`^Vgu^&7-N3_%3>8JxBz*Tyu5}u`q1u#s{nES5G(S5i(0}Ct z+nQ_gR9KLce}R%}Kb{|K`H7S=JTL#a9*AK|HyexW?a;5y@UL&agtVMB&O>qDv~KYv zB`20L$6E_Q|ARW@CoG3ntc)UC(#n;5B`S>HLL&PCssUcwPsBBx4aZhdj9JSwgX-DO zjT>FWpOlROd1qHaS&^4tQ)sQ*OF76ydogVrP4)Z!RczEkQT0bWgk$1ZQr>0iBS|!A z6>S6%lWHQpYb+7&)1y6@2f`AFfy#99{yXmJUUSQhQKQU(d>E^f>%aM1FYTwiTHhi6 zmNJFfb$`q)i~*$)Fi+`$pTljwXWl}$WQ_pIozGT1Bl4BdF9cl<_AEg%# zpptn9NmipdVOJ)qXYrxD_K$LqgcOzX0vb8xfbKG|rb#vyOz_d7x;foafD*fip3Vo||`c7}YTwm{^=CP@y|&k89x)51H@G%Tc!z>$!F{wa-z=Wi%!x4t+E} zXydw{5iY?z4E77`K>#pB*0#b%f+iI{Ib9T6r>O?!f!CDSoxLWlQk92unh;Gut)|hG z?gh|z^w4++SCQ`VdJU64;@5c%j4O6p!&3iLl1l=7tnKj1urSGoaHH5uJjbuM2Y&cv 
zK}G;P;oz6IkzAOpLfm|WUT>=!?I|91t1-D&s-#+I05H6doI`(;(NguV)8tq-t4Quc zCw;71?CRLdI4Ab19BziQEO%lFPS-v+OQ`^L>kJG}#T%{?0f)9e-&{#-&MGk(KH}TM zOug4N{hXU=0&le~*t&vUV3nf5BBY^U*^AMruVCe%P!_JxW6`&f=b`yjB?oh$o6e+x z%%l}O!;c7F15s~B`rJWGq3gra)N^^|tUy6w#@Lz?6C7jq3|^ZZ7?EI7097aU5IdUD z8WkA3F~Db%yGr7j`j?X>AM7zuYp{P?j|T@JcV6p*s^!@F_x$A(jcO4q(%rTcFaiwvUDK{{I^ z+}{}X3o0QG?aIY?NKh-8dbAcOiN5*O2v9faSX$S-taf;+x5wNbv~leSIbHG4u)J;y1A2Jo-oOC6+i~ zTwEutPWlnKh%ozkzXRHR=wr4(yVI8zkK4N)3b&jz5uufnoV>3X#Yezeb(4c&0|169 z`QUdUqVJyE0ZfTv5QbSS^4x>`I>|ynxc{+1%_?wqOi5U1oWY)Q{J>h=%$!4^%UD@q ziZLT&c7DHqcjeD+UPLpUIlv1H15!{ZCSr?G4=bKNbz~H5h?KDad{0+eHuYro#HkGB zcCRt8YlS-32TC_qoi7MBf49x4F%rY^W@l;$V<--AQ}bE$GaXVU!j=L=26Qf$rL5i- z4*P2TchE+-n_I+MxR)HdqjfkJNmE#qh$jB+1kyqq06^P)`8zXVYJ>*Het;MXpj>Kf z&N0Ai#=uZyG`*6!y1yF-C-mEb(k{CDL+XEP`{S10R&3fxn641QU&F#Q3<@ zX>Yo+tUcb*;pc4i=BAL&(gtN5WVSqgsVWWu0v$N}TV(CJ{*W0?Gz<}*nL32||K?>P$T7IaY5iHe3EOB2&FRT{V=8Q+7(dOS9Jcx?$vJ&Q6*$Ks$YW4qQ zn8T*}I+w$|i&Z-q*m;`KVR#L@P}#3ud6cJL)A7q;@JP8@Susb@?e1|0YUe&13=hjl zGUG!oyFwurEiwxxD?8IiVDm_c3Qyb*ZXahtbzB$>2Jfl!41LD4^HuK>EXUXVMhiG) z%7eZFbxLqmao`WbUnY$)v9s=lN$ne_gQmiKY?o$($@f8Koa+_PUDvaE$-Un&hw)jo z`3C%!Hu%b4!xAP|YSCeHfJd(U_U%!Dtigk@JfTNA+tlIlb^w3oU?2wCHYE~ew$S&} zcsf6}x{44JJZ)79+Vn!LMotquA^3Ck`lniq3HfcDO1Ru{Qd$(3YyG#Hx3!H1NC6I# z7VM&L01kfs=AZ3P(cdykGH-w-8R>(~&y5_#vKP*!NxH8C_}J674Y2C3DZMtMhFf1Kcd>r>MhexFVu{_p`!o2AG0p2WexyhNLIZ0sbGqZ4vOPI z{|w4(8I#L~$Z)2P=wH%6xv46s<&L{*x?RN`T>H>L12Y{p^y1CqxKrk7?yX84HQ6$Q zxj}k{m*{B?(i)|1`iXQ5X)|T9MzP%Ytna|B>x%UBz>_IeM2Kj4k#&;;$i~uz@CgY7 zKnMs{7g`)v(TL5IZ`TFqViczai>PHO{}^Z=@g?~~UM(1v=N6>*zhh-9Jwwl?sn(aKa+y-KZ=ZZ3M zz^^%e;F{E=feb3|=BOayL0A0@g7D#$bEA3Bve<#)SBr8U=qvqHj!ZX?MRB6Bgl4OV z?hcaq76CK7*_a{G^Nj)ge@P>u&#ll5)Z2DxB}|+DA24vhx{V?kIG8kZMM%E|$z+&R zDpvTFj;0&^e1dRhbC)KK!jF%dPpo^5eA2-Sc(Yt&2n*o**m`$?Es7a-GY1-`D0w!2 zJ`ZBLGdD_)ULtP>LpuRuyIg3EEY}jC9CZ}cd~6s zF~Agcw;R>No4*EnPH{XW`yumOb|DIjJOUheI))_jMaXrwSF=R$9LlJiMf{g6iHu?UVKZf}-9l^VPwQZOvWF06d z(hweiYjOCJGU9hv2*6OU>jFj-us%SXzWliH=3PTeC!P0=DZk^be+bQ#S#g3+gN>tL 
zF49g*szN=?P z&>*WryUC&YJBx{gf>e-Zw5a*9>_E5iYpgpP`0l(K0Hd`m`Bohy*Q z78|{25}d?;l{;mR#)p#tPGjBIM1l30)T_;gr*4|4&99$mL_|cLdF%+pS)o`kX*fwJJnjXko1N1+<{H!d^(-^^hrucv80%GVg2mzmPooW3kY^!7 z0V-!PT5+QSi>4e81urKCq2AX(S7QqEhP5q1gLrGt-l?FJtTHLci#zuz#?C#4vp@j` zYrcKV1)hgNlV`H;ejRI|L6H=;B;L1a7!Mil@U2C zDc~q_h#6ZM;E<4)l3l#*PgG<-+-dqe?V)N;y$}- zw?3ao6B9?8bg2){B_Xcs`3!aUAYnBYc?%=G05H9n(*BsV@yc1m~H@cP@j ztmw}ykz^jZI#BGu)OrX8fN1{rah&=4H7vU-F_g5T2!xE{;8FnrFyBLQmtB&zF%W)% z8W=s3{mlJ5=h9P(+R_NR00xXe`=JQyTFm>>^C13KXwM={x*7XnGPicBTn<^1{3|wE zpuQgi2=z|D8il{nEo`Qe08OpRHLM~5-|JD^N@k9 zEwgGD)Hqo|;Rdt+UjZhIoPm?4g0IgL4<;^Qe71LS$h#f0yn6pSB3G5sX`YQHo^W*) zSmxn3Q*s%z6NCbqWLegGw*6MWP#)(hOwFc?6bBPvDE%C+(Gbk5_*c}%o6q!1b{%{p zp*Y!;lV`aRsWPjYl`dBaGgX8ya$^oH@nZ@GxIQ;9ikQQ7h8T;&H&BB?jL6Qd(ZN``GPY3gTtbk9@EIa@T*Yf3BAb{%{;T5k2ow0=SH1=6vTMPa*-@6Ya z;Oln1-gZBkrc{adjdYTK@4ZI!X#3b55c+-xOzM$1e0Btr=l5=Y`Zi(bKlU5z6c~2i zV&0Lze$1Mn&Pmz<@}Ef!Dz2#Dq~PeF59FU$mzk@PSh&UuQRKNNLVhw+e|DaNp#_>p zWx>hv+T-%vA6dF?r8vOgfUX0J8G8WGd~0r2EVUIOC7t?1Cu~nXLD>Ggk2&CDWxd)h zt&r9bw!Ky?LI}8ZN2=&q;Qdz;-NW%A@q5=F`JATx1DW^tBQWr#Z;Hh*zZiwcY`}uH zw6FmK(PC{YXF(a!A;oZN!*~B#KjI;|xGbr3oNiYG+=x`{VDG8>O>$93=(@&Ms^AO@ z4!B4j&`FX8RLZONPe~B6Z8xkoCBjMwX`K{~_sgn|6|oi2V%bF$`I{I(XI+oA>bya~ zf0|@Uv5yKO9>MWI`*I06@UZ~-=%e8r&6~fL!7B(W`=PtXk`2P}dPO>;dG7pwizyiu z3F3QDP0UiWOW9h)A=%=J(Ih4FYPxoY5DW#J0Zp)9F1jqT1$F5uyyFaG=Mw70%)q9o`gH#EOyt3-)O{c}51D4CNbbB#Xz zUG=Q8g6b~47vT#`4X8nZDgX1cO8*@@3o-RKc5TpB+^SWdjrbHWeB14=Kv!yT8x*e@ zUywKmn+|xMKdi2|Js>t+1z%MZujgZ~U2Z@NFybGA1hAk(=;L@O8?#E0Z`(aJDiAz{ z+nDRlU1C8ci)UH%3$lV|bc^1mge8eh?i20Y4T*-#lKQbEpbMz!`eUAal?B;A_eGJ$ zaQG`Jo^%k;&=%&)7@V(O>4U_Jogx1IJ5%=<#Yu+!-gz1#;cAgh6LkBo`-w+aPRm^P zTJpgD7;yo_$eI9tivZ2v4z32)jpoTJIS1F#`J0ueZ1Px*T^G>>*CX+LvJ7~_(%#HO zML9Iz$|~ks(kNU2jTn#M=eTj5k3Nfl$Y11!qIom=+_n%k-N5*O{{d-=8kRljgw4sK z5Ny*qZX^`eUkP+lF<){lL(TY8>S`2y&}~c^f{)u){%)yyLb`FmBJ|JY5$qRe+a`8# ztta;FDW$X{l7#i($=YI;GZ_g`&2IJE@fKf{IyYR^AEa5bVkQq zwUHPmff@tdiJ&eKL&2^0d1`|*Kq|9)okc}zNKfYAu?rhMb}StDwt<|DIBYxi_n!H$ 
z{sC~+(EyH#pO1E%{#rpDhb^V=1zl>os(M}5u=2^s1F{r&SY#lEK6;AV)V6X6hDue{ zv(k@0*ReW4E54B7r;pV2xJ+Q`Pfi{gp#oLfv;sAnQSMgYsk)<4bG znj+1gDdfdTj0-|Hf7LYQ)$U_A*b?yS9%ssmYTN6LhFgmM%Wscm`15u4Rl#3o*lg7B zAQhmmn*Vfm{?fsJ#TMj0AR9r_n^hYlM{J|-Gn+4iRi_#?{dGTJkAYe9m2!fDDh%Us z6y64DCEZQ?%<|XOp=2-G1RELU*K}#hiH59B`$Pj{fHAFT8?unLgIiMwYOnwo{ZSPR zaSPUA?hW@%H8n+z8r$FLiWx_#I2gSeJdP`TZ=mPu_rXy+O$Y2o0^gNPGPV9gks(ol zC=+&-BtYT-+1pS6?*jW1>UkrTN#4oq_8ZTds|KE{2<8_`$Yjm^P?2^UB&+o(VI05A zD-Se(;GQ0=v{)83s=fOotF&kE_!btGzoCdf+cw4i6SSVG|8adsrjs!6%r{FS%K;SX z8CX{+1K(rfr9{`#z%}Tznn%$1Z|yJuCy%OztU(6a5*%w>bn95erlBqW1%!B5oz6RH zK{sj&ERyM^qdgT2eR}Nnz_a4ZgSO_DvXZRSq7|{~loNw?7h5Ou{r!M_l6^{rF(9Xn zfP$#^ESaq->pZftweG1eur(4*^5*09QanUIK^fQh@yy&8;+Ks5Tm*5N9kzdtgcij;PkVTni_z9>~jvxOYSCtXkP}q(}k)9L8voO%F599Gm-)$dU=^B4nHklXGq5!NUpWO9Km%w0vbrXW+qy$%sKajkBrF*WoCVkk0Q^Z4TzGMI zPI89u8BLiVHE~li9%^a&&)Y)0DG@u|LPVNqJ1IERZwgCF6xGqpEn|FkCc=9>Nbp*v zIv37Q zitL-cqQJg@gTIU22mIYt&2;mYetLS_bcEU}tM#%ok#+IppY8o@C-c6*kj}9mgqoX%v*|i$kV+_<~*dfsF{Y*QEB?4yqb@Dl61#U zEM(w$7z<5UUsB!L|9pn8ZRo?K)Mlv!g}z5nXi96TS>q>Ef+_N6CRdpTTscK*j_n{v zkG4yXGa>rgB60 zo)!e>^T#hYhv1$IKT&2{Tpekuy?anL1M%@9KQ{r=b!(f;bZ=<%ZpN-NuYR*F)!oMX zR=Un+RoQlbU%|%nyB51`)CA|4Vj(T@Sj(XpkjB%>5cC({$6zhJoC@0{Rix+Tz$#KH zp(g>+9XNnPAS+8wbiPqqvi2Y?$=*LU4#-(VSm9t8V4|g9KxWs^tu~N=3Z*1bK;M+k zf~J1mViw}@CjQN*&2T2CkEIbwih+?tWPuuL4G(Lm&|{Ep%J7k1#KE>-0!2GteMGuQKa-zRseREu`*U$aVi-u;sPXDKoUE=`KzYJZ~wF-C%>HD{DWkzvLud)mXxpJ zmuyza+z%W?DJhXnaez9fv6lUVd$b&yyPg|4&-2uIouUK-c&}7fIDaA#0w_tc;vOZ` zfc*@0j4BjR(pKmnghoEW7oRJ;cSA3uDbzi;ck3SNx+Mj`%cw|YO~;#=Zjm9JR=F0O zaP96jGiI`ULca!2kEVakNdE4sbw21B?K6!v$5aUx=czK)sv6$L`(K(D2F^ayW+DHf zcuXL;P#M!nb$O52#|}s-qy}D6#tkqbiG`znI3_gfwDSiRS6B9fTdmaPe@7uO2|<3j zo!AdhgrgBs9LY}pej(z0;mv4NF>ga373lG}tQJ!ihbH>jWgk>~Jx7vqc&zdHSIBhh z#HrrK#^)c4Jy=uAi`oU2RC%x8J0o$xR_vV$S*e%?DV@#klA;>D-02hAH@%J^TZ+^yUS?T=CX?|p2Styj`7WO`$PFmtdg)NM5R7}Nk z4n4j;pc?P{dGgY6TmtDdH&!7yAtc1t*$$2@cZ{mU*)Ilcguo3T)v?Pv4i^!4jRI($ zw>J)TvUy7i@Y%d#zAxV*u|$P$aMV75S?|cv{~jk(uT+6Hk@%NWp!}EaW=fLYy(y?W 
zg~g0wsZ#_U)qW=s^C38+l4?l`(*ofyYb9Y4CK)tq3xp31x!g9qtI=(qO5LNd?x*;t z=Z=>L!uVy6F1Ca8l)vvaOc)gku1F~4n5+2T>uBypkK09b)?_V7Rcvd!Zr^|JwrDzq zw)uB95Ko4~drQ3HPK{Oa^VqseFtP4k zXYlO8yrQ^epesGjIn#k_ZtTolO@W|jS7fu0JyJ~9#DkHI#axcgZ)i0hN=#KB!)gha zhM(0AaR19jzJuPUj`_eN=W(=NdcDO%D9Su;Zi?5E>*VvjmU?Afb}mT5YdFnX+&}-c z-QnQ`%5kAMz)xp2EUJ=#VNs5~p>m%t0PvMHWf7e!vMk3*w*^zUf7!_i(H#tz9`9FZ zS9bz~r+y5tFW3hGPax|W@!fnLKCL@*`Vchltp~;v5A8|knvE-!C!)7FB^7PN6Fk?M z>P9#kwwiM^+MmxUH-daF)={|Nsy_SH5)cXMYNO%Fe-gfy0V&N}A{Fb23%`wZ!U)I5 zzvy)Qym98z<5tDth(~ym~tP$xtiHG|8s>nv>dIz1@FQD~#Ug8(M%J4BW8Ll5 zC8KwN1FjC{x}tbCE4|kJj9mB^Odb}u7I&?On=?#TV^f(K&LNI$Yy562m+-4(3!%Wm zy)_37oW5al@Hs7AY665f?>q(Hs#F@3!56xIeMrDjbk(XD%&TR7S6Q<%KdP!t%_{J} zB95X%0MnRbS8&#`u~wL3V7(+pylW@n7}8~IHr7Mk@&Ht**G|jOw?b;>9^OiAmJGqT zz<@JdB@h9*HI%ewiH9jXv$={Ux;M)z+~ZV~ub zYvR3adv`W(!4C7N3CqfBZY{|b)uN<_Hjd8=dkG{GVJKj0jwkf#fQBFbkp;lGN!pHs z#B<0PJu7=lW=h2{CXh?e!rQx2sLw>ZCkYNF#MzQvL6J*_R6OvpN4i^wn~ATnQ!?ZX z0>l3D1wvAAD(U{v0VSJn7+R`3{_@;-1h7^XO~PzVX@tNy9}C<~KdJX8%KB6bECvK* zayAYyU2z5+^q}YXk_Q6tA|ghVmahzk5?S}3;>}bRjjw13Ilq9dFVfp}SY<&s`k@RB(J}EGA)cw*oGcBn zSoQ77&}(%X23zcO;(q3Ao}XC}7)pWoRDs7)w%+(5YH&!>QK4G0;KygG;g`~CKudAt ztzs4y!7I&$_=m`O&5$u)`zOjEO$oW=U#au)2#DmG!(rk^uO_8jZE>0-igI8HQ+be0>!!yFT&zo0gi z2n_r?I_W&~A`5mMB8i#Z^|jEaNV{ww@L75^1R)Ap%!tuy|v?P5x4Y zqd4`^Whym0{6*GpDAO1e!vhYRFggatYNb1@I1v`Foxn&L`LdQT|2+);E9^cNButa% z(=zbKn@0Ao=>lP64O9Y65PTlTIf7LBFIbG^SF3ivwst*upZH}vbr}b`m{waMGOqgD z+Td(2=eC&h_gMwH(4yja%*deomb;gnjWcfsc4dsCq01Dfd(8OvWr^*aae?pS>o%_1 z6>qooFo*hN?z{<1%+!V<`G|EaGFelFG-B$T#5P9zA;gjGjU$sd>HyCg@ z!L;_;@$zI~I=|)F=^|6`+~TSf@3Znh(+m@JQKz3RP6~j@s$={@zL?c4(#Ccs=jg%0 z$~WWhxJn+kGr0Bldj3Ni#PqB^0xO{9v}9;+6v5mZ;!CdWvg)lEt@N7_Gm4d-$#QzZ zvM5e1=-;5BZnw#6@UPZblABLeGg6vw|Meh%MGaP9yA;H7CE0_%S3DDS#8-99`dM%7 zAbg*-h(69(42@ zqV}fa#+825&(|ty64%1w;cAxc$=z1TN_<}B6Vp&rLo{ls6Q(tW&s20iBje+$t~t@% z%CHQo8cfbLgc}vpSi2cSZ7WXv>7wH6WUJ7MmtbUI{n}c25I)XS(28$<@5X)_xCQVg}1ga|fEZxw|bHEM8AgY_|5?&ob^tW24Q~mfr 
z$vIS`Bd+vh#LH___is5I4V`Jn`k!Q!oMh@l_YpvE#KYRC~oR-kl1rAKF#(zyPfe|P&TWe8oWf$fesaMqu|GR{Q0t)Uip4W<=T*i@2 zb2O{{t?*;`2_iHInj2#|-9u%*VnJ zV@Q9G#HMlGD{8~O{W6c_{XVIpvTMLUV*^6LRTc|AbIYPwtZND^M5A-Qcya-3z}P;B;` z`;)QJms;q1QD7o9`6A+a`o2gAQew%}5~#CY?4Hf87EW%3Nv@A3ci2KZ-537#kw!*) z#A%Aeng%h>wv#p?-_0<@3`@;(?r+ZaK%VgqID(2*r*XNd1 zuu5eQx^^&bN=+yfctk@ul)rVIHPCShaRB*b$D!+7WD1|y=ea6y+U$Sh>9|=hXcLx2kKZ8D^&6UhnettkpB6 z_=83^EWLyX5!z^#{KfL zy*(-PYFN^fL)J~bYwDAE|KOkCS@IRBPTm2_#abwN)8Qm7VVv}L0N7JhQt5x&);X`y zoeRaXY*gLN$2TvIQyLe&+{7N&5S-0pX)5@=!}pyUBk5X^*o^uKd_qk8C&C2m&G8Ct zqV9Pdm5}$h-O;;m#;xu3viFD=u`sSPb;m+h>_=~-cN{*rDIhBcQqF3_|XkaG{0gP4CDw550{yuw%{cRbN4ZMOQDrD#)!RUq zR=op9^*aJsrh;PxWuv(U6GUzGqe_c8>e!;1M789r5zyq zkEO11w_WPEui!~XOKP7jf7?Odj-9{oBiHu232r_E4Q(>E$o^(kBekfYRy$gl#rjhG zO{09Nx})j(gqAkORr~m_3Fzy5K8;`D;vo!#BqqHSB#Gzz%^`%C-k+Y;q^}@1TEC?! zui5pJ15VT_zp#>0zvZ@GRzd%S&gH1sstg39+2pJ?%M7$n4eI@dq2 z>ab6%hcVK>Zq!&t!hu-}xC~+>CsZE)mQ?Hp9~kIB$%#=R^;uz&za<>NZ~9GdDuiR$ z>zQV9a`U3qdV0@9W?>ug(&}Ke*`N(FlJ_!=177@d&hi2cMK$?Ly89PE4?kN6A4{zAc%+d5`1J1z;qBK^&3PiW(e+P6MQ<& z;ea%m^kyH}ns#Thgwelfvnkh36X0j1s~@;FwqNXDPmoe~l&pFggZ;8+2&uS0OACjP z({-F$YGtaD8Rrv=^to=h!TT;cynVqDKL3KE6f@}I{8L3!b0acuBZD+hKB;uT4k3vs zHiZp(3DaNjCeo-~=xQy^%BvlB`;Rt@H0m`VTb#Q&ul)*w?%iB*^{C}ds(PmDhMyeW zO&2``-8g1Jej9dR(B7BiO?+BA!Z=o&<0?r5fq)Z?p#05Z+7=T`zd-s+qdF zE77*U)TU(t!5GG#25P+N^(WZ7Q+SAI&igL<5#?@PT9CFM<4xB(6wWwfADj?aI>8>X z4tdn{Ab9FnBPmdY9ZYHlLD9l)6iTF(^`iR5);T6B@rVHJsmkyXLNUGWH_NxrG8B8<^AA29QpF#lKtJ zeY)F3AMZBLI97OA=LLTufIRA(j1P+E)+UlK-yynGs@cN<@6qFd1-VfhB(z$ zu7A5|tmblKw6#h+1uH@srdE@q>xBrn^51VPe*E zCN{*@Nh&5-^af&QFzW$$JbGmDMa1A#oNJj?n7Hds7pdQjM>6}#a3!(^VOg?L6if@w z5ErUV5sTxhCMiygIe>kD0A(W-mC2plkD&75SBMIemd3*n&Q(^~C$OjBs;&5UT9mR( z#=cl)Vj_AdO8V(GDLU55MH9hg^&LP&cc%QwMs>?{=rjtS8(v9R`PmVbFfd+9+S-XM zyT)n-Zl8xtp4k`@^#JzAPGofFd+8C~JK^d$7{B&SOP{)wYF}x?c9oYhoVw(kxqbU& zih3%7U@-^WDBR4VSoP99#=EH3kog z(6rd>JYvw{oVF)Nq^C5p50ea#*#WBJ&q^gm3`}w4s(hJ(9Tw2}Zr@RMGq#x1tm??r zUH9y6=LpfelmneOeGq(Z214kF2TC3GVN+;#Nzt)W&mLz($AB|B=zi)s@56!3OC 
zeeAI>w5&|ZeEGvv(4a@fG;sr}jS$L622OSd_^V|H$4ppl-b9B7HUsPjv9XtA1To-FfeR6C(=#Detm_V+fV9y@K46@}{LH)hO`Y#2LEV1~_T$E@DB z^z8kfM{>{`R3g`EQu*E89&%mASz{Vxto5^0@N#@G*rgXf4oA`oFKCEBxqtCsMV~fz z>|t6FAD;p3eVmnlm0n^+{=}PVs~5O!lw%X+ihl|T0P-diDBnU}MXlpz0m2FxEOgG- zr`c%RTMLY=HnGfQoq7dBp&QaSamMpGtDpB;WKBE<5}9=gsY%CsFnSZylzDJ)+&&Rn zz9I1NKo<~d z{pTTX5I*-RqMI`$O5H~fz;FbJc;k9|JL|`s(5_i)c4RK&*?OUA#9rGO10Dl7cnyL# zsjFTMVbmYU)($BKDw8JUen1nC@Q0v!CH#`#baaUm_ZZ&}VEry;)rJyF7%3f8ACtRC z!sz^Hk));Dgka#rK`j#fRV*tn!bLiYiBypTuUe?ZeCK6lx02R-iq1n2l97R#yMC!! z+TBU(%BPM)a7+!g62BhaQn;XBE9WC`I-by^MJP61K-X-9^u65qvu}c7H5*@)v6yFt zUVkA0ldAg#rUwXYQ@YV;M(K5`jq8jC=}-28Sm&e)r84_x;5;*y#RcxJ{reA2I|`iJ z-^_7mPrcHwjf?j`Hu86gyJ>d_ROqs^?CUuFM%>EwTcx{?SDa~@K9AXREQxWA{|9Ls z0&*lX4XVZjf>jkf#NY{EBO&Lk{E1zSJ!hLeoz%T?K(Ns7zsSY}J#{A3ssp!9j>U}* zfLuD@Ai?xhlE^9$ZvSzqnf^IXK_IX5ertMf8w}6O;F_Sk<@xRh{C#JttYLV+^h@>6sUpYfRWS|K|2zHqv z7yw>Gc|%*$ZwwVqu9NKY&B?$~6?E^OlHXtuYKZ5{*S6%nxKfThx>Uxp^Y)2WO9mZO zbQj8R08^gCnvk)~g>a9T+vg@obruz7HORjAu;G5YNv|C|K&O{2Q0Q9o0wI90k3X+6 z4{;YAb6M;*aqC?}*U7?aAy&^rM!R4T@NnABL4~Fo=lSL;*@zMcirp z<0|C3=tgfQ)gA&DJ(dQyc^Yr{>5=G09H#*2!yq}S&c}BZb>@DUO-X~44aRR^9S)j! 
zHwf5m@qF05U&qv9GL%MZovci#NYj=7YHbVsd4NxCs@b)65wVyXGx3J!P&)Oc)Ht?k zpE>liB8C9ri9*Qp;W^ci(gEwbL{~7oerHyo=cV$&-@uylozUwrkhiW6t-27ZXJ3wI z^28PH5dXJ1+XE-%Y@F@r8j9}RIBW@|@XebEC%zfM2rJeIl&tnURSIDE=WFoDGOMsnqtn(M;9nG>EBTjGpL6a|$_Q8lqI4o|hPVZn- z%SxiQT`H__f`1+n=Y-{|7rlN$oSv30jWM6MZ&r$DN5A6L3N?oVldBhjHB~)$u;0KW zO_Rk^SThQF@a3~M`Hr@_>Z9YI@3HTwgE2dYFDQ2LqL2YCKJmJ%7-s$;QyHn<-O$(P z_mQ{{;~d+s&f{;Zu1qOV3CsP`?5W9cZ`mTvhDB=)O*@Ok=#ai^I=?ddaNd;U+d~HB zoq#>y`5lqP<(p!%F!9e;zv29ehs}x29ftRF)g5ms?lc%6N5N(XNGGTIdeke9ai1KK zJcx+{G~8*-#(w_C7vCmfI;04c@Xv=ew!-b#-T2Ar*e^a@ezPjAoqoJ5wY|f_E~A+P zL#58{Q0Y}t^p+HU3U48rHWO!9HPVL5MPl}aSITaMSRPPB#(05XU{0UlTjB_JH@$%$Yp#Eaqv3g%=DrJX!Lz5A!1d}UxaycZAC)Tj- z-oah!zsDTVn14?q6&*>3Zt)GwC~ z+U=Q{xCsuMc^s>&96ow(Le%3h7XUwb%eo->N|WmQg}an8j?7#>8G;(HHfs-d~L{I*+S6E6UEEnR8!hGV^6;UyP?FTy<9|sRVzb zdF)=`4<+-K{Gr^3%B zzFi3mFhijB>kW~HXpbt6P4OrHu}tmADe>Trkoi%ZGjKl(*yt(%RDu_S-G0u3p}^Z^ zu`$to^=f56I;TH&ZLls7XgiuywOB+ypxJx6+i~NA=tZXb2I+I(CY^h$(v|hs_E7E->)N)G*g?cv z>;)91ys>e)+l9HS-8u2~c9kza-`}|zq{eYuT%e&Dp2ewcyTMRUOu+a-ZD;4b@JlI~ zrwlZd#=A4&KNM)Weo}4h#YtpbGJWpo0qQXjkd7hY*4>)63KK!V9_>0ZlLD|^AozP_ z=dM+8nJS&b@pZomKTxv-y|03IKm_rpl3`? 
z{d(v=QmI0;b#Qaap+c&CNS69Zy_YxuPREXQF0!|Sa-S^qn=1X%a<4wJh?4vNk6Hx zW;%`@f?TvOpEc#wlP#j+&`X$f@yrp1IFddAM9(9ESBf>U#A?0lQ zH>!V6NWb0#`J{-4tJ`5M% zw=pr$ zs-k7+Fd3Zkm4Hbg8ZLv)?vZjDt})#gw1;(8@Qwh72?kKCW7)M4QUj!>?3F@;((QC6 zasAzhe7S?1ti;eKnYV!?ClwPo`M-OlZ|WTj5STu&0nja~2uZY(Ru)oSG&5$$Eb*N5 zeYK#H&@)N$EcuH8k(ucc#d7#=JS|_PSQV!8h`Xn6h|B)wDo8f{rc+dlW(!YEc8*=o zutbUXAfCn4KsUHiU77c99Oc-X(&8N_djp;&*Bc%SS850{{zOo+K#fcKY(A$$b^y7) zU}BVr=p|V$)pgm@k8d5aA)H8JDyk*on1isD_c;V%UMZ1v1yF)|Q9 zSsOOu*E72*f$C|S2Ire&^bIieO%}MgB1SBDS5DpnK#h1$%Ipy)Mk=Lv$*dB)H_Y{P ztKXARL1ESl*ltpU#?D+c33-{U>ixcfY^uN3v1vM07@`!8_)0Q-tj1Z$+;~rA+@q1t zi@!wxEymCKtDh27$X@1qD%N~1$^Xd*7ty!dh`wpKbV zerASVbwx=B@BkM~se!=N&!I?v<>i_rNreVM^oO30l-%6BvhJHGPFD|C6I)u#3Kc%L ziSK%_#*m=J7G9jCg&cVu$Z?-c)u`MTGfzAmiZ!{5zqSGxDUCuQt#8E@^>p~| zYf_1-UCK5t#X2)@F|-?yjMO7`OSLIqOptoxWt`e3bOJaMp^o0lUF}wc4T2=_V@D(J zI_W1oToLu54)Wpz$(}f9YB7RVJ}}1O@q9;6(WlJh84Z6KshRcU6|hh6*Q^0SQe5~~ z-6+4h+B|oQ@?(hIt|x1A9Rl<`juKOB^wm$FL23(C&X~(?U1}w%HPOq863!{wM`fN7 zPf1yQo*k8{@uo1&$T+~9ek{L2 z@o?M_ao{v%_-xW7|1vtdoJ7Tb^rnS~@C111%MX9w&!LJ;)-SYML~)Q)b@}l-A6_LM z@o)X{NzuXuL2)p0_)`;Du1j^P>=ib7+U(VMnL+n6L#L${i-Dm=@N$HUZQ7uN(=}d(Ro2;YfM&|?dv9NFSR2>@g{PSuh-Xjz;8Ko+L2*oKTavJ!P z?3k9W3?3@u2Y{SlaifYpO*u{so&sx<%pZ5FNuuLOD)W7|&ISqRHZ%>AjMLLBaiRE( z{)`TKM&a?%q?cgo0xlM$55~WMWGru!8dK!eJPvo?@9ek{@!j@c#H9_d`S}0(BW3Th z>V+X^+0I8XHc!^)(4Lj5gSm#dfpwE3G4Gu<_wK2>!OPAIPXdy+N{qD5 zbR!d59sON>{Q+ey?f- z^9dvpCh|735q!i;TSc+7XAg*hbk%9_t3?fMlaM|KhvQ4GoJiO{WW}_U=qd^ex&x0aaOfnS5F{HFPN)N-G0F5$B8sakzKg*~KOigNXO>$hRJIV}u~W?X3ED zDQv`?E612#P9ACP-Upex)DL$(P0J{=len`#_gj1_Jp~U6r3N-Jq5p~LjX)>rI0{_L zA3su$WO&6t9(;ZDYk$&WX8W)#+c3R7*&lMX^8?{V@LdHN{uI@UhViLL$2-^>n-SaE zH4l$Q$3Qzt_^t1&v+!`wJJ<;=3}dT255wYSX+h=@9Zn+I7j&Z@R5br;WT;$A23}Q8 zbCb={0_1};O*+>r>T7J+oqNBP`cb^Eysca9^~cuTq1wWS_AJC4722k?H{#&FpJ(6y zgfz<)B3S%m&I`x8@^jCX)_5H))}i|~a-~lnd^YSC%HGlb%$lChifGlwi z>uP&rUtC2-O&ec1?`B*DKlpF%;SRYX@K^|Ey{pk7Jo{ObVzf(z`NbR;cB{BD%h%`H z>S>IeQfMqmg*2WXMVS>U3*~PI1HmtVDI04a3r6!h(&RI12B5vhsN*Ap3fgg+O{rUx 
zGWh%%Z5lWI=SQ^^@0IETi}5;>bcHH79Y-bfRXgcer-y60neTpQxIMe0(-RNE`fzu< z;#o5Qv?fknW(#qoL6N=>R=lTvXhs$&ZcoiB*GnjNV52Q57<~UTA_oC7zetyE*;*=K zXj}-hgrQ%tAnDn^>Xs3NS+9{F#D1J9IZe6?&->J|>MLDGG|j?c7gD>d^w8^|+e6$> zAIBX2@h(h{+p!{Q?$B7oJ#0s-N6T(_5aLUk%?nB%AiNmy_op{vVK0=~1 zJjhWFG)f#Vs`JpHk^2+SkL4z%(}aQH)pOywf?x@l#2{~*)+7qdzS70;C^W$dLQj=uU2ZKQ)4=6S zG=rY&>5!mhjFrl6N4RnKdipM`FqG--lp&nloFW~QS~AtIqwpu`2;ll;f!N#xv(F98 zz-WS51?E-L5UD-#G7`AZrU|$|7_x&tl;~T=P%RBc^29*u-|wUqJ$A6|lUv|iH4iTP zdQV#s=+pw9<;(T8Bo$Pet4~ezr}@C&2CvVt@s@k6H-sd-7*W& zS>4FAiL3ig59X<3$?Yj^Qx=1QL?yU*4H$Ss01meCd!%m)ApXQBLF<7*G5;d$V=-Ko zRp_rXkN4|H!V_@p`D&V3cC`6|4^qj89lOcY^-Gm*lhmjhc}5#Y5pvVeiwvY$kwV#6 zP=qq)Z?#Q_Lyy~T{8kv^jTo>Ixdnvm{>174)dc|pur?x~J!e~o&9LjdyWzUb67|QE zR|S?i*cdjp;#}mvT}975oljdmObFgmCHNENds#4+QpaceF>-Lu_zwnm80}7~6$r{8 zCj)b{Jid!*wy?6h?L^wN$TnUPl2k_xE=Buhk5m3FaI9dD{`OMH4RFLhkSP@9)XNVQ z9rAt!`Z%!D>7I7?q?#5U^`1InlBNgw3Tr*L>=KJIErAIp{_(gvKGg8@Y-lmRw+3Cq^{N5jYaZ{JkFp~ty#QZ6j}P+V zu1x3qgx)G4n!tv1hu!0<|g~+YQRV7c1dRB_xc~Iw(JFBpEuTr9`M(anTra zHF03?@AUgq`oUMeZ;qKEm7(Xe$A_?B7*>l9rV3_m1Wd# zTD>mEE1{x1G1QlDR#$fAribkIS9Gk3rqV$T?{C%K=Q@?Ipb|o@$H&ZU1_`>$%1@ou zqzuG50AdlsZwhTS-Xn;@84@)&{eqzIv#7EfR$i{8z+L6>heYNu_SB719W0p}%5gb@ z!x%w9&5&*u73D65c#q?^g%qahcuXri*JSo+-QfDlj4nqv3-rPt!SfZIK?6F4W|pL6 zz_^oGgbn7{X6><7*67ixCf>UvcQOy)cA<^?nbXSY2YV>*@>FmLiK6VuwXCuk;CmT# zN2|i5S@Y}75*Un6?F*~yKLf?>o96kNbxUsc*K}nDa}x$u%IU}We^1XLodw6j`5`P;sw{7tjmAb~giT|1n<(ZS(Qq3MU&gwsWa@9~2N7NH zaS&0;Jl7qiuH5`J0~vjoWy$;9nGrFkJC~aqWEE1pBV=rFvfyEPLZ&wXC0?P@ArW{<`hRY@7w@$r~+bU4MO>a#Z$1rMd63-vHH)~Vip zEzDhtqV)hdJ3H1oY9rLK4wS)1Yfy;_&}seITv6$cf+1vVYZ7x@5nH1rQ^u)gXx&^d#-K zjFEz=wvL~RwCkwnnuTvKk9PEtOA&F6FpA(X>gg(FrDg^fNR6qHI&sm9Xn|r{8B=Pb zTB}YoIYUV><{x(gcnt%nRSAy(9`oiaXbB$|ua|vsL1jd8JNaHlIGFKET`~GQ0=Bqs zAF~8?>uu11QDcd?*wA`%sp~05H*5VM+8zjcM6h#4RyZLtzyzwJr0)qRBW{@^*AsGC z)*pricBBQqXk`m973t}^HABPm!1g3xp(p_;6caqi3k+pNTkN-~P9`(Z)Kqy~PYJ!} zvV4W=J~TbPN?0a1))EAakPq)k7x;htNzjnKfu6DjIv9tMJtc7n$3WDCj8-S|*S1F}Gn2m5^O$_K4VmtxwA;eujAFt(gD${89A0pdCY 
zraU=24s|2^+NHrf#s}l%`z=A=d>(`m1s;&Bo8=bX@W`Ut3ciasB_%Rdtss_SZ!5DZ zLH{afKTuy@z3|I@8mJLiH{hND4A;*BD%N>5z!GPl93c}*lvn}o$_JBy7k;V6gAj|b6H}XVVz`>DWE*w87v)Z48u~AuZ=A0xtTm@!)}qRp;X=j}k+qlw z&OiUcC3>ao`-(eb_GJ_e9FxiAZXH0hSTZ0AU5wA3fQERBY#JKo7V@Nwm&AxeE&x_t z?!5HEkRY$KN2YQ?0!l`c(tj`_N;USo$+Jj?!5`=}Xbo@8t(xPE&^Z=@hWv-yT%*9v z2x0J*{=U-9Vpg^+iQ|F%9O+eB-zPFoOfsqk#dNnqs^TG~TrhT68^(dMzmgwQjblR6 za|HreW;eJ>e)2w)7%cguMxZhV1;kIm1i3;}(Czj^>}#uE%#;*#e;E*#X?o7s`d6 zQTv+5v5Vo&$%zr;+ruXG_N|g;6aplkh#WL-bxgog z6wV2I6id$20=9r55l4K^jga;sB!!+q`zebkgC}(%O69(npu?!Ju0Zu5qW(*70yOM^ zht#0KC{A>yXvNS4Xd#OE?&nIGfzIDObZy@wkA=bIDhg6@ib zX!hTCrCb#;saBK_Kjl~K!%WVq-dUFJ!4Q|8Dd5mx{Np8Z!}!B^1rtTQ2dY31;gRnVY#=sgXfm0W`1<2fog zB+?Wz=)$3|V{aKaI{o1+&o2viH+sFr#!aSty7Y5=jVFlcZ%ToQwSl3)e8XGfvl5Ke zxhDd&u@i#Z*n#7R^P;YbB0oI~2_aEHJqgJ(uJW0_*O=zzbhzo45sT|9Tps7N=y48$ z*4X&Y!`gXrDk^_@lp3PG%V<7MLr?oWPsxvkJKN&DJLNx#Z_^2Eakly1vN(Kg%IjBu z@prTdc7##onS!% znJQ~0qrm^U9qBoY`?L9gd832$8Gpgxw>@RQ%3etXK$t|q06FjlVmcyV@e)_py5ou_ zP4`hXadE+t_~#&10y=1|#A1O-p6?b|3-?NKAKc`%R+s^4DhPZ0Be4hv8WbNp02r8e zjgyQtO8x1ZbKQtRcH>N?tLIF?A1AZpq zRmU4-8A`{T45{oLCn=7Adb+ciL&axD_|HQhL8DAsXrz%t*Gsc zGLMFZ@YfQM(dT}cy~=zp;&ylm_7 z_0boIH`vZ*_<{Kym^=p%sv`z^#={sBS&lQYEbOltiqHE)`^*3#BO#42(bdWqpW5zA zt!~TPGZxgfX^_tID*p_3seI*^PBfAu2h5-IU{o^vp`~4I!lUw+@IP~b1PWw6#OnCP zonPM`J|OZEBW>S{o`tpw+A@sZ8xLI$RVKteDfG8vOce)~|KAwc6!f`?SITT8oGY1T zF_h|r7i+{eQ+CeeO$A>|FHf9sBY43RSAkn#ApfDHC7)kFzjnX5t69aUqgPo({BH?m zg&wA4@it<$t|>?P)>xW&WxiElTe|XMMt#+p2FYLX`*VsC!Y>LM@ZpN;-_aqA>crT< z&$4j@mPI0gm-^>G17dWBN?8gszcrj|@~@+3>`QFkZ5_>>3&oDDfz_l@#Nc_M&umrM zTs{0%&ixGo(!KBMpUnVXNi6}k-WOdohRRjE^81Vg9S0IjA1B$rg8C|=nH6mCj4dQJ zND{Q>8iqpY5`K$$kz}=FkV8F5ID{H228$rs-~5mHdK3GOg*=IV z3#CEiUw8ak*kHTajAGiMB*G(vgJsvw`I9aZZ(rZdJJ`_Hi1M3r{;XB!9acJdFjrb z+z{<^H@=TsQOy09T?6VAR+@b2K*`@Mb~rFi$?!Gj-<$$~66O|E#Dle?uC)gT{Sz$HQ!6Oz(8j?Ocx9GwoXY)prahSBAfScOe4T~hU!$j?79hia zATC4Alm?PzJhf(03d^-A+)IhO2OS8_uUn#LO2$u3(o3w4vc9fVMchW*2V{6ZVDIf+ 
zV-L;>W2bo^n(h?{Hy@we6)D!8xGXL&6LE47{!t=%zJjt`A2N>Rw^n*f(Ks6T$bDwR)fx^Nn`j-|yAe%qnMX`s}$IIO`d}03@i*B^Ir8EOLImC7A%` zhv!UiTNZHLZtfQb@_Y}360fCrt7@SJQ<9LD=FMJr)`44Gh zgbij(2aUGASO z)juS7Ug{D#TDh}dPC#p5DD>ZL3;(`Klo+%yEwX>U6ml9n0}?1?_2sk1|HmXfpHwFa z(op*Q3@YI4SQz{n_WYk85QKPx^c(L#uKb5!emWGDan6uNI2#MpptC>e%m4h?7eQc> zP#>!=0f?Xc9~%DqgdbEU3P^qs)W5vXf87CqoC0~F{zKS*pWFkPfz===xPSlJKj#WK zt8f4HGyl9#x(dL=Kmvr|;d%dk_*O$?{;&5TAO(Ot%Jr{~f7#Z5ZBrb~HS)jy{6E&e z1~=EffcxL;2N16B|KD_e-kpv}Aa3;G{3k>Ee~Sm-ktrlU)PKJ7m$CH$G7Z1G{Gk5F zeL(=}*8jPIkZ8XtfI=e)D&hZJzo4e?*nf#J_>;ci`GWU$tN(NT;4Opw_fGwD{{L^; zfAZx2zh(a^ga2PrCg@{?rt#myi(paNB)=X$a1r4(%BZMFy9&6GSeItzwtakJ!2Y-w z=Je51(MVL}>;IG(Lxfk|)vNylj=_pe+WTueGnXQ9`26=*eo+pUhzWWD50R;4=dtLK zUDTp4BL7Ru{7lf{b&evt8s+c_srMSKIIMMaMfcO4or=apYHl;$d+wwg<`57s%I*(s z7oAMLP+X_XF;b~WZAf1grSxSK+@*(E2ZSqAJ_W-C3}Lx@UH+%k#qO$vXP3R zh5TN{QLGZ3ZCTi??fU=xvMaB?!x!To$3E>dmvx2u1-Y@M7Gt);ido`8-v49O60Cjz zOrn+CssMUgwzRm*J9!i}EeolJz@i+VQGTYlc?DI1Cw^;OzWKKO= zXOY}b>v?PszbaqWU?LcKcsfk?R8UdR==-L;EJ9%m10x0vW09!GgG3o28IJb_pQKY1 zsa~Q-rEI{4YGC-_NM$RsIOXGIrpkzl>7ctwtjr)M_orM+*i7_wB)0Z}?{lc(QCY!K zf@C1!!qmfzgX5nCyW3(aADT}*m7+aI`L)b(cxow~*-f9GjZ=Hrt z7$2-h=G-?z5$nn;@XHbhg17=W1pYp*57 z^!*d{2jLb*4Xtz|BkkiL3ud19@$kqXF-ZUGXk-Ra95V=lLPJkiiv)ZdX1_&7qOu!H zJ+d&Ql!>=&5{_yth>c^FF{PjYRdpXq_xfoUa@Soc=VX7D=}yB2P{^4=8gArJ^{ecG z_paS{&2l*8|8)`IvM;Ipj=ks&liZKFQb){!_!l4!>rhe8FE1##+0BA^uXGrZL?Hka zWGDoMd$EM4dnf#v$O2s}is)THgZF)1ZrgoGfO^^`0<$M_K8iIPR{C!qanHa^*>}24)$ z5(NGC0R_!jay>V zeevsp-r~ZP68b9pJ42Q9x~%0W#K{fl7qvD>J~d6SPrUD`a(Ig!raqkt#Qk!JLFh&3 zzls_9)f1*m!3 zqoNb5o2~}?o-HSlt@9FRY&gw(bSFoTYFPKuYCrU;cdaWrd%qogFQgU_UuT3m4KKE1 z&FpUZ$Q3%q__1r(66oq};P!gFvKda{8#&g4Sip}k?#iUM7d^@@^?vhuM8dC6ySQB4 zL{m(9@y<$Hm#w8L8$ZcP<^6RJo-MBU=mn=P=j3UtNZ7i+VvbNgsWUD;3y!}A(=(Nf@`gp8we|fKea&fIQ zoQ8JIWcStKfmyKBy6F^VZ&N?xLCj$Z>TkOYV^%HOY3U^m@j=-tE=j$g4|b=nMYkb1oGKq#6@n)8P?{tm$|dV8+j(P zF^g(vZ(OEb3~a{nvBvCo?K^R3z73rsxafWm?+jIS>*PhPgQL&du;OkdQk9isV9*Nb zn>?h8HO#Jf%3ejedb)%$1`g4T6v+9y&j2Ui+A(v 
zl)8lUnM6IGuLkYsZBSZW7IA@s`P&0NwE?_WoLZF@+gx)6{2>$AH#?TxfwFxzf&%@N4%YQnebIFa8vnCvCf#p1{zXZSubKid;LWC zgl?Epb+xG}JrO50$ONyC#5lvu0roj7vlL>no(cPf z?P`noHjC$&eLFkoSv3adb3wWzhq4@gwSgLjDWCf?v_f>oE8F@aKBqAIg~;dR8F0%4 zj^!%w|D4sX7|`V5zVjVEc*W%+b#?{agI-xG8IqraKK+5suEv8=Xja5- zf2&i$`Ldo6*2sy!_-FJ_Dx~H&2>WWu(bj1}G4m%oWwyQ)_S_jX3>{0Lw*{OwS=xz# zv$MmeOeMsNHgyo68RB@^Fq64%Bob9pb(@SoPtXg>bW<%kE;?LxeRdm8D*E(uR-+`= zdZ#{r8I~}@$DkDfMst_OP3s87DMxpq(zbG~cUJJIX2@Vrq7S58l6K@;cT9GE8jr47 z^}BM+ne`{^uo&Xiu7+Z>#%-5{TLJmb4x8k{4>g39mzC2-ke4FxUJG@*QE~@6ls|){0G7 z_Ja?CKK2M{JVT~M{G#wti*TvoWISUktB0`_4ps9=zt__Q@p@JF4dOPCGLL%6lB@Sc zVI7Q3&HVE4kZqWE-DpJ8ne06zO6~>T36~+*wy_i4ns0iP74c3meCoLGWWF6gO5l5= zO5A+Ga@OVR|9(@50X5@vCH{ufrGgkUlHY87`u)*rZdpoj@^&_$#=M9n^$AooUqpteq^*Q@iJ;E1jJN+(BXI>&>JyNZR#j8X|Hz6M1I+|1`l6Jy`lCjYZAtmnq$wX zcKMd$xS-Y5a^fG+OlW}M zYe#nCl`MjDW=3o~GhVS5`^`@Ud~15OjOor})i3NnmT5B)(pYNjkV>ZTdkrObwT9CV znpXI*Jtug*(?09SlU+#ESP#sL|7sh1AucM;=PD$s4;xQ{4)TCnJ9nA#(()U;Ws*s* z!H~{BD^dF)6~AhS zNfkmDKP#xqMTVde5l}#d0waD71Dx(-6>foN{ai}#)p5lsb{W_$zLShHrTnxjd|X;X zQ|g>`U3eCKxygSK&T`!_pa)IBZ~b`*!;a=H2r*7MBj$+06>h@m9{Lvf9hPYmAF_l2w~7vju0PzC$!nB1_PcyLy)RD9Usj15 z8fJ1bB*lhT$6{Zb%3`He2Ml4~<{vL{Bi{v|DIiuJpS2^Av>p>2wA|mysWTpB{Fvn7 zj`!ASy&dg5jZ|2{9eh7Yx{`INShjIDOmI{qNvw}p%OZoF9aK=5z>bs35y)`p-OBdPs^d)JRdrCKcy;G=p#Ay^X98TK%k#W ziVCUBIB~|wkITVLVCHncN0T`cSdEf|c$<;DeH)YfMm#}eDo1_qobibfc+|<~ayv>K zY^)BNr^2|lNResABr??<=OgXKl8U;BnU-}Z?k|1<)(PI%JN<00wi!FL$5M3kukX^< zm=iWhjwG>IphQFVTUV0XD%y@r%JCOZTCU=g4K1%PNa#*-r@8vs=mgLiL|>6 z4kL5)=$V>@$i%t^qNWuci|={WNW)1^_2u2heWN8XQCk?UM(zw}%{{PxEu#sT%LEW%k@f#r6w~ ztptX?rNXVDr<&h=7n_{RKR$hky}~_uV?unxg4x_IKg92$>2krVH)7ejS?`l4KW%ti zPnRbbx2e%pJw{aLU42Z@$ShuTto&XMre%7%Q_di>$ap14fj^b?5OdfphJES1@|DS^ z-%E3Slq&Qz!ol`2zV)~86{$F_HJexm>t8gkddc%jKgYZF7+!cA-^_0e(P=F1P49$x zNqh7dVH&Wsth5owqo5A*4Cqf8EHQ=R2ptu%ywR}vq}bUnBFA$xz^}l$qG$tRPpArofW3x zN7IFm$()D{Gqhy2&;Qfbd4)B#EpeQdAialNbexT0AB7ndhf$wf9r97YtNc**35r?^RV4l-@fL# z{w>aDmh6V3Q9ENk3o>gpQaHpOr()M6n(hb)%H}2<8jdwRLlgjC3X~4$SAf)2$Q4RK 
z%b36;OpnnQbNS$kItj%ko{99(8}#GA&RpBfdp1~9n}K}_?i$D%H2C2*knPK~dnviZ z-%T;)**b2u%J;2h?e+pwr#a_dBko)V${L3?Fwj`U?^c`ePZgh;r?2y3q_r|HrNSMM z5MDqnfvwuAn4o|nbPz-zgq2)1NB(tCd=&cd(PdAopv!bexHJV5FSx7R{zriNu~#vn zWwxR0`V2UM1gD5EXB0n>Tsk#Mm+tJTE_o^)I>WZU5U!H4%EqxB$`NXSRv#wyeua~8 zByXxXDX6{v*FaQrm*rBZ$L$k>Fkb9BAAAM~pRD6~NvCS5{-V!Umg?%T2c44ZO`*L} z1~H-WgAja=9KAQ9C7f}ek=e4RVjXi-sWU`l)Ea#?-)>p%cy$6=)7>LXtX*+Ut>tMz zcO%|j$}$B$wbJKUOxPh(;20+d%UDU;^gVrkBS6da?3Q^Y8>{5yPERbhG!|Zi3p_0` z?!cwbg{S@648|%@4u3BT*K(-XNCsJGG@G?DH!Aje;U?#fp<7kfv-f7K6Gls&7rtdh zJ34helwpYKif5$>+dXRSS?OTLTWFcR!`zT$aaI~YzP^7aSNa#D|HjOV`Xik%40D>W zpA1tls`q=8Sg{tZx!p>MZmpk+hxaebn@{f5WX;u=li+woDrjOb z<%?Q=xE|CPQa@H;`Ic&IV`kSR5ykbCm$#)hUg4D97e)!r?^rF?XAkBh0&tpgw5$ZS zqz{iI*M2lk3Mbhe#GU`lOk@nR2;_VietytREHut5oMcU~NA+C~UK+9E942lP?7q zyo|!+PK-5Am3kE}1x{6&D13KUEg<3NCv`Oo4CV61a5!CF{<`(ctlAy#43XQQc(!as z@eswwLQO?Niw|~BZF8QQ-3q#vpPp`=(dP#3b8E2jquo`$3;CAyyk#!^^6Sz|Qi8O* zeMj-mwqwKzZ=Y1Dzcu4o^P1mnMO|5Pr{&H&ed%Y{-uVDrF1eFqRy+!H8*)mc5!`YD z@*x!_#95A%4r535a%&SyH*ZuoZehX+~D zzPD5J&1n(3|OE;F4|2K{a+guq}eWHV>#8(qkqnf&R0SYMo>wBGEuHDl@E_5Spx%3 z34w%W?E^Se(QKbD6Y(>nadbjOe__SMpuULOlWKAgxV|FT_D1W^cX?uS_gFm11>hVcz4b&)s!X}aTiG2 zH6HN7<+i=-pDkozm8W%#t7Exm|In&yDrhg#=V2&qE`W9eeCA_4w^~8elfH-bJp*HL zr;b(z+nP#FWh^hTO++?wd&|qYO?4nd$j{t(t6WL2H^$2tL>A2pRv7+^aUZ{Qx zU9u>lIUS^7vX+aKr9+`^$72dfmoohGyEzi`%+6Q}&FhKI`2L_vHX0IZxG2Yng}~pwR&=3| zhUk07aFiK&#wgW%P56=RIdCh&Z;23qq(0fKw=&UyJrJPydbe0U>;Vw{Ua4hxBDUug z*^Au+xs4kZ$Wl^MXo*S_2EBJh&&pMh?48H^N1>-peRi}_E{l|}&z_69oK>R2`zbgkW zbaFhpoOCO~(JwgbWI_JR*6xth&OR2T$erJdK8%vG#fZc?H>GW^DuzS=vb2TTVpw&M zU2Axw5ub92(G~v&E-G#1dC0Z&KIF@GJe8xtq*wmj8%tg9Y>OKE z|0O&&carICjr?+|b%B3k4i>GUXu&E% zo5(opl~QnMvj&`t<`^(i(?PMjX4kw|fv5OkRHhb3Cyt;_*?#ij$+}o@`p=XrzZ-(HMB*r$z^#_o~D8G!|^OrU!CC0ljkb9UL_hkx}YpCb@7x>76XDhJRGl1C}c!^D8i|@fBnuo zFav%?=eFyd(9zVt>rP|>RQA|kav`Eho0&y7z(go^I>P%XaO&gr)n3aGoeb*6B7XD>9cf=fkYER7E{a7aLYxSq98QK<>_vL01?iok5zj10a((jV z#=D6%l%Redx+JM3Y5-86Fw9C@;xvBcPxW zU(I5o@nn&ebHYnqQY^BnWbWt_I1h+*i-*N5>$%0rKFKMxL5o1q*CzBLzR 
zbdScTNIiEixb=poWlOiSR!k8iPx2or5&4GbNa&J;*OY9NaE9Wl`FJA69~(y3vN!p2 znUXKSV}s0Hw4s$11GUcKZi?f+U!YO=RAbtnLO4OVG2q$>aMLbiiaFSXeNgT73wrvG z+*AumD<1`QI4n&Ns@^}8nH>QjL%yyK%;1<%FWFX%Dz`U6Sfx%u?T4K?6vBX;_Vq#= z&?iO}^e$rQf#KhmqnOi}mrJou2wi$tawo~_1qVM5nrw??W;RD+x~!{bM5G(mzuWzp zNx_nsj=$D3D%;+?$@}~A#|X?SdLpYfTZz3Y!$^H_h@AR#~Z_0CvEFKmkke;CQuf%g>3`CMVses|l9828_{M||*R&|xJ7aR~kR z;v43y6h>0SZR(P^zW$X>5D%QJWJbh!VBI%oYaE>E>7iD7LP-L#=n}EzvM!qwfExVI zOns(?)__!f*Y2Ocb=Z}SODmtWt~4t!44SsIV%gK>2jtr zN*oyQe!fq6P9%r7-t1j2N|ur}4Kp?De%`MX=<=BvrIoZ^({4CPjrC7ns5oJlrQBkd zq53V`&?>y>*8n;OVYPp5KoT0mZ=cFeBKi7TQre1C6FBD)d1IsFJVkIy4!>3XhvJH7;_!r@%q;1ZFJU8J=eQD?_V}QKU%xWAO$b?phAvI z@B~ANq|goyM1x77umaG9UAcRks3{7sTo;kTrqHud={k< z&F|U&DCE5c>`n8D0)NqtW4KW!!?0J6M80m*l034U0vNj(b{+%^h1Kf$4Yi%_IZ z;$9LSqSgBCkvXw+>`L9YLQbU9M5CCO8^422IiaizDj=9K zlj4fF9Lw5U`x$=pZQ?-UPuAXe{Z)4<0fU7O8A76`EXwouO(s&OQQQ=kePUYjUcXVT z=s8e^=~W3$OwsSJAsShVG6tR{^#V-ma+;Il2q_!sA@BN$8KclpnKRHyRfta_R5jf= z);%yhM6?dy7E(2;#d-@#d>y}IjDfDwN6pIdp@G5N2?T# z%Yi=vzk~2ZOvPkj4Sqfz*McHfu@vjN}11>>Jq+oRR1~ zVHjcuNsR)K8NdwkjK_?PqQEgrn)m{RR-(^{gE6oNd`qx*s&}k+`dc8ca6&$odiAPMQlA6E58kTQ`&5phj=GL`E$EK>j>Y?)d!hSjH>x+p zH`q70`-o$DiGHPmij2xv3R~2Bv}bC~B~wHFO!uj}(f}v=TcBI!-+Bop<^{v5(kgU{ zyk$$doCO6rtV-11Eg}?KW;Dnnm}Qsc(<(En+BKe=9h+{{&}GmOlSGr?X^IWdvk$4*-9neg$ccYF=oTYOXd+SiqaJH0U%uudFsqT6Gyx z*Yc_#S12x!FT}@&#&*T3n`O&}O@vn^*cM#itb49gdMfilc93IUS zTP>JHV{2fOVWQ{gFbVy&oOb=kHQhyqQray8kgChoF_d9@VUT5`YqepOYmIDNJD4}U z^9Sx7+kvHV!aV)DZ)Smc{l&Rc^RshNR~J{%hnI)z2lNL%gh+&31YGr7@I z>h6E^_Vts_?}iWI%1Oy398yop>B{Pkb!fIbZ1`@3Y~b_5@F(z_@XPtQ_$2yR`Gmb$ zzb8L8JfXgUzBj!**03#WyTW@>b@S_cbQcPG>bvV}Z2cyhB~p-gV#L8iYNvm?whmej zT#g||>P1+_xFMUPa&|bfa|(A7KKwc9L12oWjJbd@0jG!M%lT#$Y}S7eE|D;purs_s zwOrz%z^0I$*O4EUPtL?*m^`!=t`fID#6FO)lebg06MFbEzRuL-mv!DA=&^nqbHI@1 zXA|%ecn5BVISSaL<3*MWkBb$+`;0;DE!~5otU;wg32oM0%UoOJb=z>=5X_S@bl8a^ zODWhXNErc)tp0#hTvPn62=T1@#{99W{5jWAnmr>tgHRY#BvHI*7SFQ7A`6QKW)UbA zRgXchF-OjdiUCC)m5Fhw&PxV^reZ>pcFU4phgM5mtYb~5OL_6f^$%e3BLg+b$uQy# 
zrRT>(VUZLEolnAPx|cn+FSeV8RhI0=N&Ch=!PsM^pek8~xlU;lv!BT)O_$1id1?8q z>ZZ0)OOgAZu!d=?^jo%Dwf)D+TP-y$A|)c4BaO>;-%Git>?Z*i>zd^ntG+m4-R*ml zvAiF7cDOCL)t}b)PREn^=|9+AtQ%L`&0rm(^raqEn=0oAZU$Ngd{zUS*sRTL7}h=v zhI;WAzndnXt$$mm-k_$2r?FWn?6MlGvikzC$5FnqQ(9IUc4hCCKdU}oaj#je9lyMy z9(>B;Ay2EP#?`M{d|c|1XqQ^g4k3}RK!#^5up*jmGzY|%r7m@;Vp2s zw_sGFFCyTjxfR@&JWdyJ-?sYPkKT(7ZKXafmFw;Lo%0j8x8Dc%MYJYDP}0gW$)U3o z!As$W;wa&e%hbx~6obk(vGZ_bzx>&05;1e0_?fN3-S&)kul><7ecN&vrWH&dg!zNv zt^r`}b^VQ7;^ry2>yO_Z8y)A9z1e&xyAK^7q_=T)@6VIilTy0o zT_qkC?>FQAmx71oE!8i2uzGQD7;n^{W&^mp9F(7qpXEMOrX`=1N9RXvF_-fjgfEP@ zBB%C8SF>^0gcAJdzN)XOEy)c0w86s7J;TZU(gDNrij=dz+b_+}e}e#= z+8i?-(`)-Q`2;y-1bo6Ah>ZaR1e|4|tnREXBh77OX9F-Wwlg#VxZBu&IkP}Oyzbmz zk2WUG21M>Q*0xUE?tCPFTX26p|5MCBLiD$ZvlSnSx{N%Lh@GPe5gULNz(~RmLqtTx z>u7Asttcw~Z}G1;J`!_hXM1i21~)f1fEx?I&e4p4iHnPifsvVknVJ5p1-+Apt+RnU zy{!}JzdHG^end^2j2tcOoh|HaiT>%=z|hXcnU93zpMn1S_pk3Xaku!Nk!+p*t=5-< z4FA+HFaa1D{=4rlQQm(_x#ca~O{~>LEo@9|oxaB4XJTaK;{7Y|zpDOc$o~-4_#aVr zM)v=d{EwP{OY$=Oqrrb@^e=P$E&Z|=KMXI!f7_lP#^st}83;%a=$ojJvODluCZxB_ z;pdRRauggiG|CSmp?EBXa?|;;Wcji=^U@-E?)sIFa{8Co_cBc{rE=wg4IUE7Gcb)n zI%FXdP;8X9>`SM0+FjFcBplv^)oWfCT$3JyuiB3pw_KaK_o8U_1|ZV@(2T$o-C(Ie zsQUl&B1jEwp5o}Yk@h$3`S*VPf$mFq_la|fo4k=fw?(ORX6&DwO9&|lhV#*NkBdD#db<{dv zX_#*JaB@DGqt;umQ&+f7hC}RS^sB-Im`yp))HrM_p0)qdz;N1S|3An3`3oolE(M?y z7#D{~z~`xa@-SKl?C0kP1_41rOG~Tw+w1WnX=nKUbWx#L7N3)oQ?l0Ja3XW5)<~lB z#iTYbkRCg@;a-xr2hzR>=-;l;?oF*KS) zi%dX(pr@xd#ksb=uHrIkm@6z6YW3Pgq^*|f6b|}B3U(&fpwM7)EIkBmU)OT-OUYBgOQX8V;b&|->5V*#6)9fCZ* zeibNRz>oSM;BoqoBvS4kP3JHDHj8s?@K&Sr4h{`HSZTCf=Z?nb!N?bnQG|x?>FHsJ z8;e{pzy&UK#d@ZOg=^Wmg8V;hJB0;8DVOyVM5J1` zV=7k!(t4#H+E`3*0b);AX#f3jGb}0!HW{E3d30ocbaIkc^9~1xmi*q~^(g-OaDF{R z=;yD^E;&qe3WkE-`Fw!8j1YlE=DbI5Tq!6#n95{yWBWt6@9p{4;cLlxuYPaK3#7xg z-M*9|wm;#4xF07;pf|mqdjV?=Gg8#5m#V5l z2Q7kj-c7vPQx3jXlWojwv0V9!^;}5;8rE0OWZ)1ew4>Zdr1x6H zB%p9I3T+2U+eB^b(nuh*S}z$CkUtaBAW|E8x(C&2zctHhI}rqWqO&L(_J{m~werW% zYm?uH3Y=i|^$IC?Gx?Ma`^>c53_|p;F*|3AF;K+35=9L=k& zq~E1?zTCivL?lo)33PuwZ!$14BK?$%CK>}9CrrF 
zRf4GgnEp4RK!}EtFnCg4Rx1!*od1|9b57t^kIC`zpUDttO9?R0h(FN{x4Hvn*bLrZ z?wK(_HMrEZw8pqk9ujdQ-H9>b>MZ3SRODUiI&+>7)vB};02q7XLe%Wv?x^B=Nkly4 z_u13_!C@RxB2F$4LfcQ=;z`^?+@#uRQtypEU~!DntGJd}(6I76PZhqkkNYTZ7Z31% zTQwjh0jLEP=M*P5GHZH_lP^mN95rVv^7i1*n2Ckp0xX~BUCCBLlHTK4u)MEzi^)G) z_=_Vw9c)n~bziwAdNybM2yU+DG1nYIgME1jODyaqyR5fijKy0I;d!h>N=t=n8wP&lIVb@Yf3@K z+&(UmayST<+D$8`|8k7a=^WXI6@Hl+fy=^Ei?B3Eh2|g!804&IB zEz^Pk;dkI@8NY{+r0$(qwMA7FR|xV~>*2lN>Kxj2-}zuFSdG%fZ*~9OWM6Y7D2BX* zfOVg_)z7`aU;F-tjup_C z@Z_W}vrHrE(PXQp@_jsz`zDe*SRgLT9S2t!*0VLv;ntR&d;F&NC>FlPTCgk7b^rdq z0C_Z~yNHD;&qhbyv@qGF+~|Z~fEW2Qfp6x*Iw7d-+$Ju_qG_dHIs!b4!pu_qJ|6^Fb@5JJ2sGR9J zYfauXlH*e|rUWZQQ1I18AR&~iafmT@{dCk+`I1ah`eB3mOC0pfS>WdK7|xwG|fDM_6H1kuZfKHYB7r*AcbrU!H*=Ik}+Wb4?}f z-t+DEF1S;@9+?otZ?Zg6zcfz%2zF8!P7q0%zEEhX>@_t>3k;oiw2I(xMf zmRpwZGc&?pA74AXEr_AX-DtjMBgykZ44(h=8+)%(@YIjY*uevYlCpGxcQ*x_D_3OO zw+Jc*(D3kZs4wrJ6HaNH>o0c9p`_OPQTlG87P+qS&|4!VIwJxnAj<0)XSMTMtT}DQ z$8j=3TDb~{(zB4RrM_^Pf>jRWhTDsZ9TPtk5zNdu>*qi|`7zdgiYbOXr97C*JH)-T>!u^J9kA=oxEyd)ko1X;D0#$tNUG2+_FOJRd+cWNF80DQjNc<+;abB?$Lzq$9IT&#=h&pW zd5N<`9OU>H%345A*=<3ts_yh_gs^E(q&>Pd|19l-#@Fh8D@9Mw zhQh#0{1rUrU@^7m6=-o;-`!N*1zD!0T0r{k;`xyMnA zK{r0jb^W=ZBlNut;HbHazS10O3)hYwVGG-C&mo2jh+W})J}wrEWMWO)+Jo}$yd->j z!W~Y_{4{mv9}|JkbgT^(1LAS`zUo}+c%?FSPsg`EH-U&l}?dkHc{gD;QT z`smH}>hD{mL|51t>2szvTcTLq4nQhuKuhq%>_Z-eM3%99xUyJ00(io1)*iyQ|Y#QZ`Mn!H)BmaJ#vbdh|vDB&*^njur= zo{T7nXtvVN6;rpKwYUrZkzsaOJrFsdU6C{T^0h}JgNpQrBKz0xiy9o%P9c&zRNkafo^cebkYt^QF0+zrv2o_c zL>E;(IX|`+kLH(<*<{M8tV%cV%jN=g;Q8)s6+kOb(t5_#F*8qBbKUwNh{AYpHl1tG z`0+x1E?O=GeRk|JR=!|$X#UzBx~MjK7DVJ!{r%I+#o1o4`;U6?U?IuTfsORpTyFdV zs}rip(p3!X;5?Mi8BbcoF(gRM`;3YxdC{ID%k=p4ZOLD^Y*NnO_M|v-4K5oM#>;`Z zg>bPpX$6A1NSN{zV~Rye_?JONG_w2=xk z@Ud0z{zbqCJeX)95fy-Z^OCkjavXZy|MOfRs9fQNac&kxSk!N>8WGaqjxi?i4OoAY=qU*M)_ z9i|#P9!?6Ifofbp?s3jE)^a}eMb>?Qn=$&`xx9xksQuQEa2LhD7Q>Yx^c4881kZwg zxnEtlQI32?Sbf95oh~1&&(4pXF)@$s#L4{Q~fc-h!T0yCN!wZtV$7x?FHH{K+L_dZ6C{MV21~o^_ zEOxuUw2YY_96nTAXt>MeK26^lC2zch9VQAQaswaB3s)yYl!giL_c;45>%T_i%hQ1` 
z;f)ULAJ)bJm?`dx%tGe3;*^Kj;91$?U8}^&uT29zdMlut^_uHFwBgp@(i98LSIYLG z;drms@>}!dMO;M?Nbah3DNi%}eu=;wzC8y!=D_+?RTD2Cj{oU>#r|t)i7H+BRo2MD zY8HtDM<;OsPU#^tzSsU6C~_is524VhF`+<(q5^5l zliO|Xszp=(rmxOdAy+#jk>@1T%fcpn@bTS2XqhSk1Xb%%Nc%>d`0SVdiLW2F`qNwA zE~m?R6%tIA3D3#pNx@p>l-nrt))rFek)(}i6w%mkpNZt715cqTgbFS{f`sWvb+ouy zc0-|*xF~eQ+^K-lWp@fD(S`?eNfHnM4)g=JeVIX|Pc|H2rcS=W2^co$#HBZOk-_j4 zS+WuBN+T^T-(QE-*TsbtcC9T%b&s-$P(W^2mF}nLVC^Tmb zC_I$i(z}E-f7tB0p3U_MIrvdjj2)zA*Jp=|Qgi9-$sf=&E(%K<=F(SaH>A20kq#&+ zi7r{x$}f?-oQI_l--#FxqD8Y@;oL@%t!_J0L0s;q+l@Tf(2@^+McB(ie8SKxZTOn#Zeq%KEbbZgG&k71gprbbi6hR-gj z{c|M0H^M@^kEuyS7zTSX=ktEJA}k!MIEn*B{V_yJD7Lw>04{t*Hp{87hD(iKT&Y%pn_Ej|6w+8>4qJ+y9@p)n!s*3op+&7o{(^nZ}ZrAbTOxe?|0 z9B$ScfX5JKLQa4C8$*w19{o1Q?R0O$fLOTnZ>3DhB~D%FD6W`f_zS0_g8mZ@^J3EA zFERObSJ4cBP+IS$PyFf25ZQp>P*76Ky!W@HW}Nb))(#PR@42G1QNGG%P?D6+Y#oVR z#B~>_>SuW{@lbd58gNKn*a>|h2P6tRo+w<7WAEyi(1nAiMl0Nm7QotJtlrNZr$nnU zrte^%?=4=3%(dF2i)dY`SYWyZsfcOjsgJu04pjo{P46(bPD7f7a1HKB8(0=h56bi;U)_8EUgjYlMe?08opPU29ln z`}X@zw`f~gNM^%|$$8j77Ev?eWlp8(4U81XIBMNV%Cn4@~2>lm$SAazk%M@EcS%RdF{6{nV`q)$2-1 z9@e$7tGB0`-Hct~U5s{I{w70YNNY|`02wk+1B)au0?(I=znG;=CGwVpKik!qG_Z#= zZT1FZo*I=TR8IB}_r<#=_3-r~Roa;M?Cw%0Tx#n-bF}{`WJYwB*KV8F)hn~+ZT%qZ zBn_lsm!DsCq=w42T@TTU;4X8Cu~HLS*riMTWopnoyvN#Uf;iu}1CaOM{zgKoq8tr8 z7&Vvu6@JrRRFPLn9Ll01OGJRBrsRqWgb%SM7;0utF%}64IG305%UgW33>mH4b@g!E zl!?^{cK4+Mz2^BTSSp%r-q7~?=w%>m>KjV_PuW<>JM&wx^(A}PQSRH z0};kVkTnfQ!=~1j_>7fXrg5-H~$ ztmQ^XOhqG7l%mXDR{aXLu?vy%Vkoh^QU0`HcS~pe=yNc8q$)=kQ&~A(qDk*u(>yAUTGoCNMc31x&CQ+ zV)dxa-kQrH3@!8d8tPK~iDsWq{f`Y?$JH&sVCq^+O0*uDJ1q(+yN$Aj9U2B?Kz}&) z$7_u?)S|=8%C?4@>VcfS*=WnU-Sp=5B1Xu*WWSuzck*t`-(x9dnuew@6Xc=W zhBGSE?5u3nVF&XWkx&onK!Fvk1rb0SgM)j53BRGDz_wpL50)EMQS*2`BY$ za$G)5u7wMqZgPyyGbu@BS<=~gy`}vPXEwlwL8^vYJ)pWwbL#f;_=g@FdNMfpsV*<{ z++PzP&iXE28HD66YwHd3fys)GN|>QKHfOMJAv-k+aKRkX(QEMISYiz5*Y15ce07_3 zEw^WF?`yVf;tVIFcKF~|?>Imhuy<_?_M3d9uOI1MDYX-z(GtJd-vO|J`M)v936%QV zRT!M`)3--%^}7Xq7ZrCME@YxyqL&_-ev0*>SSdARUk-xlQa1-N?JdP+zQ$};xpz`~ 
zP}NMhp>rPY5`}lATUJtQ|b`grGFlv%}jltcpr?TsKkaxMLQRalE z^o zofrd7?-U!))nv<9OKYW_5mYrI1fScMX(4P;fIE<{(@8Ul7>vmwY;fww`APW?KcT#q zIzSgozzs$B7mt8?@bJYWxLZIw>%OJ1f*0oaUe695deyzp$Z7{siua1d%|~sy9VbM4 zF_lH(s}ja3MnIp0UM%NgV{~fOU(!Oud)YpXteu~%P@lRuy~S=v!BZ-z^uI{H>y7*| z9rNT}+ZdDax0V0$<@Zsij=bbL@3m6}6~G1wbb||dF8W%_fOq`x5k2cVnNApe9dMQA zs**fk7hty;ITXP_WWAjsL2(=C?!8Z>KMAXbj(2PpxtBf+rV$yBK1&Y+(3dW)fV5ap zwXy{p+^;c#wba>5U^!eWDU zDy@yfx5lFov{OKWj9gHs5;LqE?nLsR*-TrxgQIkU!}i_}3tW#Vk>zhaWBkU!4C zZHp$1(_X#SuE8n2E;}JE6Zjp9V8hy8vxQQrJ`Pdi{`nca>&b-pxlyw# z3@6!7Re|&>5VS~q6xn-!DU!e!1dCc?4${(>t#)VL?DsDaLLnl2My*vtW&jMziHdEz z`|lWk>}tte*J@$Hex?28>p#=!}EQ6tV7SVlX?6{6bZD z%3H5IWbc~alsl|-vZKm9(QzNmP(frmBuj5J*>LhY*2@N&;uZCv;g=q=@SF^7(Kp1K& z!sAcF`jJYTPC`-lYE{PI7CFCrY9SKFPtdH5F8I##x7HidE-NhH&7-mAub_?J@+LGi zroy<-g7>lus-q0j%!(!W{mNm)^a5rm zN7ldt;$#Ph9}NOUJ2u*jVez{9v1>Uz9MjY*Zz)Z!2QSVRhgLKR z1pHDH8v5vsXsRNv;=@!Mg=7k_Py=bp4ik2h7>PY(a$17oF7Da&im#heUpH4Tj#5({dpv z%ZOFn6w#jQiDD;_aOaDmKT;gO%b1)-zr2A#dD=drz~fE4qsoi@_5ds2!g@LV6;iqBunRoxS|NeKWSRZi)B549a7XVViPp*|WQ2>N@4)8iAYD&q2F+Nzc)@%W%|VSfshLjxOH zUOZ7ZioLhNHO5=mLgALU$7P(G(Ae10NEnmjP45xm%6!}Zcj+NxostgSDIZGgJYBAx zgIa%1OOTZtLCus3+T7t<=h>+1o7WvcQPEjpsGX+s;vn>!&#&;^?eGFf zoajENnJ3FVJu_!qUW#3~KA04cbUhQeL&zCuz^FPRbL>@BosL-z&bsd|ZaLEH&4!ZM zD1L~ytV4v^C{v5mBCj}qxVlQ#US=d|Bi8Cj|Q0J=0Bk)aA}#xZ}JR(ef1!U}2dG4H1D_9Iq@9VuZN; zT1df^gbi$j4cIP1!@|)_-@Q}IcTA(|oC?%t`-%yRKiBtbXg$RnW^k_(JSd_X-}&Xj7N6-jOo#+7Ywo z007{V6M_$CLBa>tic`51yvb75z2Fu|P07fCQV3I`O5Di!w5TPvr-IZhm@U$_))({0 z>o*4-TucS}CVE|7{DO}}ox@88lQ#%U&G;h>M%Il4&+Cft9V5dX)Hq)kA|$Ayfrc37 z?-w=O$9iE(O(EP&>C>Ei8Kt!W_>8XP&@qn7Z9{96i}l3;TzD%Z+fQ9|R_2RjXf)}s zMH;w#h4BT>xUje#%d|D+6grxqH_qoM$f0Zj6UjoXBj`Zm* z_h8ot4$k82V49Yi1IIT^s|nq}sGutQ1Wr3e51xHcxQqrSzpO z#RY5w;dk|4WYv6_!MZVcQ7$6aFgs;6)Q}sIqQ%drwM%;HAkA$Dt-a7lp{rik(-k;0o$Be4nUz0aSDqC6p}eKCEhhIA#k!nB@!B7KPafc4Q7cJ<6{R56KwW?76&Utwy9dH#@F>X}Uq7;N;Z< zNH}Rgo0RI183Il3tQG_xns4^Vy<3lwKG1FHxxgi}pKhOSx*^oHj=t3f(&)*36CTr` 
zt$FJ=v4)5Bm!fB(Yi@?LLLeV=(x0FXuTDj)zH_dh?wJN0Qq13cam`N-uhb5G<)Co~ zUt4iSvLOa@G#03(=FY{jZ{$clc=LPGCVDIXGK*{c33u_C=WcSU;4$=9T6BhG5914B zsmOr~f#g-dIQ01H5jPwGyv)ox+Ir0nTJz-**GA2YjcPu+-Zx|v);CE3IH#+YJrm-P zLkzq>i!yX{D(s(j70Oyi!CU2V+V6la$oRcpBmpSt5*=ej$KNI2baTOmGd#BU|ET*s zVdNRYO^r^YnNdVnPBA$g!hqbp)?VBgSo6QBM<&)cN}%GN;ob&liaOI#!@`&#F8LnQ zJOqdC8t>7sq4rWG4K|eLw3hN)ref#R+MtV)Z4Af~Ly<)_LyZ~|-33V_U&g%EhTSoW z+BdUL=#2PhZ5XAd^9g(YiXE=Xi}@mO4V7`lxOw+GK{?(+{`k7>4{@Zc8-1p3kIIB} z#;t#1CSSC`iulm?%RIjocC_T~G7lZ%HR{5Ig(M(TX$okxB!w-Vp}RmuP2dGhUehq5 zL$aNu$ieJ=-%O7~pzro2^f8X$6#j&}3<+JBpElX(k{ofTiqj#*bsEP#h&^ZOTyi|i zf;;-Q8AiC_5NP&W*Zj9VaUdzdA@{^x=E&%bxDhUFQi-ggXtfHhvWrz3c5Y(}^sl-4 zen|Rt<5qFUM0clU%lOsYh)#BZ&!a*M<0#vb+8e0OXS0l6Na~(f>T)$p=h@CjB6$h_2^U7U=Qr zX`nPk{?FKR&nO}{(})`$Bh{I1qRyjP5R8?Mzv@Sc@{x*08;gIjOq>*Mui zW{N$VDzU2fzbUtBUm@Bfil*i0!rouvkXe~HhPe`7!AgmYjHX;Nl~1yb=-1y1(t>{) z;ceD}AHY#Ptrb+Lt-0-ReT+-H6S860B6&WQN@z(Rujfv!9}oK0EPl^uDM3W}byx4= zpck%me_~21gw-(R79<)1X`!jeDbHSnmR&;Vvrd`1Pz~-1Dbd5BYyiExZ5p>9^m>58 zIUHlS6g#lAODXC}Y`I7Gu)xh&^(z1=;W>2l*)H@qQ8ubP7a&n=7E=phx7Y2NF!iTV zQ?>ML+H%|$rU$57NX~3aCqA9bcLJ#UqyUv+Q4M1%Fa$fpFNe(-HnDbQhu{j`7=W z7(AYG+CM4j^Ywv~PsHkYTQP*~f%`SXG~3gOLy}{%65ccUYaG79WFRWl<`GutSM9ym^Lbtc2hVnJlA1T>BO@=22MB;e=T}F3->|ZDA!{$J!^d}odFw+q zmIga9+QmD9R!m!1;!E94z;~i1)eMd*mhCYyS_bGXgvPG+N%R*2nHXiEdbeQ(hiqkX z;?E;jzi)VIs6P%IO5(ln47Oon;IRWsAVT)FXi+Zh+2lp@{~neXCu+g|I(T6y1Tm?d z6||+gk{RJU1FwSHNc`trkjY{5B zaHX{XGQoT+8v4_#1T%Y2<1tj;oi>QFVFK|%RL8R|ff5s1ss5%YSWTCN zX}r2Dkk3EjB7GMUG`qF^u~3%hgLuLvm&2`X;C4NLTtl@V+>JN64MhFaS3-k!^k zqC0uZ?L_^78N;+Ji*k|~zjp_fvCjIOse7p3L{7;G(Y2kgHJcmOCas9(5MT*hFr26| zKi1SIn~>;{o0MQ>s$>7>BHh?K!&bkB57%P5+yE3jvMOZiTXydEEbR?4cKH$PY@wC6J2lg z1DvjD?$?lR`_GA~^}C!GWYlX2|8OLq9rIk9f|9IkQ_~V`Mx;w>r4bw_Tx#JVXy6cU zCYR6^I4eAhF}9t+m`(#lQ%>$~wM`Wrj_Mr9dvzh&1TC(J?bV{hrlZ9J)}=e;3cP>s zksTJ(KyY8-3n*7E3%qBac3LM%l9xl0A}99dwni`(tEv~rdx5l&@*=4sT99BsB%K`W zh*9EtzQh%xgPl7)55?_WQ~DY$685g6*&vSzLi!C#B%WEhiFQIyYY+l@m+f$|8QD-g0YZTj5TB~EH 
zZnNQ1wl7wAbix+6ybB1=mUzsd-MJw9Z70f3EO~D;gH-eVv_;Vpho7KioD$T~rkda0 z$)rUF_B1@E3jC!T3sOXIH84tSrA@+E;IQ{fv1Iv=fNfsl+gE*yxMEh@RTEM!py6u$G?dX+r zdW8bTxhfPKT5}CiK!E!6XG@Tgc;0yh8#9I2F#LdRs_{(Kb9oUKA(l+qWQ@NQk$ZZ(4v@Xfb5!W~!hGkszk1@(q8_VnJ!Oy%yMS#V0sLLE6Zv%pxbuLGI9-z{(tl*|)}6cT9X!k8SZC5$O%b zZz?YCT9g>5%B=3&Dil|-S~_n8=s}r1i@M+7i~;=S_omUPJx?2MO1$3~?2t$~&IBc$ zi_myBZ8cDTzuZ+KjRoIinK`WJZTKN+>gWZYI<(rOnZ7DhB?E&)y*ZY1Y*=07UGwk4 zf*_0nMY2A0h28(5bf1062ni1LF@oHUTIZP_t)jsDQ^~aU2VYI}d(U3Ivvsimwp(HlBfET+ z%(p62l4(Ruj}ItBW+5a9IIWIDVn$wY@qpKU3`*$_+mdLXtqaqj{@U4EmravqUKQ*z zt~Mh^kdR@ZFDPJ`(&-(qB&BlzHX{W(oYN<^3MbhZkSNGTX7bMn6*hDGs%D##7(nu z%w)mNix-e%qH8FIfzp32Ib=(Eo!@|q0*fT(2rt5g=lhlG6J&utYjFV4GW4FR(-dje zViTtw6UQOuq*VsLPDkr9ZAmy{s-~KvLb_J7p_g}Jqnlaab{U3bfMz>nkkF4JFl)S zqFhmjQR`}ooPOfD;)lKeGF)Hg3Xk!=!uDWF@98?8pAqy$+{+$VPV_WaMI;m@gA$Wl z&xg3kbB8}?gg6ITd8Zcr+Veh^3aXg5dL-+XxdenwFgcDq*}}iE^Os7~BpBD6_N#3X zZJ#ZO4ug+n-DZA7L(PgE(;(FToO+bre=i|?PL0*V&??WkQwNRS{=rRh z!+ZEn<|w=~e?(K1hwzfD&w0^Y`u(wL3yL)Ql&r$(7D9gP935Hn&}z&C2+*sm4?1lp zl2vCBzIzFErQmQHcj5@|PRk_nRvXi?3_Bj_<|Tp*a-h}3sov5pYQJ^_T0F{kTf*$} za5*Oi7?%Ky=9nrNF?;Ppu=M zxD6|R^yB*=bWzd4O=*|8(M=Z0k5^xdJjuvxS+<@P&!Wlaih5faWnx*3ecouT3BaBj zb&ma-mp1Y7;4_DfZlXmfZi_0nH=q`A{tx(+i%cl(M?xvl93npylrvy1V-h7l4PH3D z2%5*BKhq4R-V2EI_PePsMK5z{dyVOYNwxr=UL5|jQMFXSkRkrjX4tdGeceCNZaO

    )^zc!G-{18Q3+p&yf zW#0F9{;5(2SD;G$1KcpFNKUtG>TmtC%+D{HqL%>6BbQio4cXn@-1_TSVfRq1rsMV+ zOi58)sbW5~%zTBP!=g&z)CT!GZAsVKs2-?KaNHq6l3ifZ)2wIIkM)0)t46*qdsp!` z?JPgAjaWcrjr%K`#plVaJPdw?#WE4i!0&iwaRHkzTf+5ga95<~-=5#*aG}>oGUvv4 zz~8jNXd6K}f;K%idHbKgBV}Wk$Rhsy)+QN+)`c!sD5QIneP(cS=Arn~02$CK#40>) zP^jqR2sWe6*0l*aiKp%t8{bvTHJGw(ho84TN%&5j#x7M`950nFnw!4M_BpcgxLXA) zBhN?^D7MJS>Ix#mldy>JCG@ntMXu8`F+XpyfjaDO+2?gub?l7mpzD**L_pBTG1SZRn%UA7cwkfz(IMjXRB@!c=W@~N& zTCvRVNNtBc9)Hz~!{v#2F{Q(lVR~P|sLgK2X@_dOlrSO@xVOdGS`O+nLw0nG2?7{+ zb8jk;NK_vk5XNl8*OXKe4f|IhjJggUUVA$AShUB^5q5tzvrYgStQ64jhH4SxQxQb6 zx>B=>2gi02wBCH0R$6;O>B9M=Pyolu)-NwG=v%2Za9<1>=MiZ>DaLjBwxodU_h5vk zvKqEk%>TsebB7 zD8nFQi#d*ciCg419Z9A~IYSxWrxD&3q4Sk~V4Y#of zm{T9A--|t-m3Hq@<88RGI9S8KCzbH<#!Eh!4Rax3iC`Cu*8^r8`q zAo8Om=&mpv5#giLe|mY+F!b4*tp4iqSB_P7optMaUXGCoc`pyEy4i3(&$z5`@tl zdnsql4QDE?-6_@vRndD}$w8uXp?f(kc#n(yD89BvXkv@dYx!UMfpf^D>~KQ8KRg~d zXZ?|DNz5o@fLmSxQ}2)LXA0#mv(qBK1of1MpMYSyz*L!zx6aaMYIr0d-0H{kcd)yn zO{7jUbYbUqY>PhH5$p45amS9LOx?R8=9)0)E9Tj=K|_1DPTVWk^9dm*A{=4pF?D=a zJf{SCrM-QA0?ZFz>jL}vXLFC0B(pQkw2msR0y~T&NL?yP#h|pJZs*KXr!EG{SWIq5 zPd*tZa(E09`0^>*zZMUP|#&LXIwx}VmpToi1N zTCGdC!c%4#K?|o|Yub5#JRBP};L+9FM-bj{`C50f!6+ly5(1MiihH`)G~Jr>z(`&c zK9Db6z(Dx*#Y!!Z)6{Fi=*R5pb>@8La^;rznBOPJNom(NKPhij+WEeZS2ESbpX@Cd zJe7CDJ&p9j9j_|QY$rsT+SjQw!c<>5>LRMle^sYnLJ||opSg_*nXyHSOuDyC?Ls5J zq?^>wouU4OoHC=H*~ZYE(L_S_nmUT~n%ZV>J>Ua_uh6>^rrB~y+P4=C4oUOF?S0Sp zl_>W0Lb{DI%h&8rWUY*Naa6CeuvX$<9=`7B7X&)3hf(6_h;H@-9;7cRFE?`QbOqLE zWWUMRQbzY>mN+XEo0SnaohftFLuB&Q}zey6e72ah)s2?d#aP#TLMb3X6r9X z%UIB4v@H2A==@-D@Vg(7a>~~rM4jK3htiHdPKKhUnI)m%yp%KQe~sI!G?Me^C?F$k$Btqy-XIlAb%*WT^H9Sj3 z63h-|o%!;71Pm=gdfJgBuy6_}K@H?kinCuEEi!|o8zU*$Ep-`YsJZ0EM*P}sqy(W6 zVjEQ2%>Mon?YOG6Rn_R#{VqA7AI^*BW_60eYi%g2z#-?jTJB-(|XIMa(&_v=bWY& z>RM7SDsc!-W1+gaS={i-QV!`=IcNhGDLRA8zrP17 z+hBj((QILOaO$bIj%U>7rB14I_2b`X_G0-!o@q~v=3%S4VAuj5XtCX(m&-iPH!0#6 zwg>Tx%r;R&Q!C6s(>VI2k8~=1s3xWj!3_z|KxuikgXy<5FK112FiX1;MPne+-z@p)Ylp%t|?El|7i@AUoaY4my5PKl|@I$&Qiv)E$j|L 
zXh?~+2c`{o!WC!m6g~;m4~m&-3ekY=7MOSu>||DT556v$fm09ZE2!N7GCJ}ndByXB zw)voH&=@>yQtDMlSv6EQ_`qWUro+V?#G09ByWsxiOc4PETDwe)sB`7aY=F5fmpAIs zAc+`B@*K6O)s6NM11Q)OFRo+IC!H=ixt1BI=C{xUa10c z9;Jw(#C-P`^fmo}P-D!D(DwpwCtgd!+VkP5-_noyzLr}vnASEnsg6lXmDH6}rZEH! zJ9`n8$|1ph@b7tB8-{SC6drO=CnUQcOHKWHFGEOQoD$vi{kzaFG|NZ_yriP)#{=7U zN6#B!)=qPS8y-eH=WjVEGf`S6TIZyop~mKKRQ~y3?RDb|$p^LTb4)GS%`b;{hiv$K zvx`^lSeXeBNxk2WlX=Z}q>_57jM#CgAuFC<&6hL z#}d0JCwJr%vem@Bg3zVptsj#~-pEZadRqS(D5n%5l2VZ(@^uq<5JgYfDURy7q4Nwj ze&(epmr25Z3q|b?G%@!?PRUhfR|-9(ea)3I`o2@7v}hE!$;HEE&-`l`##Mb0(RylplKWICIttB}yRF zbz`j$XRpWg9xKK#7VlM3btf<6zfyip{<5u<8$Ff58ZBJ+`CGn~a(%f+jVQQlylBIw zfnj}2t83hJ0=t293mfv;7i3qN4KlSg{&S7~h$$jiR?+XPY7tZotn@<)+&j*4H!Wy} za!~@0y4gfmN9J}`g2KFswN5Bb%n~=BUiRw4C61R7%pvhmErG7;SGlyysyg1yPNTbU z5>&F6>Dr{jB0GrwSk$O;d}o^(#9eMxF3uml5%;hc6F9gCTnRu0>_JM{=~BHSj$=j4 zWZ5G+KVPladoBsj(n;5+XjtpZlY&R{1;2cNUBNsB>KY$e;3n1hx`w8qji`%8H@rFt zRXY5A*+(uo!w+cmbPv~{;*lCgn7`qhfLMg39Px8mCgQMr)_%^`YPGwNh`EJ`}1a;>TYV};h$-e>W zoMdO8*o7tF+T90Zil0>N8h(AVYjfp5r5wWC?_7(Ja>mA<@RXz}HvWP7s+MV$4iZc( zMs0>r=O>Sp)=6)e#k!AZ3)oIhBDt6sif#1WH3IDiUDQg;GadhPHHVxX(4ImC>yeX0 zo;0iYguy*Tnlo|xQdXv7$CsjrRr*~p#S@J$on#7)iHll5;-R1^+ zC^<+g$ZG`UX5ft;d_#Te@#zacnn^OWx{BO+;ez0K|L>mqVYG}x`MxKSqBN3(o>fa{ zBe_R=D&_BB3T5`C%pYJ#O8DLj#Y?Gz-#;!oE*%ToV}MgC|F7- zqF65|pF7WmT%wkXSldH~mG1`1ay-Qlf@X3sc#rZlA82ES?+o)qZ*|MLu*HoJz*+Ig z8PpyXjz+Yq{kpsO5MQTl^3_L0=h2SX$Jby1Hn?d9?U!M%81v`|39XdB3dL=n{m(r{ zY<>?+5~y021VJBTz0*eNX1CsxL+22G%v(=;#1~>TCMdgpiv@;`C54?ORwQ`-Jn9&x z#zZbN??9qXTyQL*{8=AdD!<+54fEXafRZIsje3G3A+?iy?+Xgu0fMkQz+%=8-=4uZev}4 zUay<2WfbZcw5;%=6cpK~@54#6Wxk}2uM9PeB0Y!gOPat{c$rIj zGLFeskmsM?-im4TG<)&1gZR4AyfQ8-QcH!b(E1tTpHJQ6!i zF0vzMhl@QYW+Ru_5#ub$eg`XRqNhDsrlNuW=rVV$b@xQH`VE_}KZtZNp_ZR!`bEx9 zv8ik+>Y1a0S)4SPXQnpWNM3||+=T8f`3lGt;TFp&&x%yXd#b=;s&WFx7s_a<&xZ|b z!$ln=S6Oy4&-z5kb9I3Ob)ywb^dmDT?L-eJfXhgJh+?<~NsWFPZk>7Nw`W$TVq2uV z1VjuQ?J|nH%Eg;poW=Jn=olVEn**Bu;2{H3{Etfs^qIqNblvA=@P;K8%+E-_%jqm@ zt4*|tIb}g|QEuJ4Xi&y|!5x?(W%dfHb+@SF6G!x!;Cgb`!0ku? 
z^ntp_;K|s8vt+`fFUg6gQ*F2h%i8SC_HT%dC3=93Y9y-lqgPS|Yk7>oeFwwY9mjwW zsUAJVc<1#Fb_^Qb=Ig3GFVOQ6)fep~eBeh}S~e=)D~l;H0=s*OgqE?1Z|_pL=CDTV zo1E8m7yGCgf~i}sLGR#l zCIy9~lh8MCZi9X178I9_B|={%F>r`^G(~N}8cro1e)6kDL8GV@Q{YN;w0`(Xi5a~S z9NQnwB1%=pw490@jNBla8!%Tfy=u;5LI~_y{F<(hVsCm=SWn#v`px&zi7n8<8o+|z zH?4tK$bg3Qq)a+>VZ%>?FT<%Kg!+Zz5x8w89js}04(gohB%*ds%R{9LTv zANB+|74D!;BIgzP;zN#{V2^}qw3i+Cp^mU|AMwK1MhWQx3Q4yJ_3!ZixuD`4^Q^;) z=~dXuFIe4qi&7dHLjKuX?NXS&;`4dD=|N52BK5!*5POo({ z9FyFtMjB(E#Lzu|rAv&ipVd>U>2>f=_nf&pA(nRSy(jXpw^GAs#7+^l3|PC|uld4V zUN|lXr}2>vm82!-?XZ{vFRT+3v>xdY=pC|sdxmdZgGE&jrAYlUmz(?FSO6e>1OSO4 zfeu+LNjG7UZW8yHV7m(+0~+*kgy;rfzcLD0V1y?ti|`P)-G=o4l^=Je&aCwiD_at}%QZr>fLq_T|g~>!Jm& zU$m(l{47tEctWMsU}a-YVf3$y{gT|w4?Bl3bdmjsF_y3YqnN*a!7p9Ppn|IDLk=m> zD%>`!w!^jE7{X+5hO?C^3-CD-He8+JB;efP6)7_A&$rThFl(-oPb&UCH$O+1Xt>m5 z+x^{I^^ZdmD_|qX=LSNV{$uQ>PeAr)_+djXg?WjTHBy%744B`1t;7>oligUXbD-=x zU39e@g=Y&$&W;#&B!L!d?Lj4EHH^Z>S3UUXb$ePed;TBqM z|A=)%T>#_S(NDu3$2J689Vc^$TAV(%<$z`&dJ`%w9=RkIsy&FEiPwUu=4|M6ptLhghapGCja?pj-<6{$#P*VjedJphJ_QT{w1&qhvulu+#=yvmX8X_(`o@jD^5Q=ek=Tq( zDR)BZm&whtm|c2o`!Q9@a}U2QIA>}s4Bh9Px84uaxA#@yY&klC8GaI1!!1mPL0|`k zkb&#-4UXLi)BvOfk{hgX_*E=QZ7!mnsQ+>D5TsC1fOz{6+Gjown&+-fS24%$&II1R zqoNr%pTpF$*jQ}YSKJ%m5LCs*t)IA}x#25CzvUKm5_!>nS{w2sB2o~#y9?_fao?1# z?$YPU=bDto+^B9V=9#ANb^bx0?gGstvW#Pq*n=mEVqz4Ik3>&`pm@5(I{d_$*w8_E z66#bc&^7XRN-p^F$17KqT82sAh=@wA7kUT=e>bjJ9?;v}+nx*yVG<@RZAbrRxng0e zdXKs8f07sw0r5d!BEQKQ3oixygG7PFfx7R`7j!$8yB7YlUL^u&!k&2Ax|;su*EE=b z%b^(=S_J>Pk_2*&L0!O;nC^XD$YlN%ZXqO*FLf7xq{Krz|8e<(et?_I_zE8w*a0IU zOuS(oGBev(DQ1?u;PrP-x8I%V0?XTpQpnXHP>I4hkok49+diX*RO7gKfS|aO!TsjH zj{%TqigVXdyXm@T3Q5Z^b5ME%5TrCE+zAcr-2Vqe{6$rUIzZ51;nLUazf10}6k`Su zBxi<7h?@FOs_AQtt{eTbECN0<;Ge!0%pnIz%;9t7wf!ApAq+HG)K?U)@5Qrwcklh> z+W!AY_)U64UF4U7`a_Z19;HXh2gN}mA)F7X;p2^3&4-^5hKsx|ZMSwXf-5#`pZ+#! 
zP-~M{#!{9$u~D%z{pQ?Y+_6N;I44Gm;>9u?ylSfmCiWXoWH?M6dy=mK=wxaK6bMwr zN(J?K);kU|@}kNgUMQ*CZfq$Ye`ok~eX%uv!&}$?>0*5D5{+0e!yNVsK`ta}I z|FY$`wyffE?N3*>AeAws*xq2%lmDS?&7y#+X+D3|ht`$cT=KYHgX(oxR7ILR>VLHn zv_MO2h}Y<~+4Udo{HrM1e25dXjH`}F}&0W?K2t5Nmg=bRVcINycPmW5d) zB2hY#OG!2P6LSsy>#7N{>tV*WgBqvVaSn<5c7 zrlUKAK`_CO=To+mW4irD_uZcs4vf@QAZTQrkl*y;=HRoc_(u&2@3XT|@UBu@n5^tB zOF_5+*sU)Hd_8(pz1uS(?lply206V7EDa`0>R6YkG+S>Hfl1|Vh*!x@(0Hz>e9E)- zy^a-11DmiA9-?YizPwDz*|`P}kA5amNYaHeCQ9~%6%}`8KY(v?D%I>JvefL`$H0aQ z)n^yNO<`IlIHJp>8@YWU)Z!^i!QxfzPM?SCty{Ogm(@vsAi(BNEn^b9KPU}}T(k+x z=y*l@93Nw6F6W3B2DS3palF?0W?Lc)g>;TMD;r%sAHUCTOHeCgXYGe`ClpB9#$jpf z$%UoVuue5ERV-bv7AEDxX5R!$V~yEGeg zIrp{d-@*T7%l|prG8&&#Jp-ORTN?piHwWkA`-svtEcL578fispRd6(6S){r1y+L-E zHrPrf-_M!OZM(233BSnE3Hs^aQ9N&8fC=I31Xp0X?a5ndHz)FZ^_1x9E>R7J&M?92 z(Z^uQiNnd$tx5dOP2;xh^jc=?hJ}l2y`7IONHvuwTtma-4n~T98x6J_jitOTtrcA{ zfgx11%(!hBn^Roc6Rp!@8Lo!Q^{6$mHiP*ILj&7NuMQfrakxOBa!gZm7CA?;7t@XL ztfWbaGgs9l-hG25j^YEc%rG#&P~qM4?;49>c)H!6ryX&Uh^5%!C~$^px2)8bStJnL zG(E-ab3&gUsvO#ZO<}&IW~qH>Xeq$Xw{v-LEI;q12e&~m z-+751x_c9o`2BisqzKcYxPgNoe3omPiUz(+ssQWtjB{0+s*mQ1GuMO9zfBTkFZRRl zgav40@66U!c}R93{!be7Ka2qZO$is+H>AqPI}v9ofjVK8k_1bQ@bMzTLpcc9*_r#v ziAhiaSA(vtEkP67VPq!5B1pvd0S6BD`1+LzaB2OAY`#V$B6r(Fi0CHFAQx}KQaB$4 z6=25MF|nh8p_oN4lWx{y0&nMfTIp)N3J$8Q(%%j^2J;J~#Wx*{hN82N763 zh=e8Fnp(9lx-Sbg&y0D$xOsVcIe7a^E{?}Z4wvChk?F`;Xh^ob+gwUJx9JFQ+-yth zP-b3TRDF}*D*&NA&F$!Cle@#VcF9kqXlqRQMxGrT4Ey0R*mlh`$Zx2GAX`qYz!WI_ zFwi+i^{@3g?eo!yJEl$kBMxj}=I@XNbi3E z(@jTgNX1&Zf~X{BrmBFSzQ2k;YcwP*BwVS_?|jD)7! 
zX7&D@x@QPLq9UvF0s|so$^-aG^n5$@FA3bO3WW&Z)84wIxX^vnMneNcW2&YP_t6<> zfegs8_`TL&{lP*&!v#eFEQkHG%HjT;Or9`67L>`F0 zz#tq^%Q@DGSR6WNJrwp1okC0y7p@Ij=sH%0Bzk&tb>y=L%QwQVN(K3z z4P6K)11yL{?no*i*kMI%HeMKfy-&%bGJj@UNFAny9%}4%WiFh0%1D5m+q-Dr{c?RBHNLt zQL#CQSf|9*;%(Vd?k_IP6_f=pup-c6-&=OM)K$E?l(K$XoN4S%m%OtOsr5_@a*}6I zqN*%yi581G^S}n~y7QP&-`Tp|0Pb#B7&gQrBM_a$hsD*L=_N#JO#5<-(k{7LWN`x_Ohf|N zksQ&1|5%{C>GdYzOeM8FyVHJfPI{=U`=Egtx8y%ikLvwYar&9b8=1D^R!a9m)3^5- zibib94=Sd`f1#SpRl>Y+K%`f5EWWQZpF|X{8bM2E9$uT+Wc$*y-ZZqI-V0K@#J&j1 z8``=L6r1wU+EaxSO*urEY(BKbz3ZGxlXdHDUo*|$ zZ2V*g1@nbw@M`XxkXac;_<(uv#vg%47Y_-GFDCP*J6CN#DHjiusAE67bE(UN=;J_q zB1YAmog^EUC`PMX8wold9F!JGs}W|v*C8&LKOAC#Xa*ODsTXbIaf44#M3bNM(~fVa zk)3lkM8N`=p+?N$HxW`FAeIXhuduYf+3r?lx%&+&_}l4)LSZSZu4aY0uU;&2vj=tCyT!+^g$cbhI!EsZjlcusdNB*FlL6 z7@G1Ozs#IswXun0N445jIk#B~4kTib8aO*@*gvO`E!qt*h`lfNJ3ERZQB}lcUvHYK zO#>{NI?lo(GT2Ep#fjl0?*7YYc$rY5W?v_3Bo|8>YYpGob6G+xL$~y98F-e$m5>Y( zs{|st;b~JgM0CZK&Y(5qy6)Eimz^t#>?qY=%Uqa4xJDQRPZ&_K3Cq^^3sk_YKB1=< z2Y05;5>?G#mgo-|ZAbSwRjObfA8UPf=|6&8GqD`nn#3dXO|cyOK!xT(lEb6cTk^CW z5F1VW>}|UmTW+TmB+h>TdJ7okpzHN9^Ur2vyN^Y&Qa66|jB`;Qr^jlicg45SK<0&| zaJ$nG@*%ltjLoJ7j*SFwlK6q5I#-?%31W7~c;sfzc)MZUBu<7jT(^SQ|@!>w?6yH#kH+*P(COYhye20oL- zKfK2;e8EC8xT$&;RCW4P(my;zW7jk!*gxkH_$qfDyfn1J~pJ*_ZXc-Ea@^SVmvV{azC7Be%Do|@dGfJtUwfb;B>d#MYKglJ0X zAvD((49kB~$A-n8%!4yv1^o95!fRssf?#Ua6#a9nS!*+@sf+{GgDT0`+PJKPu}{9u zgGg7q1XUfk$D@pkK9{|Z_I>8;Mh16g=Kcsscwi_m9U?goH@ce5Nd0d?Ms`0SRZbk6 z%AKNB+^YEk;+qGcdf%w8ZV)jPK@$r=(V}{z2oUQZ>}184Y432_G`L(q^;idr+`G5> znw+^4mN9=D__>@TdF%Qt;|V&`a*??&Bs_zW_!S8ZZ-ZO9krU<#TzUz0?1hp4pzF8O znuQH1Hcb4^RZ)6%#v{IXloW4$Tjez0|921V@$Mv!$qP4g^DK(%`F*YcJNZ>G$_JLl`@!TPR09mK5L2G9 z{ZBx6ycG022+_7kfkYn&v1n9C_{I#DVUsuZmeUssY%{jXqhbv9HRVoLT5OtbZpj?? 
z(;k4sA+RAVej|aASmQ6wPfpZQZg%TVwn7_L3mhe&VG)-Zta%{M#R-_=TzZqzb% zi(2%v;@6IbL5?a4H5>D;rRCz=MBBC`L5NDPz%@A2bems=H54)kMMH^#zpk&~8nmtm zd+E=lStp!{4B>bcn?vG!rWTkGQooCy;t=X34blYu5|$>WiIbO489f)^w0nYVjWR1K#DR|JGAcb zeE1M=m=*s;;8i_0vQ30+wQ^QW8)W#;NDwTE$|R2%We94z+IXKIsk!BS+&;N4BNeEbKsIv*WN;l;WB*E4a(}0X?%#Ca)eZ!A6u;W=K50n;-(f6>;e~dsor%<>L`ae zGU4ObJo?*Zfx|8GLtH)LYH8^s3il^uvWM;0{w9YjX`Q4j$8~U$$rVHNIR#rXSDd$T zp`D61B-7fwS5lQ5GUIs&<%Q_zRV#8mW3=DSqaCtZMHn`dNNsmGoHAU5L64qIX0Gs~ zRR8A}ylGYBmA`qS_W(GFnmBV^ItCYj>w4DI_VZ93)Ab-tk9aVzxdvX~q-Ij;{;~yq zo11~c0JOswNUa+L$}X8i#Gw#U0f7(^@ZnYr=^@6+0E!pF0)jaAixdx`pfQ1ROz*ch z5I$B=0I0io`_e%anqc54fTBiXOa+851$hIDyndiA1z~b1Qb0Y1iP8d+FV{ff<>KL& zL`Zj$2mnuAnQ=%sZ~@eej-U;hAqwUo&2Adf)*0ji>%}q`@|*QO0W_x^xZI@^E|z-U~Xzc4M$C&s|GfPSx@`*L(4r8*`kv9eFr! zg)KR$wiC@W@!4AuiEW>~_CL0_+P2Hep$|vKzj((Qb6=$ll``l^gv)&ri{ z{I9r6agz7={*$>Kh`x*ZbQqy}8V#y8j8|+^e zp3S`N+V&>BG1kzbZVl$Lr&$~vPj8#I+AdA>>RHtv%d+zi{)MY~@uy{_BQG22bh~=L zXQAC!OWS9lFYi!3Tw3%UV>5lssR`nq&?ZP%^HgM$37Wj_oti8)vUsXlPG>%%g<)gq z&2rL_UwsJT$<9>u2masnw&fgTd?6NwYVh_aD(Dsy7hYm#W`#vL&)g62M=xo>6Yu{R zj3qcKor0|x!O@8sX&t($LQhjTay|g;{PT!=cw+M({a}KKn*o}-uLN2S_I9lHXZj7$ zt!&z-9&H+Uc>{OF#N}y+Mdl79s*0Y^N9f67XXAii4 zLE-Aj6zdO@F~!z@tLcBoxhu5G)nGUbx##k2bZVY;DzDfiP&v_x{)`w%LsSAf2kwgO zgtt2>x0NiglYdsk$P_Xz{o$}5&HlZF80=yr;gzo5l$Z0v^y#0986psn}b zH-caw>P)shg`i&-4REb5GrmJq>Cvcw1_i8?M94^vA_Rm51h$lWLj&t=2S(Aybhtm| z1JFw#TEKyfF(D@Xzl-UvbV@i69xxBT7I~?3Pc;7u;D3kKM;U>(;UloREou)A=n_8t z*&-2;30MZ6bAj-a?@*L)rjQ{GMj7ySQo3(dx?O+`3d>jrLOgc8>!UMidO>qv@tKwZGa;d@ z5{7up3yjiQrbCE*-wWZ{15PL^W*VYn2`>#8W#M~CM95!c-u)1z2H-~5x9up1$FjgE zdt0jF5MV1n6ENlFWoL*6r+_?Q6xWl_*$_}pm;qpzy*&>^a@(p1jKX_4CVw9&{y(!_ zZa+$I`l15tZPKGY8?Sb|z1zNIQ~5~N>Tl}<3tHtwY~Mw%tIyL0yF;i3Cs&%P_`kOL z-=pGh)UMz|y2LMxa$s8gscHToJOTFHQIXV8y#BXi%Vm)OIym`ymg2-HYH>oS3_-G3 zyBL+a(eKTAzsZ}qShb49i~QZ1Nv!+wA&z~l4(c9mz;qfhZja6G+i%OBUaVCPb;n}&IN5qO z@&U6YPzDhqX%jSpp>eJg`PAB{^ZkFqH8R=brl*_EX*3lD=lAu`7np?woyD;!l~f?j zLXybr&*-aoWY~p*Nb^Ut(l4s05FBr>8%du}6rJg7PE@Q8ffqlcI!d#98#`Sco77T- 
zL%Mu?oUXPw6#NtUClFj=5(E_PL1rODA0m(i3U=9g6G`4Ttp}iofU&?K7_r|s3c(*& zfSMm(szJ7gqR0UnCrI`AzQR3(S_KR}b-oiKdlc051%`Pjq#l0XD$ux^e4`ew_~Y;U z1t6`uvvjm>a|n($p940!dJ2Z9?4#jj0OsN=rvh1t?`I@7j*IgS2~L#y5M@0j~%#Ri0RW|4_iM>=hU4&t5b zD^_6Vr5?L_dj0NH0)t~MY58=W+cD|=wec0xPS7YTq{Moc}ky2xd@ z=Psb2j2ho(l2=;I6CIb~v;$3e{ML)1cOU zp@8kQfUA0&2W-)KqvmepJq5XuVW^%_Pty8r$&Pnw>HHCD=OHZ69d;7;*}z-xYNj|( zN>;wrf`0#YWp!c7Y&phG$ny&t&<80aLoEGPe z{*s?SP+s~h`XTPBi}MagP(Qe(fxv40>m$AZhfHDble^}BI(CR`9Raw3qL4&544()& z*%-?@I0%oBlQHedbUGiFiTycoX0h$2NZNwqg1Nul&|KQeM2-nZDxpkexp?1N|EGwO zyNxpLrLEY#Oj#UO2>%>~7<>FRvZznPV7t}AV7hMI?6ih#|4bHDv@f+Ik;TgTMe{Y- z%;iQU$Y8%r=c(-BYDay?Cv%74C&)pU6>zdMGMZ0lk%d2^zPdg$$ZORK3 zSCGFkP};xX8{goL{(XvoZ-Px!*A;o@pwDR^XwKfBNx$x?c1oiK1-RJoy!Z=Ze8*Tj z*6>LR`CJqV_S*i`b~SY|#9S$rV;T>E6R08u)ziPprM<6!h=mA-@`7Da?b`B~_wi4@ z5Rmj^J14)tF9bmxAo8nkVYA=IEg%{IZz$>IQ77G>_us+)Wy}BN+EV=hq(kIhReHQD zUAGAJ22A~@o$%KJ@H;!@`&+?WSeZ(1wfx=Tq2r`tMY}&BB#OXY=DMjU4&icWNP)IZ zgt`y+p*v`S62QvdUK>I8!LpzzP;JB3t5W>S`0wUmx&j`zmdFyK|G6v#wEQ5TPDZ-r z3}Nj7=>X0@JN`rlkrrbE8yrgSW@YXhd-LAi=1dd6lgJ>o&?*aLKvx%|=n$?X1gN&rv{>iepHue?*jEbBNq<3l zp8;~{cfGQc+YlMe|5;2Iv>JbGHt;!Raj2+o2q#;Ajy^pcJM_sHl`QxBQhos9hhcFd z7cM@paeWT-dni`#b#iJwv#~sXZRt%pA^!but)S|dT}oZ09@8o4F*Z`FjGEZUIJfe2 z&j{~eLt)Y<$SUlbFtH+uaAsp|axn`5#Y9_@BE8ng-x?sdkPLT{P=?0mBsVqJ1}$DT z#q2hmYBz0ab|DE|_ccy_S#9=b)6;;$)%wAB^CKO5ithB>X9#(M_`gaB3MIDvZj_+) zPKAwG4J74{QgedEG#b9~B0h$GTRz4*X^npiXzagScI)i%viI6SWe6R({<{02D!^PP z@idLk!?}V;EO*mHGi_(#&9jM}B9@(E`82x@%^otj8XZE~N$_DWJD9kNRh#Px^J)Y>UqmlsJs+3UpZL%B0jL+jkj5}^($V9u5H6y`xs9st zW>6xD&mx=$u}dPDQ74!y6c987c09fYfBY4(!-@vf+-86N?Nl5e49mHOr`7cu-gNfg z`o#ptVZWTB$E)Ki68)E1zdrz5u$|rs0sIMxukT|M1hg{*sA9u21T_^16M~Tdl4FT! 
zHAqAUw7@0bl072Q^ZINoNPN1pxxYsy5J6d4|CA?LASl4cSvgGBZV159`-46nbZJu; zJ+4cpTD>-@7^`VpZgk^bm!12!eN7zv`~|BoQzvVv8!PtZ=q?)6XU7Gq`p>7s*K96Z z>#EvLeX6O`yv)Swy^I~{6207(M~ZHy_*B0RMjExd4h*CP-Ch!TKV!TWXbMTtK3A@H zS+B3Y_~P>hMxndfpncd&XH+i9B-TEB+zafz9pJikp0E_h*O{d^H^vO!T&CCZ@mbaW z=CHMFvlu9Fw!7appdD=G!5_4K$kQ0;PUKBJ@qYWnIspl6M!%aFXP+|xqEbv_I0 zP7jscvO>AtDI#obac8!wkEe_3(5{R-v)zL-s5SfFTQ_ILp}ii_8_f(^251x6cj_!$Dq%Cx&o0ro ztEoL)k?zh@Rnj&Q>b3KDyxNekK~CaHx>iXO|HfGAYba&P9})otn_qr62zog=82p?C z?pDmMGw7R2AWit^q=rz7!TDyZ=Io|wqI0e?elgqXTH4da!J+(v`D=?4=2*+qt=elV zAA1wtYCBb_i|c9q;{#Z`<%(5<>m4O=pY0R*?!mt95IafB=|+JK6Z(*80Vl5+&yhvb z)S1o_w4AWZ&C5(^zPdb&;vpOYpFP9+ArAJPn#)R4uCb>UM6XvJeU49EcH($DBid1k z+$kPwk7Jz{U0HUX4$68f7Rq;57}m8*qgu2VpuT z;QE-$Q7_$Gc(3WzZ8H;eMEBqB3N-TR_xFJ>b~dxw8&*oLzeig;r4bfAS&H09MwP8> zudP7s$b}}t(q?pj>iJu+b8ghnX6k7vG(9v5ua0IWt2WQi7Ag(+l6Mk;3)x0I>~5S4 zkNK+FKW5hfn&{T>Vt!ge{UdY((-Ous>KS|aJ3BZXB5n01{z@V|?4F&QRyDR85leSN zudQ!!?l!aw!%p!PZ5QzAkulqpSrrWy#*xtq*9zv|k>S~~7VGXF);htTLa#2+{MZ?k z#cg5~IL_1w`q;Bc!iEY}0F${k_hZZY`|TCZb)U=e>`T%O zQeWI;JPXDn(W`Tt0Q=J^0D6NzC2?5mgf+BUkFoW>-IlM$3s;q^NFU#Hg1% zZ4!1aGc&iq2^htBK#M0KwM(tj&o8E_PELhy_ys1aG^ zI5@|ipq%(*ciJrw7RR|f(3>VqJ)y8&c`{SaqKMDjFI`Q;U*jt8sV}c6)khc>!(O(i z*T`qV^)pTCplHSJ(n{oCY=&B@n>n1BZn!U^J_?w?BsQb zNa3tB3nJzaa>7R3p*OPUwv#C2`~k3JzFE$Uyl8RiLz294=wH>AYm!37@BzaBsnc!; zcaVC7G8ZL-+#kgWyvEw2->y~F*{dxO)Nnl#Ti;n>H=mc6r`m9$E$|Wh3Gbz>&bAne z;pIh5&tgeaJ>9GVn7r|LJ5b-oe7}}^X<*v=luy-=m$rj4vUC54tmBa2Sp@uuj_A!b z=Xe|KE>_cY0tdtFsk%L(M`hwg|A^&5>{0fZ&61SH`USlGjuYGTcT!Fi@VR<7$PRHbVwAY{#SF?8PwF)wXN4Hh=_{lMT&S4kR~D^H3}k9MYu{Q z0!oK~K!SuMHV`QRMWqMnB0>bDBoI)9M7ktW6W~Imgg|0wfspdW^36N*{`h`>=f|Ge zGiUZWYt5Qnp7lIy>$Jkco3ezONh^ehqVGuK~k(F#hMMfcGArF zq$+-GxLu4^vbxEHX|}B8w@O`=OKF4!gr_;`;N?bj3Qxpst6;bu(X)}3R7qd_>kmHX zz`$0N5X%UgXoU6PR#4rAQRlx>BA&GGrHL617|+ku#GItuWu# zK~F5egY8{VkPQGAWmw*ia>)Z)g#ZKLbKp3%qKDzo0?&AWg&Hl2E68t55RQI}a#hCT z%$%02yTh} z`(cUc7Uf7#B-Ul%o9~i*$U$4w7r@1&b7Ou@a|^wiXo|fZ)fm7FH$tIy+UL8|eJ2j8 
zS~*`LD^`CWlcuIO=#&7z{0G=cFI{3*^8|Ou*JbQ+$x`FGRJEJ^E$G}p0|LhYLR~Yp z#6*<2WHaDVw*4HSqebX4|dYTBZCQO!KNcJX|Cuoso@!ZZ3+R&TXXs>QMEwRkU z*z;3GXLMK<4OUKOdhdk68kvl4QP}8Kl;3J|oK_S)UF8wiGr2q`87A*}Y~$z&eeR6g?7Ky_|a7 z_-NwwB#CuahtQyNPVVS2Zf|^Nl zC_80)sDM$Cn@1d4ik|@5nw3bR7tP@mWbpaGnod zVVsBYxZCpxF6U&ehB@oV;jF^pJZyRO0$;M)ufOGRT1myOoP41$C+n~D%c=A!*DXe$ zGuy!pYZZ1)4KuyqTodLV%PUy@GNbKCEO+o=hRnpkl- z<#3XL#^hpf^)hgBTmrH3J!t?O;6q`5c;^3Ej%yJMGQ_Qvv@N}3Z6KbY^NkCYT53;? z0U$o95$NKey>S!hCPfcGBkLqiDr3ZB)EAgM{5Z*r;v|DZz?B1XJ0co_jZ3GV3X{*T z>$T-Q;LX3u{IT$XV*ikkCnTCwO*IYeBB9w{x@zAcnWFv!!A#B87fq6T4_=R!&ZtW0 zb|Y$XXnGhDjU)_*8e@jbU8s!>o2zLHI1?qNwBltR-elXLhK%1HjPNGZNBhuMs-Xi5 z{%;MB0H8W?M>6^n;sZ9f6*rFebz_^>flr>El?J{@Mq6F98xE^swp7@5ZjALRd5_{M zd3W9tjDawUyOpGntVzL$*V=ZD>;Zm!{bp=!Pfc6 zgF2+5>h`M+!Z@(YddZ*)1h% zU^qsPJ~xuPRAe!oD%YrKat(2YPsfWBA0|l?F@_}sx>+ve2$bD?JZBxZlCM)?^C zN{II{){J+@MKy>R=tq3X!u%T6jd(s{E>z#ZI%k)G5VE-rK9vjK>RlbKNPxM)4+AIf z)NialoN*#Xh%;O^VuD9D9&T{(=Kwq%-x(qiWVLokLs+U!hCJs=OnIns%YU9)@IcG! zV*b)l8F1U48eV%T*@xA)Ry_r!B|hP%Ent<5!!!j4o}@Z}2zav<(2Zn5sjB6Fux&Zc zJWZ%LoHkJpFgoYHbE13Go!PQAwlP%H-#*;RvIzwJLka7DR@Dk;Pkf9+HEqT-T{doT zJY6PPxMy&!#+9p~3s{lY5eP+(idBk?1NBif=K|mBZn`(|F8uZB0LtjzW8CZ)D~&Di z%aj%bz7hA%t8M;GEQsD+2Y{A z_HEEo&S-&4C&6F&*3cGPzKl5*tFIjao6mXi~6Lcx5mSDoNxzE{=A2ZK3A zk4I+4g7O?ztecRs_Bb3^`|orq`r4FfO23muD&U#ecZ&L`g;`;s(ylJm@70qf1V4PeXJzNtvs^(5 zDSHC-zO0?SJ&Mab`(tDISvcjnk;=AK1)kZ~PKWEu*!7-FjHetH;yFp(se9Us0;xEb z>sA%PHWjaX9(03lJjwE{n{~j?86N5WbD#JRFmm3);=j^1r#1dPsir!yTjamwbf3(A z5Mww4*ZvDXNfZJPr~f~`e&0$o*B@@|Ia+^mJ-0{FYLxdCyTK9eSLqPcX+!|Vkwt&c zbOJMykhly)h9udH4w`oBT%!x_I_6e6OMrh<51u=|s?SXc`b8-C=i!HsA)4(ZPy=BR zi_g?9&=pDq37G>DVpMwkE3R2}KzGuBEU75qWAf1FD$w zACV)Fn)i}s2}-9B2l|Ei$|W5gtrFI3BU5|0_@2B}D}1&yI%c@wN`?9OMtS&_G2-yB z?Uf~5llG7?KXB1-jr_hW`)CAhwQ&no8a-7o=lCH_Zk>SOu1^w{HMvQdBV2bQ>`d?Z zEYRPrjBi|Qx3_PzekVklpaVvx3jES+dF>+(K zcOIgy4nU|jfq^~W@NG4zQtJ>#{&+u^9le?|iGSW%H2wKLwH%e5X&{*0h@Hw*_yM6v z*Dq>(b#&Cz$Ts1PHWsm#_va8M6SG9;F4Gyq-oTzjG?!L$6k0_a;nfP4As{2Q{-~s* ziHSzvw~Raw@5iWi4bn4f}3?(ROy~G 
z!k@G6kriu(PA>f5S=~P&`MR>wy43-)zlbQgFKQrW!TEh1ENw@eyNI-_!_Dtri)7Mq zLjOd6*z$*}XoKDR;=~+}hTj@qVNnb*(OZ6iL?zGeJ^PBhi5u4jzzg<9tS_n5oA29; zq&<~)?eh`VG`pJ~G~Kh+TfNQ%T3fci?#q*^di5Gmt3DLW*ocEKJ2WOAcuW3y>f z{?uqYh?=sba?@WWcxd$26rHB0*AZ{}A#DdT^7`kep9uZx0GEaHlZh%mV?{XL!|2-< zXYD@KJLRMYK$Ej{XDI9VUr6?5O(BN+0(N4hl<@0}`9&z9FiSnGnK)^IE@O zaWxms#Km`tQqh8Geq)5Mh4LBSz7UoZf^|Wy<-HG%%EUiJLHl5QU~9bVPKe!D+BUA=QN zPDcycmh2okGnzuo9a+{gU`bDmka2vVSdKG*y>{MSf zM1H*v=Il)ShLW0QwvaIJH+5mE7KF(0p2<{t)%?=Xkv)_25il{gJWi6J+n1u1lOx(- z^A$BGJc9=(-qk^uxQcg>0Jct{#g%kl-4e3-?^=CAbQ~)<{%p;4(BdZs-ldurny0KE zc}l|1>2s84_7}{>UPV9ZlkCPR)C<;=ZUy8yJpu}1*YSLBnNZ=c?M)*Fd0S<-ZVft7 zrJzW{M5W<$dtHQ~>y@RshYvoHQAl7jOu>6~)fuzA{(5pnyxP~AAh|uS=`<&s{^#kK z7tYR0|Kl&yV z4$3+r1mZq}h27NlfygMcmFm~|xvh_=m8Q~S5nI@l0u34Z>x5NG0*N_?etV^!d7S32 zXDegO3`eG#uPEWA(Rm3nn`8N|T#I5ht^8?;9KyPyFAKwrxZKTZJLHiMiwA+4Bp zp#9OQ%a-zwlts(MXxx-(8^W)jsN-Xki1<~UH0H#??T%CAcyE& z_c<5t1Rh{&_wNsf5G2{}kloPzIX#Qq4HGnGF?Q~5i@0U{tx35;vu)dV9q>=OhqwZ) zD_4!kvA7On3tl=ghKt1%mAM`RAvx-8I53dg+gb-OzyXsmcgJhB;j5i+X%EW{{&@xk)IS}2jV^3)r%;Qm)b|q!J-0#Z^B&x77<6>Qg+53<=X}Ko1eEqc) zJf!TO8q<`9wq0gcR0})*Q`)aEXr;EGw_|4>S?ev=vma-3rqehNv|t8{L8if#lh+NP zSIig%FhjI;#@zMSvyxUhhL(FvNboC{-f;evkxI^MQy;(aw))*2i;5xXkAL{gkCcyQ zPaU4J8%(tUb;O7}-?wg{GbDOpn5z#*sRp`%X6C_}9cWRLk7>v9#GpEt2tz=VF_TPQ zSK#@^dgF%av~H|ZND}?)ncygu?0Ri&uTO^Dth0w4-{ec2#QuSh_-XA0d6yk>&F>Ul zPDqID_*4GgwjIod3z)_b642FKp;qlU_T* zTXhZ|QJDJihkwd{hwqKt-SqacX>89$2=yVDg1Y#{Xs-msrnf#YFW$S;xRmXBu`?SK zbfarZc(=hRqOMzPOZ)9!jjFC|In~VV+d>?aZ&JI^dtW%`$%de1zoh{x5$EL(IVQUR$aRA}K)rm&bp){b#czA>a8$ z*01Wp45KX%^zu{)H^S48O1nST6{*RCRtGGa8H@;1XZ`~$*EY0mMRhiy= F@ITtfD=z>5 literal 0 HcmV?d00001 diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java index b4f54fd4957..261f811a751 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java +++ 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParser; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParserMapper; import org.elasticsearch.search.aggregations.support.format.ValueFormat; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; @@ -140,12 +139,12 @@ public class MovAvgParser implements PipelineAggregator.Parser { formatter = ValueFormat.Patternable.Number.format(format).formatter(); } - MovAvgModelParser modelParser = movAvgModelParserMapper.get(model); + MovAvgModel.AbstractModelParser modelParser = movAvgModelParserMapper.get(model); if (modelParser == null) { throw new SearchParseException(context, "Unknown model [" + model + "] specified. 
Valid options are:" + movAvgModelParserMapper.getAllNames().toString(), parser.getTokenLocation()); } - MovAvgModel movAvgModel = modelParser.parse(settings); + MovAvgModel movAvgModel = modelParser.parse(settings, pipelineAggregatorName, context, window); return new MovAvgPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, formatter, gapPolicy, window, predict, movAvgModel); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index cc1e6682e70..af2db718846 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -117,21 +117,26 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy); currentKey = bucket.getKey(); + // Default is to reuse existing bucket. Simplifies the rest of the logic, + // since we only change newBucket if we can add to it + InternalHistogram.Bucket newBucket = bucket; + if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) { values.offer(thisBucketValue); - double movavg = model.next(values); + // Some models (e.g. 
HoltWinters) have certain preconditions that must be met + if (model.hasValue(values.size())) { + double movavg = model.next(values); - List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION)); - aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); - InternalHistogram.Bucket newBucket = factory.createBucket(currentKey, bucket.getDocCount(), new InternalAggregations( - aggs), bucket.getKeyed(), bucket.getFormatter()); - newBuckets.add(newBucket); - - } else { - newBuckets.add(bucket); + List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); + newBucket = factory.createBucket(currentKey, bucket.getDocCount(), new InternalAggregations( + aggs), bucket.getKeyed(), bucket.getFormatter()); + } } + newBuckets.add(newBucket); + if (predict > 0) { if (currentKey instanceof Number) { lastKey = ((Number) bucket.getKey()).longValue(); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java index b6ee7f8ddb6..2f33855d50e 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -83,7 +84,7 @@ public class EwmaModel extends MovAvgModel { out.writeDouble(alpha); } - public static class SingleExpModelParser implements 
MovAvgModelParser { + public static class SingleExpModelParser extends AbstractModelParser { @Override public String getName() { @@ -91,15 +92,13 @@ public class EwmaModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { - Double alpha; - if (settings == null || (alpha = (Double)settings.get("alpha")) == null) { - alpha = 0.5; - } + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); return new EwmaModel(alpha); } + } public static class EWMAModelBuilder implements MovAvgModelBuilder { diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index a78a5486460..3a7fd963c43 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.*; @@ -142,7 +143,7 @@ public class HoltLinearModel extends MovAvgModel { out.writeDouble(beta); } - public static class DoubleExpModelParser implements MovAvgModelParser { + public static class DoubleExpModelParser extends AbstractModelParser { @Override public String getName() { @@ -150,19 +151,10 @@ public class HoltLinearModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { - - Double alpha; - Double beta; - - if (settings == null || (alpha = (Double)settings.get("alpha")) == null) 
{ - alpha = 0.5; - } - - if (settings == null || (beta = (Double)settings.get("beta")) == null) { - beta = 0.5; - } + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); + double beta = parseDoubleParam(context, settings, "beta", 0.5); return new HoltLinearModel(alpha, beta); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java new file mode 100644 index 00000000000..ef3c7354500 --- /dev/null +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -0,0 +1,422 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.movavg.models; + + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.*; + +/** + * Calculate a triple exponential weighted moving average + */ +public class HoltWintersModel extends MovAvgModel { + + protected static final ParseField NAME_FIELD = new ParseField("holt_winters"); + + public enum SeasonalityType { + ADDITIVE((byte) 0, "add"), MULTIPLICATIVE((byte) 1, "mult"); + + /** + * Parse a string SeasonalityType into the byte enum + * + * @param text SeasonalityType in string format (e.g. 
"add") + * @return SeasonalityType enum + */ + @Nullable + public static SeasonalityType parse(String text) { + if (text == null) { + return null; + } + SeasonalityType result = null; + for (SeasonalityType policy : values()) { + if (policy.parseField.match(text)) { + if (result == null) { + result = policy; + } else { + throw new IllegalStateException("Text can be parsed to 2 different seasonality types: text=[" + text + + "], " + "policies=" + Arrays.asList(result, policy)); + } + } + } + if (result == null) { + final List validNames = new ArrayList<>(); + for (SeasonalityType policy : values()) { + validNames.add(policy.getName()); + } + throw new ElasticsearchParseException("Invalid seasonality type: [" + text + "], accepted values: " + validNames); + } + return result; + } + + private final byte id; + private final ParseField parseField; + + SeasonalityType(byte id, String name) { + this.id = id; + this.parseField = new ParseField(name); + } + + /** + * Serialize the SeasonalityType to the output stream + * + * @param out + * @throws IOException + */ + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id); + } + + /** + * Deserialize the SeasonalityType from the input stream + * + * @param in + * @return SeasonalityType Enum + * @throws IOException + */ + public static SeasonalityType readFrom(StreamInput in) throws IOException { + byte id = in.readByte(); + for (SeasonalityType seasonalityType : values()) { + if (id == seasonalityType.id) { + return seasonalityType; + } + } + throw new IllegalStateException("Unknown Seasonality Type with id [" + id + "]"); + } + + /** + * Return the english-formatted name of the SeasonalityType + * + * @return English representation of SeasonalityType + */ + public String getName() { + return parseField.getPreferredName(); + } + } + + + /** + * Controls smoothing of data. Alpha = 1 retains no memory of past values + * (e.g. random walk), while alpha = 0 retains infinite memory of past values (e.g. 
+ * mean of the series). Useful values are somewhere in between + */ + private double alpha; + + /** + * Equivalent to alpha, but controls the smoothing of the trend instead of the data + */ + private double beta; + + private double gamma; + + private int period; + + private SeasonalityType seasonalityType; + + private boolean pad; + private double padding; + + public HoltWintersModel(double alpha, double beta, double gamma, int period, SeasonalityType seasonalityType, boolean pad) { + this.alpha = alpha; + this.beta = beta; + this.gamma = gamma; + this.period = period; + this.seasonalityType = seasonalityType; + this.pad = pad; + + // Only pad if we are multiplicative and padding is enabled + // The padding amount is not currently user-configurable...i dont see a reason to expose it? + this.padding = seasonalityType.equals(SeasonalityType.MULTIPLICATIVE) && pad ? 0.0000000001 : 0; + } + + + @Override + public boolean hasValue(int windowLength) { + // We need at least (period * 2) data-points (e.g. two "seasons") + return windowLength >= period * 2; + } + + /** + * Predicts the next `n` values in the series, using the smoothing model to generate new values. + * Unlike the other moving averages, HoltWinters has forecasting/prediction built into the algorithm. + * Prediction is more than simply adding the next prediction to the window and repeating. HoltWinters + * will extrapolate into the future by applying the trend and seasonal information to the smoothed data. 
+ * + * @param values Collection of numerics to movingAvg, usually windowed + * @param numPredictions Number of newly generated predictions to return + * @param Type of numeric + * @return Returns an array of doubles, since most smoothing methods operate on floating points + */ + @Override + public double[] predict(Collection values, int numPredictions) { + return next(values, numPredictions); + } + + @Override + public double next(Collection values) { + return next(values, 1)[0]; + } + + /** + * Calculate a doubly exponential weighted moving average + * + * @param values Collection of values to calculate avg for + * @param numForecasts number of forecasts into the future to return + * + * @param Type T extending Number + * @return Returns a Double containing the moving avg for the window + */ + public double[] next(Collection values, int numForecasts) { + + if (values.size() < period * 2) { + // We need at least two full "seasons" to use HW + // This should have been caught earlier, we can't do anything now...bail + throw new AggregationExecutionException("Holt-Winters aggregation requires at least (2 * period == 2 * " + + period + " == "+(2 * period)+") data-points to function. 
Only [" + values.size() + "] were provided."); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[values.size()]; + + int counter = 0; + double[] vs = new double[values.size()]; + for (T v : values) { + vs[counter] = v.doubleValue() + padding; + counter += 1; + } + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + // TODO if perf is a problem, we can specialize a subclass to avoid conditionals on each iteration + if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } else { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } + + b = beta * (s - last_s) + (1 - beta) * last_b; + + if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } else { + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } + + last_s = s; + last_b = b; + } + + double[] forecastValues = new double[numForecasts]; + int seasonCounter = (values.size() - 1) - period; + + for (int i = 0; i < numForecasts; i++) { + + // TODO perhaps pad out seasonal to a power of 2 and use a mask instead of modulo? 
+ if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + forecastValues[i] = s + (i * b) * seasonal[seasonCounter % values.size()]; + } else { + forecastValues[i] = s + (i * b) + seasonal[seasonCounter % values.size()]; + } + + seasonCounter += 1; + } + + return forecastValues; + } + + public static final MovAvgModelStreams.Stream STREAM = new MovAvgModelStreams.Stream() { + @Override + public MovAvgModel readResult(StreamInput in) throws IOException { + double alpha = in.readDouble(); + double beta = in.readDouble(); + double gamma = in.readDouble(); + int period = in.readVInt(); + SeasonalityType type = SeasonalityType.readFrom(in); + boolean pad = in.readBoolean(); + + return new HoltWintersModel(alpha, beta, gamma, period, type, pad); + } + + @Override + public String getName() { + return NAME_FIELD.getPreferredName(); + } + }; + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(STREAM.getName()); + out.writeDouble(alpha); + out.writeDouble(beta); + out.writeDouble(gamma); + out.writeVInt(period); + seasonalityType.writeTo(out); + out.writeBoolean(pad); + } + + public static class HoltWintersModelParser extends AbstractModelParser { + + @Override + public String getName() { + return NAME_FIELD.getPreferredName(); + } + + @Override + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { + + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); + double beta = parseDoubleParam(context, settings, "beta", 0.5); + double gamma = parseDoubleParam(context, settings, "gamma", 0.5); + int period = parseIntegerParam(context, settings, "period", 1); + + if (windowSize < 2 * period) { + throw new SearchParseException(context, "Field [window] must be at least twice as large as the period when " + + "using Holt-Winters. 
Value provided was [" + windowSize + "], which is less than (2*period) == " + + (2 * period), null); + } + + SeasonalityType seasonalityType = SeasonalityType.ADDITIVE; + + if (settings != null) { + Object value = settings.get("type"); + if (value != null) { + if (value instanceof String) { + seasonalityType = SeasonalityType.parse((String)value); + } else { + throw new SearchParseException(context, "Parameter [type] must be a String, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + } + } + + boolean pad = parseBoolParam(context, settings, "pad", seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)); + + return new HoltWintersModel(alpha, beta, gamma, period, seasonalityType, pad); + } + } + + public static class HoltWintersModelBuilder implements MovAvgModelBuilder { + + private double alpha = 0.5; + private double beta = 0.5; + private double gamma = 0.5; + private int period = 1; + private SeasonalityType seasonalityType = SeasonalityType.ADDITIVE; + private boolean pad = true; + + /** + * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values + * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g. + * the series mean). Useful values are somewhere in between. Defaults to 0.5. 
+ * + * @param alpha A double between 0-1 inclusive, controls data smoothing + * + * @return The builder to continue chaining + */ + public HoltWintersModelBuilder alpha(double alpha) { + this.alpha = alpha; + return this; + } + + /** + * Equivalent to alpha, but controls the smoothing of the trend instead of the data + * + * @param beta a double between 0-1 inclusive, controls trend smoothing + * + * @return The builder to continue chaining + */ + public HoltWintersModelBuilder beta(double beta) { + this.beta = beta; + return this; + } + + public HoltWintersModelBuilder gamma(double gamma) { + this.gamma = gamma; + return this; + } + + public HoltWintersModelBuilder period(int period) { + this.period = period; + return this; + } + + public HoltWintersModelBuilder seasonalityType(SeasonalityType type) { + this.seasonalityType = type; + return this; + } + + public HoltWintersModelBuilder pad(boolean pad) { + this.pad = pad; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); + builder.startObject(MovAvgParser.SETTINGS.getPreferredName()); + builder.field("alpha", alpha); + builder.field("beta", beta); + builder.field("gamma", gamma); + builder.field("period", period); + builder.field("type", seasonalityType.getName()); + builder.field("pad", pad); + builder.endObject(); + return builder; + } + } +} + diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java index 24780a345c7..c894f776ed4 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -70,7 +71,7 @@ public class LinearModel extends MovAvgModel { out.writeString(STREAM.getName()); } - public static class LinearModelParser implements MovAvgModelParser { + public static class LinearModelParser extends AbstractModelParser { @Override public String getName() { @@ -78,7 +79,7 @@ public class LinearModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { return new LinearModel(); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java index 1fa30811f9c..5f41b24531b 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java @@ -21,14 +21,31 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models; import com.google.common.collect.EvictingQueue; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Map; public abstract class MovAvgModel { + /** + * Checks to see this model can produce a new value, without actually running the algo. 
+ * This can be used for models that have certain preconditions that need to be met in order + * to short-circuit execution + * + * @param windowLength Length of current window + * @return Returns `true` if calling next() will produce a value, `false` otherwise + */ + public boolean hasValue(int windowLength) { + // Default implementation can always provide a next() value + return true; + } + /** * Returns the next value in the series, according to the underlying smoothing model * @@ -90,6 +107,122 @@ public abstract class MovAvgModel { * @throws IOException */ public abstract void writeTo(StreamOutput out) throws IOException; + + /** + * Abstract class which also provides some concrete parsing functionality. + */ + public abstract static class AbstractModelParser { + + /** + * Returns the name of the model + * + * @return The model's name + */ + public abstract String getName(); + + /** + * Parse a settings hash that is specific to this model + * + * @param settings Map of settings, extracted from the request + * @param pipelineName Name of the parent pipeline agg + * @param context The parser context that we are in + * @param windowSize Size of the window for this moving avg + * @return A fully built moving average model + */ + public abstract MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize); + + + /** + * Extracts a 0-1 inclusive double from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Double value extracted from settings map + */ + protected double parseDoubleParam(SearchContext context, @Nullable Map settings, String name, double defaultValue) { + if (settings == null) { + return defaultValue; + } + + Object 
value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Double) { + double v = (Double)value; + if (v >= 0 && v <= 1) { + return v; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be between 0-1 inclusive. Provided" + + "value was [" + v + "]", null); + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be a double, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + + /** + * Extracts an integer from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Integer value extracted from settings map + */ + protected int parseIntegerParam(SearchContext context, @Nullable Map settings, String name, int defaultValue) { + if (settings == null) { + return defaultValue; + } + + Object value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Integer) { + return (Integer)value; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be an integer, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + + /** + * Extracts a boolean from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Boolean value extracted from settings map + */ + protected boolean parseBoolParam(SearchContext context, @Nullable Map settings, String name, boolean defaultValue) { + if (settings == null) 
{ + return defaultValue; + } + + Object value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Boolean) { + return (Boolean)value; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be a boolean, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + } + } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java index 12a61d42d2a..6233270edf7 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java @@ -31,23 +31,24 @@ import java.util.List; */ public class MovAvgModelModule extends AbstractModule { - private List> parsers = Lists.newArrayList(); + private List> parsers = Lists.newArrayList(); public MovAvgModelModule() { registerParser(SimpleModel.SimpleModelParser.class); registerParser(LinearModel.LinearModelParser.class); registerParser(EwmaModel.SingleExpModelParser.class); registerParser(HoltLinearModel.DoubleExpModelParser.class); + registerParser(HoltWintersModel.HoltWintersModelParser.class); } - public void registerParser(Class parser) { + public void registerParser(Class parser) { parsers.add(parser); } @Override protected void configure() { - Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), MovAvgModelParser.class); - for (Class clazz : parsers) { + Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), MovAvgModel.AbstractModelParser.class); + for (Class clazz : parsers) { parserMapBinder.addBinding().to(clazz); } bind(MovAvgModelParserMapper.class); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java deleted file mode 100644 index 43721e37309..00000000000 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.pipeline.movavg.models; - - -import org.elasticsearch.common.Nullable; - -import java.util.Map; - -/** - * Common interface for parsers used by the various Moving Average models - */ -public interface MovAvgModelParser { - public MovAvgModel parse(@Nullable Map settings); - - public String getName(); -} diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java index 2115f7e047b..bfd0c15c1c0 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java @@ -32,19 +32,19 @@ import java.util.Set; */ public class MovAvgModelParserMapper { - protected ImmutableMap movAvgParsers; + protected ImmutableMap movAvgParsers; @Inject - public MovAvgModelParserMapper(Set parsers) { - MapBuilder builder = MapBuilder.newMapBuilder(); - for (MovAvgModelParser parser : parsers) { + public MovAvgModelParserMapper(Set parsers) { + MapBuilder builder = MapBuilder.newMapBuilder(); + for (MovAvgModel.AbstractModelParser parser : parsers) { builder.put(parser.getName(), parser); } movAvgParsers = builder.immutableMap(); } public @Nullable - MovAvgModelParser get(String parserName) { + MovAvgModel.AbstractModelParser get(String parserName) { return movAvgParsers.get(parserName); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java index 68ffc3dd9aa..78055b063eb 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java @@ -25,6 
+25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -63,7 +64,7 @@ public class SimpleModel extends MovAvgModel { out.writeString(STREAM.getName()); } - public static class SimpleModelParser implements MovAvgModelParser { + public static class SimpleModelParser extends AbstractModelParser { @Override public String getName() { @@ -71,7 +72,7 @@ public class SimpleModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { return new SimpleModel(); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java index 41f90b94d84..7f5dd14005c 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java @@ -36,6 +36,7 @@ public class TransportMovAvgModelModule extends AbstractModule { registerStream(LinearModel.STREAM); registerStream(EwmaModel.STREAM); registerStream(HoltLinearModel.STREAM); + registerStream(HoltWintersModel.STREAM); } public void registerStream(MovAvgModelStreams.Stream stream) { diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index 0e0eb239ce0..3db9531a537 100644 --- 
a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -35,21 +35,12 @@ import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.movavg.models.*; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.hamcrest.Matchers; import org.junit.Test; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; @@ -79,6 +70,9 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { static int windowSize; static double alpha; static double beta; + static double gamma; + static int period; + static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; static ValuesSourceMetricsAggregationBuilder metric; static List mockHisto; @@ -87,7 +81,7 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { enum MovAvgType { - SIMPLE ("simple"), LINEAR("linear"), EWMA("ewma"), HOLT("holt"); + SIMPLE ("simple"), 
LINEAR("linear"), EWMA("ewma"), HOLT("holt"), HOLT_WINTERS("holt_winters"); private final String name; @@ -124,9 +118,13 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { interval = 5; numBuckets = randomIntBetween(6, 80); - windowSize = randomIntBetween(3, 10); + period = randomIntBetween(1, 5); + windowSize = randomIntBetween(period * 2, 10); // start must be 2*period to play nice with HW alpha = randomDouble(); beta = randomDouble(); + gamma = randomDouble(); + seasonalityType = randomBoolean() ? HoltWintersModel.SeasonalityType.ADDITIVE : HoltWintersModel.SeasonalityType.MULTIPLICATIVE; + gapPolicy = randomBoolean() ? BucketHelpers.GapPolicy.SKIP : BucketHelpers.GapPolicy.INSERT_ZEROS; metric = randomMetric("the_metric", VALUE_FIELD); @@ -212,6 +210,15 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { break; case HOLT: values.add(holt(window)); + break; + case HOLT_WINTERS: + // HW needs at least 2 periods of data to start + if (window.size() >= period * 2) { + values.add(holtWinters(window)); + } else { + values.add(null); + } + break; } @@ -308,7 +315,79 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { return s + (0 * b) ; } + /** + * Holt winters (triple exponential) moving avg + * @param window Window of values to compute movavg for + * @return + */ + private double holtWinters(Collection window) { + // Smoothed value + double s = 0; + double last_s = 0; + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[window.size()]; + + double padding = seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE) ? 
0.0000000001 : 0; + + int counter = 0; + double[] vs = new double[window.size()]; + for (double v : window) { + vs[counter] = v + padding; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } else { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } + + b = beta * (s - last_s) + (1 - beta) * last_b; + + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } else { + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } + + last_s = s; + last_b = b; + } + + int seasonCounter = (window.size() - 1) - period; + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + return s + (0 * b) * seasonal[seasonCounter % window.size()]; + } else { + return s + (0 * b) + seasonal[seasonCounter % window.size()]; + } + } /** @@ -522,6 +601,60 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test + public void HoltWintersValuedField() { + + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + 
.subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + .subAggregation(movingAvg("movavg_values") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("the_metric")) + ).execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size())); + + List expectedCounts = testValues.get(MovAvgType.HOLT_WINTERS.toString() + "_" + MetricTarget.COUNT.toString()); + List expectedValues = testValues.get(MovAvgType.HOLT_WINTERS.toString() + "_" + MetricTarget.VALUE.toString()); + + Iterator actualIter = buckets.iterator(); + Iterator expectedBucketIter = mockHisto.iterator(); + Iterator expectedCountsIter = expectedCounts.iterator(); + Iterator expectedValuesIter = expectedValues.iterator(); + + while (actualIter.hasNext()) { + assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); + + Histogram.Bucket actual = actualIter.next(); + PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); + Double expectedCount = expectedCountsIter.next(); + Double expectedValue = expectedValuesIter.next(); + + assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); + assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count)); + + assertBucketContents(actual, expectedCount, expectedValue); + } + } + 
@Test public void testPredictNegativeKeysAtStart() { @@ -572,6 +705,7 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test public void testSizeZeroWindow() { try { @@ -1070,6 +1204,55 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test + public void testHoltWintersNotEnoughData() { + try { + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(10) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + .subAggregation(movingAvg("movavg_values") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("the_metric")) + ).execute().actionGet(); + } catch (SearchPhaseExecutionException e) { + // All good + } + + } + + @Test + public void testBadModelParams() { + try { + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(10) + .modelBuilder(randomModelBuilder(100)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + ).execute().actionGet(); + } catch (SearchPhaseExecutionException e) { + // All good + } + + } + private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { @@ -1088,6 +1271,8 @@ public class MovAvgTests extends 
ElasticsearchIntegrationTest { SimpleValue countMovAvg = actual.getAggregations().get("movavg_counts"); if (expectedCount == null) { assertThat("[_count] movavg is not null", countMovAvg, nullValue()); + } else if (Double.isNaN(expectedCount)) { + assertThat("[_count] movavg should be NaN, but is ["+countMovAvg.value()+"] instead", countMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[_count] movavg is null", countMovAvg, notNullValue()); assertThat("[_count] movavg does not match expected ["+countMovAvg.value()+" vs "+expectedCount+"]", @@ -1098,6 +1283,8 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { SimpleValue valuesMovAvg = actual.getAggregations().get("movavg_values"); if (expectedValue == null) { assertThat("[value] movavg is not null", valuesMovAvg, Matchers.nullValue()); + } else if (Double.isNaN(expectedValue)) { + assertThat("[value] movavg should be NaN, but is ["+valuesMovAvg.value()+"] instead", valuesMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[value] movavg is null", valuesMovAvg, notNullValue()); assertThat("[value] movavg does not match expected ["+valuesMovAvg.value()+" vs "+expectedValue+"]", @@ -1106,17 +1293,24 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } private MovAvgModelBuilder randomModelBuilder() { + return randomModelBuilder(0); + } + + private MovAvgModelBuilder randomModelBuilder(double padding) { int rand = randomIntBetween(0,3); + // HoltWinters is excluded from random generation, because it's "cold start" behavior makes + // randomized testing too tricky. 
Should probably add dedicated, randomized tests just for HoltWinters, + // which can compensate for the idiosyncrasies switch (rand) { case 0: return new SimpleModel.SimpleModelBuilder(); case 1: return new LinearModel.LinearModelBuilder(); case 2: - return new EwmaModel.EWMAModelBuilder().alpha(alpha); + return new EwmaModel.EWMAModelBuilder().alpha(alpha + padding); case 3: - return new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta); + return new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha + padding).beta(beta + padding); default: return new SimpleModel.SimpleModelBuilder(); } diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java index a25f84d1902..0bd9711c7ef 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java @@ -28,6 +28,8 @@ import static org.hamcrest.Matchers.equalTo; import org.junit.Test; +import java.util.Arrays; + public class MovAvgUnitTests extends ElasticsearchTestCase { @Test @@ -259,7 +261,7 @@ public class MovAvgUnitTests extends ElasticsearchTestCase { MovAvgModel model = new HoltLinearModel(alpha, beta); int windowSize = randomIntBetween(1, 50); - int numPredictions = randomIntBetween(1,50); + int numPredictions = randomIntBetween(1, 50); EvictingQueue window = EvictingQueue.create(windowSize); for (int i = 0; i < windowSize; i++) { @@ -297,4 +299,288 @@ public class MovAvgUnitTests extends ElasticsearchTestCase { assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); } } + + @Test + public void testHoltWintersMultiplicativePadModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new 
HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.MULTIPLICATIVE, true); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v + 0.0000000001; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + double expected = s + (0 * b) * seasonal[seasonCounter % windowSize];; + double actual = model.next(window); + assertThat(Double.compare(expected, actual), equalTo(0)); + } + + @Test + public void testHoltWintersMultiplicativePadPredictionModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + 
MovAvgModel model = new HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.MULTIPLICATIVE, true); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + int numPredictions = randomIntBetween(1, 50); + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + double actual[] = model.predict(window, numPredictions); + double expected[] = new double[numPredictions]; + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v + 0.0000000001; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + + for (int i = 0; i < numPredictions; i++) { + + expected[i] = s + (i * b) * seasonal[seasonCounter % windowSize]; + assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); + seasonCounter += 1; + } + + } + + @Test + public void testHoltWintersAdditiveModel() { + double alpha = randomDouble(); + double beta = 
randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.ADDITIVE, false); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + double expected = s + (0 * b) + seasonal[seasonCounter % windowSize];; + double actual = model.next(window); + assertThat(Double.compare(expected, actual), equalTo(0)); + } + + @Test + public void testHoltWintersAdditivePredictionModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new 
HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.ADDITIVE, false); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + int numPredictions = randomIntBetween(1, 50); + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + double actual[] = model.predict(window, numPredictions); + double expected[] = new double[numPredictions]; + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + + for (int i = 0; i < numPredictions; i++) { + + expected[i] = s + (i * b) + seasonal[seasonCounter % windowSize]; + assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); + seasonCounter += 1; + } + + } } From 6d96bfc98b981f1e5776da7c5c3cc50eef73cd42 Mon Sep 17 00:00:00 2001 From: jaymode Date: Wed, 27 May 2015 15:15:38 -0400 Subject: [PATCH 042/123] 
catch UnsatisfiedLinkError on JNA load This catches UnsatisfiedLinkError when attempting to load the JNA Native class, in cases where there are errors loading the native libraries that JNA needs to function. --- src/main/java/org/elasticsearch/bootstrap/Natives.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/elasticsearch/bootstrap/Natives.java b/src/main/java/org/elasticsearch/bootstrap/Natives.java index 3342cdfd2c0..2048895f59e 100644 --- a/src/main/java/org/elasticsearch/bootstrap/Natives.java +++ b/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -34,12 +34,14 @@ class Natives { static { try { - // load one of the main JNA classes to see if the classes are available. this does not ensure that native - // libraries are available + // load one of the main JNA classes to see if the classes are available. this does not ensure that all native + // libraries are available, only the ones necessary by JNA to function Class.forName("com.sun.jna.Native"); jnaAvailable = true; - } catch(ClassNotFoundException e) { - logger.warn("JNA not found. native methods will be disabled."); + } catch (ClassNotFoundException e) { + logger.warn("JNA not found. 
native methods will be disabled.", e); + } catch (UnsatisfiedLinkError e) { + logger.warn("unable to load JNA native support library, native methods will be disabled.", e); } } From d32a80f37b1e09d37b1556ba5cce74d0b76bb8c4 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Wed, 27 May 2015 16:13:36 -0400 Subject: [PATCH 043/123] Docs: Fix misplaced images in moving_avg docs --- .../pipeline/movavg-aggregation.asciidoc | 4 ++-- .../{reducers_movavg => pipeline_movavg}/triple.png | Bin .../triple_prediction.png | Bin .../triple_untruncated.png | Bin 4 files changed, 2 insertions(+), 2 deletions(-) rename docs/reference/images/{reducers_movavg => pipeline_movavg}/triple.png (100%) rename docs/reference/images/{reducers_movavg => pipeline_movavg}/triple_prediction.png (100%) rename docs/reference/images/{reducers_movavg => pipeline_movavg}/triple_untruncated.png (100%) diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index 6f998ffca98..20677b404af 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -255,7 +255,7 @@ does not backcast. [[holt_winters_cold_start]] .Holt-Winters showing a "cold" start where no values are emitted -image::images/reducers_movavg/triple_untruncated.png[] +image::images/pipeline_movavg/triple_untruncated.png[] Because the "cold start" obscures what the moving average looks like, the rest of the Holt-Winters images are truncated to not show the "cold start". Just be aware this will always be present at the beginning of your moving averages! @@ -290,7 +290,7 @@ The default value of `period` is `1`. 
[[holt_winters_add]] .Holt-Winters moving average with window of size 120, alpha = 0.5, beta = 0.7, gamma = 0.3, period = 30 -image::images/reducers_movavg/triple.png[] +image::images/pipeline_movavg/triple.png[] ===== Multiplicative Holt-Winters diff --git a/docs/reference/images/reducers_movavg/triple.png b/docs/reference/images/pipeline_movavg/triple.png similarity index 100% rename from docs/reference/images/reducers_movavg/triple.png rename to docs/reference/images/pipeline_movavg/triple.png diff --git a/docs/reference/images/reducers_movavg/triple_prediction.png b/docs/reference/images/pipeline_movavg/triple_prediction.png similarity index 100% rename from docs/reference/images/reducers_movavg/triple_prediction.png rename to docs/reference/images/pipeline_movavg/triple_prediction.png diff --git a/docs/reference/images/reducers_movavg/triple_untruncated.png b/docs/reference/images/pipeline_movavg/triple_untruncated.png similarity index 100% rename from docs/reference/images/reducers_movavg/triple_untruncated.png rename to docs/reference/images/pipeline_movavg/triple_untruncated.png From 2f57ae93454f2bd2ef6a6bf90feafe5ec4aa01f7 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 27 May 2015 16:53:35 +0200 Subject: [PATCH 044/123] Internal: deduplicate field names returned by `simpleMatchToFullName` & `simpleMatchToIndexNames` in FieldMappersLookup Relates to #10916 Closes #11377 --- .../index/mapper/DocumentFieldMappers.java | 5 ++--- .../index/mapper/FieldMappersLookup.java | 12 ++++++------ .../index/mapper/MapperService.java | 4 ++-- .../index/query/ExistsQueryParser.java | 10 +++------- .../index/query/MissingQueryParser.java | 10 +++------- .../index/query/QueryParseContext.java | 16 +++------------- .../search/highlight/HighlightPhase.java | 4 ++-- .../index/mapper/FieldMappersLookupTests.java | 7 ++++--- .../test/ElasticsearchIntegrationTest.java | 12 ++---------- 9 files changed, 27 insertions(+), 53 deletions(-) diff --git 
a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index c5387cf3459..e4f61db2df1 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.analysis.FieldNameAnalyzer; import java.util.Collection; import java.util.Iterator; -import java.util.List; import java.util.Map; /** @@ -98,11 +97,11 @@ public final class DocumentFieldMappers implements Iterable { return fieldMappers.get(field); } - List simpleMatchToIndexNames(String pattern) { + Collection simpleMatchToIndexNames(String pattern) { return fieldMappers.simpleMatchToIndexNames(pattern); } - public List simpleMatchToFullName(String pattern) { + public Collection simpleMatchToFullName(String pattern) { return fieldMappers.simpleMatchToFullName(pattern); } diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java index 18f71d142db..d751c95910e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper; -import com.google.common.collect.Lists; +import com.google.common.collect.Sets; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.regex.Regex; @@ -27,7 +27,7 @@ import org.elasticsearch.common.regex.Regex; import java.util.Collection; import java.util.Collections; import java.util.Iterator; -import java.util.List; +import java.util.Set; /** * A class that holds a map of field mappers from name, index name, and full name. 
@@ -114,8 +114,8 @@ class FieldMappersLookup implements Iterable { /** * Returns a list of the index names of a simple match regex like pattern against full name and index name. */ - public List simpleMatchToIndexNames(String pattern) { - List fields = Lists.newArrayList(); + public Collection simpleMatchToIndexNames(String pattern) { + Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { fields.add(fieldMapper.names().indexName()); @@ -129,8 +129,8 @@ class FieldMappersLookup implements Iterable { /** * Returns a list of the full names of a simple match regex like pattern against full name and index name. */ - public List simpleMatchToFullName(String pattern) { - List fields = Lists.newArrayList(); + public Collection simpleMatchToFullName(String pattern) { + Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { fields.add(fieldMapper.names().fullName()); diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8a6e5264f8f..85a3aae5bd4 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -481,14 +481,14 @@ public class MapperService extends AbstractIndexComponent { * Returns all the fields that match the given pattern. If the pattern is prefixed with a type * then the fields will be returned with a type prefix. */ - public List simpleMatchToIndexNames(String pattern) { + public Collection simpleMatchToIndexNames(String pattern) { return simpleMatchToIndexNames(pattern, null); } /** * Returns all the fields that match the given pattern, with an optional narrowing * based on a list of types. 
*/ - public List simpleMatchToIndexNames(String pattern, @Nullable String[] types) { + public Collection simpleMatchToIndexNames(String pattern, @Nullable String[] types) { if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return ImmutableList.of(pattern); diff --git a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index 3d049d98e7c..fe981193805 100644 --- a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -19,11 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,7 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; import java.io.IOException; -import java.util.List; +import java.util.Collection; /** * @@ -89,7 +85,7 @@ public class ExistsQueryParser implements QueryParser { fieldPattern = fieldPattern + ".*"; } - List fields = parseContext.simpleMatchToIndexNames(fieldPattern); + Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); if (fields.isEmpty()) { // no fields exists, so we should not match anything return Queries.newMatchNoDocsQuery(); diff --git a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index 8767e0b3a51..16596b6bdfb 100644 --- a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ 
b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -19,11 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,7 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; import java.io.IOException; -import java.util.List; +import java.util.Collection; /** * @@ -100,7 +96,7 @@ public class MissingQueryParser implements QueryParser { fieldPattern = fieldPattern + ".*"; } - List fields = parseContext.simpleMatchToIndexNames(fieldPattern); + Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); if (fields.isEmpty()) { if (existence) { // if we ask for existence of fields, and we found none, then we should match on all diff --git a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index e3ee40c3ae5..e2be1229fa6 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; @@ -38,11 +37,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.AnalysisService; 
import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperBuilders; -import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; @@ -52,12 +47,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * @@ -281,7 +271,7 @@ public class QueryParseContext { } } - public List simpleMatchToIndexNames(String pattern) { + public Collection simpleMatchToIndexNames(String pattern) { return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes()); } diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index f005f5be7c3..5b9ab72641a 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -34,7 +34,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; -import java.util.List; +import java.util.Collection; import java.util.Map; import static com.google.common.collect.Maps.newHashMap; @@ -77,7 +77,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { public void hitExecute(SearchContext context, HitContext hitContext) { Map highlightFields = 
newHashMap(); for (SearchContextHighlight.Field field : context.highlight().fields()) { - List fieldNamesToHighlight; + Collection fieldNamesToHighlight; if (Regex.isSimpleMatchPattern(field.field())) { DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type()); fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field()); diff --git a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java index 1ec854c4c65..75c8e18fc91 100644 --- a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.test.ElasticsearchTestCase; import java.io.IOException; +import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -39,7 +40,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FieldMappersLookup lookup = new FieldMappersLookup(); assertNull(lookup.fullName("foo")); assertNull(lookup.indexName("foo")); - List names = lookup.simpleMatchToFullName("foo"); + Collection names = lookup.simpleMatchToFullName("foo"); assertNotNull(names); assertTrue(names.isEmpty()); names = lookup.simpleMatchToFullName("foo"); @@ -105,7 +106,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldMappersLookup lookup = new FieldMappersLookup(); lookup = lookup.copyAndAddAll(newList(f1, f2)); - List names = lookup.simpleMatchToIndexNames("b*"); + Collection names = lookup.simpleMatchToIndexNames("b*"); assertTrue(names.contains("baz")); assertTrue(names.contains("boo")); } @@ -115,7 +116,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); 
FieldMappersLookup lookup = new FieldMappersLookup(); lookup = lookup.copyAndAddAll(newList(f1, f2)); - List names = lookup.simpleMatchToFullName("b*"); + Collection names = lookup.simpleMatchToFullName("b*"); assertTrue(names.contains("foo")); assertTrue(names.contains("bar")); } diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index d046a2f05ad..0655588e35d 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -145,15 +145,7 @@ import java.net.InetSocketAddress; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; +import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -906,7 +898,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue()); for (String fieldName : fieldNames) { - List matches = documentMapper.mappers().simpleMatchToFullName(fieldName); + Collection matches = documentMapper.mappers().simpleMatchToFullName(fieldName); assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable())); } } From 91e9caabd7139e0fa48687d49fb5f35916a41d78 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Thu, 28 May 2015 11:54:56 +0200 Subject: [PATCH 045/123] [TEST] add path.home to settings --- .../node/internal/InternalSettingsPreparerTests.java | 
1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 77cd1aebfcb..315a5f7f2bd 100644 --- a/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -67,6 +67,7 @@ public class InternalSettingsPreparerTests extends ElasticsearchTestCase { // test that we can read config files with .yaml, .json, and .properties suffixes Tuple tuple = InternalSettingsPreparer.prepareSettings(settingsBuilder() .put("config.ignore_system_properties", true) + .put("path.home", createTempDir().toString()) .build(), true); assertThat(tuple.v1().get("yaml.config.exists"), equalTo("true")); From 105f4dd512115d0a802319af02c5f68f789bcdfc Mon Sep 17 00:00:00 2001 From: jaymode Date: Thu, 28 May 2015 06:39:51 -0400 Subject: [PATCH 046/123] Test: filter out colons in test section names On Windows, colons ':' are illegal in file names and since we use a Path to check if the test is blacklisted, tests with a colon in the test section name will fail. This change simply removes the colon from the name when matching against the blacklist. 
--- .../org/elasticsearch/test/rest/ElasticsearchRestTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/elasticsearch/test/rest/ElasticsearchRestTestCase.java b/src/test/java/org/elasticsearch/test/rest/ElasticsearchRestTestCase.java index 5951a9a5815..ee217ae1e8f 100644 --- a/src/test/java/org/elasticsearch/test/rest/ElasticsearchRestTestCase.java +++ b/src/test/java/org/elasticsearch/test/rest/ElasticsearchRestTestCase.java @@ -312,7 +312,7 @@ public abstract class ElasticsearchRestTestCase extends ElasticsearchIntegration //skip test if it matches one of the blacklist globs for (PathMatcher blacklistedPathMatcher : blacklistPathMatchers) { //we need to replace a few characters otherwise the test section name can't be parsed as a path on windows - String testSection = testCandidate.getTestSection().getName().replace("*", "").replace("\\", "/").replaceAll("\\s+/", "/").trim(); + String testSection = testCandidate.getTestSection().getName().replace("*", "").replace("\\", "/").replaceAll("\\s+/", "/").replace(":", "").trim(); String testPath = testCandidate.getSuitePath() + "/" + testSection; assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.matches(PathUtils.get(testPath))); } From 283b0931ff7de13b1e35ad8646bec1dba63b8112 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 26 May 2015 17:32:48 +0100 Subject: [PATCH 047/123] Aggregations fix: queries with size=0 broke aggregations that require scores. Aggregations like Sampler and TopHits that require access to scores did not work if the query has size param set to zero. The assumption was that the Lucene query scoring logic was not required in these cases. Added a Junit test to demonstrate the issue and a fix which relies on earlier creation of Collector wrappers so that Collector.needsScores() calls work for all search operations. 
Closes #11119 --- .../search/internal/ContextIndexSearcher.java | 12 ++- .../aggregations/bucket/TopHitsTests.java | 73 ++++++++++++++++++- 2 files changed, 81 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 388ed8e8ae9..e4c61209825 100644 --- a/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -133,8 +133,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { } } + @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(Query query, Collector collector) throws IOException { + // Wrap the caller's collector with various wrappers e.g. those used to siphon + // matches off for aggregation or to impose a time-limit on collection. final boolean timeoutSet = searchContext.timeoutInMillis() != -1; final boolean terminateAfterSet = searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER; @@ -166,8 +169,13 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { collector = new MinimumScoreCollector(collector, searchContext.minimumScore()); } } + super.search(query, collector); + } - // we only compute the doc id set once since within a context, we execute the same query always... 
+ @Override + public void search(List leaves, Weight weight, Collector collector) throws IOException { + final boolean timeoutSet = searchContext.timeoutInMillis() != -1; + final boolean terminateAfterSet = searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER; try { if (timeoutSet || terminateAfterSet) { try { diff --git a/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsTests.java b/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsTests.java index ba927efd641..a592ec18b95 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsTests.java @@ -63,7 +63,15 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; /** * @@ -228,7 +236,9 @@ public class TopHitsTests extends ElasticsearchIntegrationTest { @Test public void testBasics() throws Exception { - SearchResponse response = client().prepareSearch("idx").setTypes("type") + SearchResponse response = client() + .prepareSearch("idx") + .setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) @@ -264,6 +274,65 @@ public class TopHitsTests 
extends ElasticsearchIntegrationTest { } } + @Test + public void testIssue11119() throws Exception { + // Test that top_hits aggregation is fed scores if query results size=0 + SearchResponse response = client() + .prepareSearch("idx") + .setTypes("field-collapsing") + .setSize(0) + .setQuery(matchQuery("text", "x y z")) + .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) + .get(); + + assertSearchResponse(response); + + assertThat(response.getHits().getTotalHits(), equalTo(8l)); + assertThat(response.getHits().hits().length, equalTo(0)); + assertThat(response.getHits().maxScore(), equalTo(0f)); + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(3)); + + for (Terms.Bucket bucket : terms.getBuckets()) { + assertThat(bucket, notNullValue()); + TopHits topHits = bucket.getAggregations().get("hits"); + SearchHits hits = topHits.getHits(); + float bestScore = Float.MAX_VALUE; + for (int h = 0; h < hits.getHits().length; h++) { + float score=hits.getAt(h).getScore(); + assertThat(score, lessThanOrEqualTo(bestScore)); + assertThat(score, greaterThan(0f)); + bestScore = hits.getAt(h).getScore(); + } + } + + // Also check that min_score setting works when size=0 + // (technically not a test of top_hits but implementation details are + // tied up with the need to feed scores into the agg tree even when + // users don't want ranked set of query results.) 
+ response = client() + .prepareSearch("idx") + .setTypes("field-collapsing") + .setSize(0) + .setMinScore(0.0001f) + .setQuery(matchQuery("text", "x y z")) + .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")) + .get(); + + assertSearchResponse(response); + + assertThat(response.getHits().getTotalHits(), equalTo(8l)); + assertThat(response.getHits().hits().length, equalTo(0)); + assertThat(response.getHits().maxScore(), equalTo(0f)); + terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(3)); + } + + @Test public void testBreadthFirst() throws Exception { // breadth_first will be ignored since we need scores From a4c88b723304bb0d2be0f587a71d3dacf3b0289e Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 28 May 2015 12:42:18 +0200 Subject: [PATCH 048/123] Consolidate directory lock obtain code The Directory#makeLock API is trappy and can easily lead to unexpected lock release if native locks are used. see LUCENE-6507 for details. This commit consolidates the lock lock into one place and only returns the lock instance if we actually acquired it. 
--- .../elasticsearch/common/lucene/Lucene.java | 32 ++++++++++++++----- .../common/util/MultiDataPathUpgrader.java | 13 ++++---- .../elasticsearch/env/NodeEnvironment.java | 15 +++++---- .../org/elasticsearch/index/store/Store.java | 10 ++---- 4 files changed, 41 insertions(+), 29 deletions(-) diff --git a/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 385607d89ba..e3d787779c7 100644 --- a/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -173,6 +173,28 @@ public class Lucene { return SegmentInfos.readCommit(directory, segmentsFileName); } + /** + * Tries to acquire the {@link IndexWriter#WRITE_LOCK_NAME} on the given directory. The returned lock must be closed once + * the lock is released. If the lock can't be obtained a {@link LockObtainFailedException} is thrown. + * This method uses the {@link IndexWriterConfig#getDefaultWriteLockTimeout()} as the lock timeout. + */ + public static Lock acquireWriteLock(Directory directory) throws IOException { + return acquireLock(directory, IndexWriter.WRITE_LOCK_NAME, IndexWriterConfig.getDefaultWriteLockTimeout()); + } + + /** + * Tries to acquire a lock on the given directory. The returned lock must be closed once + * the lock is released. If the lock can't be obtained a {@link LockObtainFailedException} is thrown. + */ + @SuppressForbidden(reason = "this method uses trappy Directory#makeLock API") + public static Lock acquireLock(Directory directory, String lockName, long timeout) throws IOException { + final Lock writeLock = directory.makeLock(lockName); + if (writeLock.obtain(timeout) == false) { + throw new LockObtainFailedException("failed to obtain lock: " + writeLock); + } + return writeLock; + } + /** * This method removes all files from the given directory that are not referenced by the given segments file. 
* This method will open an IndexWriter and relies on index file deleter to remove all unreferenced files. Segment files @@ -184,10 +206,7 @@ public class Lucene { */ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Directory directory) throws IOException { final SegmentInfos si = readSegmentInfos(segmentsFileName, directory); - try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) { - if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock - throw new LockObtainFailedException("Index locked for write: " + writeLock); - } + try (Lock writeLock = acquireWriteLock(directory)) { int foundSegmentFiles = 0; for (final String file : directory.listAll()) { /** @@ -226,10 +245,7 @@ public class Lucene { * this operation fails. */ public static void cleanLuceneIndex(Directory directory) throws IOException { - try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) { - if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock - throw new LockObtainFailedException("Index locked for write: " + writeLock); - } + try (Lock writeLock = acquireWriteLock(directory)) { for (final String file : directory.listAll()) { if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { directory.deleteFile(file); // remove all segment_N files diff --git a/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java index 1cb700cff60..3425d151c34 100644 --- a/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ b/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.Directory; import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; import 
org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -32,6 +33,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; @@ -84,13 +86,12 @@ public class MultiDataPathUpgrader { ShardStateMetaData.FORMAT.write(loaded, loaded.version, targetPath.getShardStatePath()); Files.createDirectories(targetPath.resolveIndex()); try (SimpleFSDirectory directory = new SimpleFSDirectory(targetPath.resolveIndex())) { - try (final Lock lock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) { - if (lock.obtain(5000)) { - upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths); - } else { - throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex()); - } + try (final Lock lock = Lucene.acquireWriteLock(directory)) { + upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths); + } catch (LockObtainFailedException ex) { + throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex(), ex); } + } diff --git a/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 26a725f9072..75ef6914eae 100644 --- a/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.PathUtils; +import 
org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; @@ -146,18 +147,17 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) { logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); - Lock tmpLock = luceneDir.makeLock(NODE_LOCK_FILENAME); - boolean obtained = tmpLock.obtain(); - if (obtained) { + try { + locks[dirIndex] = Lucene.acquireLock(luceneDir, NODE_LOCK_FILENAME, 0); nodePaths[dirIndex] = new NodePath(dir, environment); - locks[dirIndex] = tmpLock; localNodeId = possibleLockId; - } else { + } catch (LockObtainFailedException ex) { logger.trace("failed to obtain node lock on {}", dir.toAbsolutePath()); // release all the ones that were obtained up until now releaseAndNullLocks(locks); break; } + } catch (IOException e) { logger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath()); lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e); @@ -314,8 +314,9 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { // open a directory (will be immediately closed) on the shard's location dirs[i] = new SimpleFSDirectory(p, FsDirectoryService.buildLockFactory(indexSettings)); // create a lock for the "write.lock" file - locks[i] = dirs[i].makeLock(IndexWriter.WRITE_LOCK_NAME); - if (locks[i].obtain() == false) { + try { + locks[i] = Lucene.acquireWriteLock(dirs[i]); + } catch (IOException ex) { throw new ElasticsearchException("unable to acquire " + IndexWriter.WRITE_LOCK_NAME + " for " + p); } diff --git a/src/main/java/org/elasticsearch/index/store/Store.java b/src/main/java/org/elasticsearch/index/store/Store.java index c889dd16c20..d92128a319c 100644 --- a/src/main/java/org/elasticsearch/index/store/Store.java +++ 
b/src/main/java/org/elasticsearch/index/store/Store.java @@ -259,10 +259,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref metadataLock.writeLock().lock(); // we make sure that nobody fetches the metadata while we do this rename operation here to ensure we don't // get exceptions if files are still open. - try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) { - if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock - throw new LockObtainFailedException("Index locked for write: " + writeLock); - } + try (Lock writeLock = Lucene.acquireWriteLock(directory())) { for (Map.Entry entry : entries) { String tempFile = entry.getKey(); String origFile = entry.getValue(); @@ -586,10 +583,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref */ public void cleanupAndVerify(String reason, MetadataSnapshot sourceMetaData) throws IOException { metadataLock.writeLock().lock(); - try (Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME)) { - if (!writeLock.obtain(IndexWriterConfig.getDefaultWriteLockTimeout())) { // obtain write lock - throw new LockObtainFailedException("Index locked for write: " + writeLock); - } + try (Lock writeLock = Lucene.acquireWriteLock(directory)) { final StoreDirectory dir = directory; for (String existingFile : dir.listAll()) { if (existingFile.equals(IndexWriter.WRITE_LOCK_NAME) || Store.isChecksum(existingFile) || sourceMetaData.contains(existingFile)) { From 38639074b4d8a7fe19076b31dd2207b6b31412e4 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 28 May 2015 16:24:41 +0200 Subject: [PATCH 049/123] Testing: Ensure cat API REST tests are unaffected by randomization The wildcard cat API REST tests relied on bulk.max and bulk.min in the thread_pool response. 
However due to the thread pool types being randomized in InternalTestCluster, the min/max values were not guaranteed to exist (the cached thread pool type is unbounded and thus does not have a max value). In order to prevent this, the test has been removed and now the cat nodes test is used for wildcard testing, which always returns stats about the heap. --- rest-api-spec/test/cat.nodes/10_basic.yaml | 10 ++++++++++ rest-api-spec/test/cat.thread_pool/10_basic.yaml | 12 ------------ 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/test/cat.nodes/10_basic.yaml index f7e9bedcd0b..5b2ddb7fc38 100755 --- a/rest-api-spec/test/cat.nodes/10_basic.yaml +++ b/rest-api-spec/test/cat.nodes/10_basic.yaml @@ -29,6 +29,16 @@ /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/ + - do: + cat.nodes: + h: heap.* + v: true + + - match: + $body: | + /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n + (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/ + - do: cat.nodes: h: file_desc.current,file_desc.percent,file_desc.max diff --git a/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/test/cat.thread_pool/10_basic.yaml index 283e353b7a3..edb87ce27b9 100755 --- a/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -29,18 +29,6 @@ / #pid id host ip port ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \s+ \n)+ $/ - - - do: - cat.thread_pool: - h: bulk.m* - - - match: - $body: | - /^ bulk.min \s+ bulk.max \s+ \n - (\s+ \d+ \s+ \d+ \s+ \n)+ $/ - -#(\s+ \d+ \s+ \d+ \n)+ $/ - - do: cat.thread_pool: h: id,ba,fa,gea,ga,ia,maa,ma,oa,pa From 55fc3a727b0d0ba18ba2852e7e630b07beee29a8 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Sun, 24 May 2015 15:22:06 -0700 Subject: [PATCH 050/123] Core: refactor upgrade API to use transport and write 
minimum compatible version that the index was upgraded to In #11072 we are adding a check that will prevent opening of old indices. However, this check doesn't take into consideration the fact that indices can be made compatible with the current version through upgrade API. In order to make compatibility check aware of the upgrade, the upgrade API should write a new setting `index.version.minimum_compatible` that will indicate the minimum compatible version of lucene this index is compatible with and `index.version.upgraded` that will indicate the version of elasticsearch that performed the upgrade. Closes #11095 --- docs/reference/indices/upgrade.asciidoc | 17 +- .../elasticsearch/action/ActionModule.java | 9 + .../indices/optimize/OptimizeRequest.java | 42 ---- .../upgrade/get/IndexShardUpgradeStatus.java | 78 +++++++ .../upgrade/get/IndexUpgradeStatus.java | 95 ++++++++ .../upgrade/get/ShardUpgradeStatus.java | 92 ++++++++ .../get/TransportUpgradeStatusAction.java | 152 +++++++++++++ .../upgrade/get/UpgradeStatusAction.java | 45 ++++ .../upgrade/get/UpgradeStatusRequest.java | 39 ++++ .../get/UpgradeStatusRequestBuilder.java | 33 +++ .../upgrade/get/UpgradeStatusResponse.java | 191 ++++++++++++++++ .../upgrade/post/ShardUpgradeRequest.java | 60 +++++ .../upgrade/post/ShardUpgradeResponse.java | 76 +++++++ .../upgrade/post/TransportUpgradeAction.java | 214 ++++++++++++++++++ .../post/TransportUpgradeSettingsAction.java | 86 +++++++ .../indices/upgrade/post/UpgradeAction.java | 46 ++++ .../indices/upgrade/post/UpgradeRequest.java | 91 ++++++++ .../upgrade/post/UpgradeRequestBuilder.java | 42 ++++ .../indices/upgrade/post/UpgradeResponse.java | 76 +++++++ .../upgrade/post/UpgradeSettingsAction.java | 45 ++++ ...radeSettingsClusterStateUpdateRequest.java | 51 +++++ .../upgrade/post/UpgradeSettingsRequest.java | 98 ++++++++ .../post/UpgradeSettingsRequestBuilder.java | 43 ++++ .../upgrade/post/UpgradeSettingsResponse.java | 51 +++++ .../client/IndicesAdminClient.java 
| 53 +++++ .../org/elasticsearch/client/Requests.java | 12 + .../client/support/AbstractClient.java | 38 ++++ .../cluster/metadata/IndexMetaData.java | 40 +++- .../metadata/MetaDataIndexUpgradeService.java | 18 +- .../MetaDataUpdateSettingsService.java | 37 +++ .../elasticsearch/index/shard/IndexShard.java | 35 ++- .../indices/upgrade/RestUpgradeAction.java | 90 +++----- .../snapshots/RestoreService.java | 2 + .../OldIndexBackwardsCompatibilityTests.java | 13 +- .../upgrade/UpgradeReallyOldIndexTest.java | 18 +- .../admin/indices/upgrade/UpgradeTest.java | 136 ++++------- 36 files changed, 2030 insertions(+), 234 deletions(-) create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexShardUpgradeStatus.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/ShardUpgradeStatus.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequestBuilder.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeRequest.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java create mode 
100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequest.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequestBuilder.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeResponse.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java create mode 100644 src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsResponse.java diff --git a/docs/reference/indices/upgrade.asciidoc b/docs/reference/indices/upgrade.asciidoc index 295a407f979..046dde2fe8a 100644 --- a/docs/reference/indices/upgrade.asciidoc +++ b/docs/reference/indices/upgrade.asciidoc @@ -54,13 +54,26 @@ curl 'http://localhost:9200/twitter/_upgrade?pretty&human' [source,js] -------------------------------------------------- { - "twitter": { + "size": "21gb", + "size_in_bytes": "21000000000", + "size_to_upgrade": "10gb", + "size_to_upgrade_in_bytes": "10000000000" + "size_to_upgrade_ancient": "1gb", + "size_to_upgrade_ancient_in_bytes": "1000000000" + "indices": { + "twitter": { "size": "21gb", "size_in_bytes": "21000000000", "size_to_upgrade": "10gb", "size_to_upgrade_in_bytes": "10000000000" "size_to_upgrade_ancient": "1gb", "size_to_upgrade_ancient_in_bytes": "1000000000" - } + } + } } -------------------------------------------------- + +The level of details in the upgrade status command can be controlled by +setting `level` parameter 
to `cluster`, `index` (default) or `shard` levels. +For example, you can run the upgrade status command with `level=shard` to +get detailed upgrade information of each individual shard. \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/ActionModule.java b/src/main/java/org/elasticsearch/action/ActionModule.java index 0decb393405..7bb66260a58 100644 --- a/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/src/main/java/org/elasticsearch/action/ActionModule.java @@ -111,6 +111,12 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesActi import org.elasticsearch.action.admin.indices.template.get.TransportGetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; +import org.elasticsearch.action.admin.indices.upgrade.get.TransportUpgradeStatusAction; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction; +import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeAction; +import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeSettingsAction; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction; @@ -256,6 +262,9 @@ public class ActionModule extends AbstractModule { registerAction(FlushAction.INSTANCE, TransportFlushAction.class); registerAction(SealIndicesAction.INSTANCE, TransportSealIndicesAction.class); registerAction(OptimizeAction.INSTANCE, TransportOptimizeAction.class); + registerAction(UpgradeAction.INSTANCE, TransportUpgradeAction.class); + 
registerAction(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class); + registerAction(UpgradeSettingsAction.INSTANCE, TransportUpgradeSettingsAction.class); registerAction(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class); registerAction(PutWarmerAction.INSTANCE, TransportPutWarmerAction.class); registerAction(DeleteWarmerAction.INSTANCE, TransportDeleteWarmerAction.class); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java index 3510a3b7f96..08f322a1154 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java @@ -42,15 +42,11 @@ public class OptimizeRequest extends BroadcastRequest { public static final int MAX_NUM_SEGMENTS = -1; public static final boolean ONLY_EXPUNGE_DELETES = false; public static final boolean FLUSH = true; - public static final boolean UPGRADE = false; - public static final boolean UPGRADE_ONLY_ANCIENT_SEGMENTS = false; } private int maxNumSegments = Defaults.MAX_NUM_SEGMENTS; private boolean onlyExpungeDeletes = Defaults.ONLY_EXPUNGE_DELETES; private boolean flush = Defaults.FLUSH; - private boolean upgrade = Defaults.UPGRADE; - private boolean upgradeOnlyAncientSegments = Defaults.UPGRADE_ONLY_ANCIENT_SEGMENTS; /** * Constructs an optimization request over one or more indices. @@ -114,30 +110,12 @@ public class OptimizeRequest extends BroadcastRequest { return this; } - /** - * Should the merge upgrade all old segments to the current index format. - * Defaults to false. 
- */ - public boolean upgrade() { - return upgrade; - } - - /** - * See {@link #upgrade()} - */ - public OptimizeRequest upgrade(boolean upgrade) { - this.upgrade = upgrade; - return this; - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); maxNumSegments = in.readInt(); onlyExpungeDeletes = in.readBoolean(); flush = in.readBoolean(); - upgrade = in.readBoolean(); - upgradeOnlyAncientSegments = in.readBoolean(); } @Override @@ -146,24 +124,6 @@ public class OptimizeRequest extends BroadcastRequest { out.writeInt(maxNumSegments); out.writeBoolean(onlyExpungeDeletes); out.writeBoolean(flush); - out.writeBoolean(upgrade); - out.writeBoolean(upgradeOnlyAncientSegments); - } - - /** - * Should the merge upgrade only the ancient (older major version of Lucene) segments? - * Defaults to false. - */ - public boolean upgradeOnlyAncientSegments() { - return upgradeOnlyAncientSegments; - } - - /** - * See {@link #upgradeOnlyAncientSegments()} - */ - public OptimizeRequest upgradeOnlyAncientSegments(boolean upgradeOnlyAncientSegments) { - this.upgradeOnlyAncientSegments = upgradeOnlyAncientSegments; - return this; } @Override @@ -172,8 +132,6 @@ public class OptimizeRequest extends BroadcastRequest { "maxNumSegments=" + maxNumSegments + ", onlyExpungeDeletes=" + onlyExpungeDeletes + ", flush=" + flush + - ", upgrade=" + upgrade + - ", upgradeOnlyAncientSegments=" + upgradeOnlyAncientSegments + '}'; } } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexShardUpgradeStatus.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexShardUpgradeStatus.java new file mode 100644 index 00000000000..e1cd16370c3 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexShardUpgradeStatus.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import com.google.common.collect.Iterators; +import org.elasticsearch.index.shard.ShardId; + +import java.util.Iterator; + +public class IndexShardUpgradeStatus implements Iterable { + + private final ShardId shardId; + + private final ShardUpgradeStatus[] shards; + + IndexShardUpgradeStatus(ShardId shardId, ShardUpgradeStatus[] shards) { + this.shardId = shardId; + this.shards = shards; + } + + public ShardId getShardId() { + return this.shardId; + } + + public ShardUpgradeStatus getAt(int i) { + return shards[i]; + } + + public ShardUpgradeStatus[] getShards() { + return this.shards; + } + + @Override + public Iterator iterator() { + return Iterators.forArray(shards); + } + + public long getTotalBytes() { + long totalBytes = 0; + for (ShardUpgradeStatus indexShardUpgradeStatus : shards) { + totalBytes += indexShardUpgradeStatus.getTotalBytes(); + } + return totalBytes; + } + + public long getToUpgradeBytes() { + long upgradeBytes = 0; + for (ShardUpgradeStatus indexShardUpgradeStatus : shards) { + upgradeBytes += indexShardUpgradeStatus.getToUpgradeBytes(); + } + return upgradeBytes; + } + + public long getToUpgradeBytesAncient() { + long upgradeBytesAncient = 0; + for (ShardUpgradeStatus 
indexShardUpgradeStatus : shards) { + upgradeBytesAncient += indexShardUpgradeStatus.getToUpgradeBytesAncient(); + } + return upgradeBytesAncient; + } +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java new file mode 100644 index 00000000000..33a60328951 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/IndexUpgradeStatus.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public class IndexUpgradeStatus implements Iterable { + + private final String index; + + private final Map indexShards; + + IndexUpgradeStatus(String index, ShardUpgradeStatus[] shards) { + this.index = index; + + Map> tmpIndexShards = Maps.newHashMap(); + for (ShardUpgradeStatus shard : shards) { + List lst = tmpIndexShards.get(shard.getShardRouting().id()); + if (lst == null) { + lst = Lists.newArrayList(); + tmpIndexShards.put(shard.getShardRouting().id(), lst); + } + lst.add(shard); + } + indexShards = Maps.newHashMap(); + for (Map.Entry> entry : tmpIndexShards.entrySet()) { + indexShards.put(entry.getKey(), new IndexShardUpgradeStatus(entry.getValue().get(0).getShardRouting().shardId(), entry.getValue().toArray(new ShardUpgradeStatus[entry.getValue().size()]))); + } + } + + public String getIndex() { + return this.index; + } + + /** + * A shard id to index shard upgrade status map (note, index shard upgrade status is the replication shard group that maps + * to the shard id). 
+ */ + public Map getShards() { + return this.indexShards; + } + + @Override + public Iterator iterator() { + return indexShards.values().iterator(); + } + + public long getTotalBytes() { + long totalBytes = 0; + for (IndexShardUpgradeStatus indexShardUpgradeStatus : indexShards.values()) { + totalBytes += indexShardUpgradeStatus.getTotalBytes(); + } + return totalBytes; + } + + public long getToUpgradeBytes() { + long upgradeBytes = 0; + for (IndexShardUpgradeStatus indexShardUpgradeStatus : indexShards.values()) { + upgradeBytes += indexShardUpgradeStatus.getToUpgradeBytes(); + } + return upgradeBytes; + } + + public long getToUpgradeBytesAncient() { + long upgradeBytesAncient = 0; + for (IndexShardUpgradeStatus indexShardUpgradeStatus : indexShards.values()) { + upgradeBytesAncient += indexShardUpgradeStatus.getToUpgradeBytesAncient(); + } + return upgradeBytesAncient; + } + + +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/ShardUpgradeStatus.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/ShardUpgradeStatus.java new file mode 100644 index 00000000000..e5f0261932c --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/ShardUpgradeStatus.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static org.elasticsearch.cluster.routing.ImmutableShardRouting.readShardRoutingEntry; + +public class ShardUpgradeStatus extends BroadcastShardResponse { + + private ShardRouting shardRouting; + + private long totalBytes; + + private long toUpgradeBytes; + + private long toUpgradeBytesAncient; + + ShardUpgradeStatus() { + } + + ShardUpgradeStatus(ShardRouting shardRouting, long totalBytes, long toUpgradeBytes, long upgradeBytesAncient) { + super(shardRouting.shardId()); + this.shardRouting = shardRouting; + this.totalBytes = totalBytes; + this.toUpgradeBytes = toUpgradeBytes; + this.toUpgradeBytesAncient = upgradeBytesAncient; + + } + + public ShardRouting getShardRouting() { + return this.shardRouting; + } + + public long getTotalBytes() { + return totalBytes; + } + + public long getToUpgradeBytes() { + return toUpgradeBytes; + } + + public long getToUpgradeBytesAncient() { + return toUpgradeBytesAncient; + } + + public static ShardUpgradeStatus readShardUpgradeStatus(StreamInput in) throws IOException { + ShardUpgradeStatus shard = new ShardUpgradeStatus(); + shard.readFrom(in); + return shard; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + shardRouting = readShardRoutingEntry(in); + totalBytes = in.readLong(); + toUpgradeBytes = in.readLong(); + toUpgradeBytesAncient = in.readLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardRouting.writeTo(out); + out.writeLong(totalBytes); + 
out.writeLong(toUpgradeBytes); + out.writeLong(toUpgradeBytesAncient); + } +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java new file mode 100644 index 00000000000..370dce6e41f --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -0,0 +1,152 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.upgrade.get;

import org.elasticsearch.Version;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicReferenceArray;

import static com.google.common.collect.Lists.newArrayList;

/**
 * Broadcast action that reports, for every active shard, how many segment bytes
 * would have to be rewritten to bring the shard's segments up to the current
 * Lucene version (total bytes, bytes to upgrade, and "ancient" bytes from a
 * previous Lucene major version).
 */
public class TransportUpgradeStatusAction
        extends TransportBroadcastAction<UpgradeStatusRequest, UpgradeStatusResponse,
        TransportUpgradeStatusAction.IndexShardUpgradeStatusRequest, ShardUpgradeStatus> {

    private final IndicesService indicesService;

    @Inject
    public TransportUpgradeStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                                        IndicesService indicesService, ActionFilters actionFilters) {
        super(settings, UpgradeStatusAction.NAME, threadPool, clusterService, transportService, actionFilters,
                UpgradeStatusRequest.class, IndexShardUpgradeStatusRequest.class, ThreadPool.Names.MANAGEMENT);
        this.indicesService = indicesService;
    }

    /**
     * Getting upgrade stats from *all* active shards.
     */
    @Override
    protected GroupShardsIterator shards(ClusterState clusterState, UpgradeStatusRequest request, String[] concreteIndices) {
        return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, UpgradeStatusRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, UpgradeStatusRequest countRequest, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, concreteIndices);
    }

    /**
     * Aggregates the per-shard responses, separating successes from broadcast
     * failures; {@code null} entries are inactive shards and are skipped.
     */
    @Override
    protected UpgradeStatusResponse newResponse(UpgradeStatusRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        int successfulShards = 0;
        int failedShards = 0;
        List<ShardOperationFailedException> shardFailures = null;
        final List<ShardUpgradeStatus> shards = newArrayList();
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // simply ignore non active shards
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    shardFailures = newArrayList();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                shards.add((ShardUpgradeStatus) shardResponse);
                successfulShards++;
            }
        }
        return new UpgradeStatusResponse(shards.toArray(new ShardUpgradeStatus[shards.size()]), shardsResponses.length(), successfulShards, failedShards, shardFailures);
    }

    @Override
    protected IndexShardUpgradeStatusRequest newShardRequest(int numShards, ShardRouting shard, UpgradeStatusRequest request) {
        return new IndexShardUpgradeStatusRequest(shard.shardId(), request);
    }

    @Override
    protected ShardUpgradeStatus newShardResponse() {
        return new ShardUpgradeStatus();
    }

    /**
     * Walks the shard's segments and sums the bytes that would be rewritten by
     * an upgrade. Segments from a previous Lucene *major* version count as both
     * "ancient" and "to upgrade"; a differing *minor* version counts only as
     * "to upgrade".
     */
    @Override
    protected ShardUpgradeStatus shardOperation(IndexShardUpgradeStatusRequest request) {
        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
        IndexShard indexShard = indexService.shardSafe(request.shardId().id());
        List<Segment> segments = indexShard.engine().segments(false);
        long totalBytes = 0;
        long toUpgradeBytes = 0;
        long toUpgradeBytesAncient = 0;
        for (Segment seg : segments) {
            totalBytes += seg.sizeInBytes;
            if (seg.version.major != Version.CURRENT.luceneVersion.major) {
                toUpgradeBytesAncient += seg.sizeInBytes;
                toUpgradeBytes += seg.sizeInBytes;
            } else if (seg.version.minor != Version.CURRENT.luceneVersion.minor) {
                // TODO: this comparison is bogus! it would cause us to upgrade even with the same format
                // instead, we should check if the codec has changed
                toUpgradeBytes += seg.sizeInBytes;
            }
        }

        return new ShardUpgradeStatus(indexShard.routingEntry(), totalBytes, toUpgradeBytes, toUpgradeBytesAncient);
    }

    /** Shard-level request; carries no extra state beyond the broadcast basics. */
    static class IndexShardUpgradeStatusRequest extends BroadcastShardRequest {

        IndexShardUpgradeStatusRequest() {
        }

        IndexShardUpgradeStatusRequest(ShardId shardId, UpgradeStatusRequest request) {
            super(shardId, request);
        }
    }
}
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +/** + */ +public class UpgradeStatusAction extends Action { + + public static final UpgradeStatusAction INSTANCE = new UpgradeStatusAction(); + public static final String NAME = "indices:monitor/upgrade"; + + private UpgradeStatusAction() { + super(NAME); + } + + @Override + public UpgradeStatusResponse newResponse() { + return new UpgradeStatusResponse(); + } + + @Override + public UpgradeStatusRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new UpgradeStatusRequestBuilder(client, this); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java new file mode 100644 index 00000000000..a951924720d --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequest.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import org.elasticsearch.action.support.broadcast.BroadcastRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class UpgradeStatusRequest extends BroadcastRequest { + + public UpgradeStatusRequest() { + this(Strings.EMPTY_ARRAY); + } + + public UpgradeStatusRequest(String... indices) { + super(indices); + } + +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequestBuilder.java new file mode 100644 index 00000000000..98dd1c1828d --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusRequestBuilder.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.get; + +import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * + */ +public class UpgradeStatusRequestBuilder extends BroadcastOperationRequestBuilder { + + public UpgradeStatusRequestBuilder(ElasticsearchClient client, UpgradeStatusAction action) { + super(client, action, new UpgradeStatusRequest()); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java new file mode 100644 index 00000000000..89520704049 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java @@ -0,0 +1,191 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.upgrade.get;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Response of the upgrade-status action: per-shard byte counts of segments that
 * would need upgrading, lazily aggregated per index and over the whole request.
 */
public class UpgradeStatusResponse extends BroadcastResponse implements ToXContent {

    private ShardUpgradeStatus[] shards;

    // Lazily built index-name -> per-index aggregation; cached after first access.
    private Map<String, IndexUpgradeStatus> indicesUpgradeStatus;

    UpgradeStatusResponse() {
    }

    UpgradeStatusResponse(ShardUpgradeStatus[] shards, int totalShards, int successfulShards, int failedShards,
                          List<ShardOperationFailedException> shardFailures) {
        super(totalShards, successfulShards, failedShards, shardFailures);
        this.shards = shards;
    }

    /**
     * Groups the shard-level statuses by index name, building (and caching) the
     * map on first access.
     */
    public Map<String, IndexUpgradeStatus> getIndices() {
        if (indicesUpgradeStatus != null) {
            return indicesUpgradeStatus;
        }
        Map<String, IndexUpgradeStatus> indicesUpgradeStats = Maps.newHashMap();

        Set<String> indices = Sets.newHashSet();
        for (ShardUpgradeStatus shard : shards) {
            indices.add(shard.getIndex());
        }

        for (String index : indices) {
            List<ShardUpgradeStatus> shards = Lists.newArrayList();
            for (ShardUpgradeStatus shard : this.shards) {
                if (shard.getShardRouting().index().equals(index)) {
                    shards.add(shard);
                }
            }
            indicesUpgradeStats.put(index, new IndexUpgradeStatus(index, shards.toArray(new ShardUpgradeStatus[shards.size()])));
        }
        this.indicesUpgradeStatus = indicesUpgradeStats;
        return indicesUpgradeStats;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        shards = new ShardUpgradeStatus[in.readVInt()];
        for (int i = 0; i < shards.length; i++) {
            shards[i] = ShardUpgradeStatus.readShardUpgradeStatus(in);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVInt(shards.length);
        for (ShardUpgradeStatus shard : shards) {
            shard.writeTo(out);
        }
    }

    /** Total size in bytes of all segments across all shards. */
    public long getTotalBytes() {
        long totalBytes = 0;
        for (IndexUpgradeStatus indexShardUpgradeStatus : getIndices().values()) {
            totalBytes += indexShardUpgradeStatus.getTotalBytes();
        }
        return totalBytes;
    }

    /** Bytes that would be rewritten by an upgrade, across all shards. */
    public long getToUpgradeBytes() {
        long upgradeBytes = 0;
        for (IndexUpgradeStatus indexShardUpgradeStatus : getIndices().values()) {
            upgradeBytes += indexShardUpgradeStatus.getToUpgradeBytes();
        }
        return upgradeBytes;
    }

    /** Bytes belonging to segments from a previous Lucene major version. */
    public long getToUpgradeBytesAncient() {
        long upgradeBytesAncient = 0;
        for (IndexUpgradeStatus indexShardUpgradeStatus : getIndices().values()) {
            upgradeBytesAncient += indexShardUpgradeStatus.getToUpgradeBytesAncient();
        }
        return upgradeBytesAncient;
    }

    /**
     * Renders the totals, and — depending on the {@code level} param
     * ("indices" by default, "shards" for full detail) — the per-index and
     * per-shard breakdowns.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, getTotalBytes());
        builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, getToUpgradeBytes());
        builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, getToUpgradeBytesAncient());

        String level = params.param("level", "indices");
        boolean outputShards = "shards".equals(level);
        boolean outputIndices = "indices".equals(level) || outputShards;
        if (outputIndices) {
            builder.startObject(Fields.INDICES);
            for (IndexUpgradeStatus indexUpgradeStatus : getIndices().values()) {
                builder.startObject(indexUpgradeStatus.getIndex(), XContentBuilder.FieldCaseConversion.NONE);

                builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, indexUpgradeStatus.getTotalBytes());
                builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, indexUpgradeStatus.getToUpgradeBytes());
                builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, indexUpgradeStatus.getToUpgradeBytesAncient());
                if (outputShards) {
                    builder.startObject(Fields.SHARDS);
                    for (IndexShardUpgradeStatus indexShardUpgradeStatus : indexUpgradeStatus) {
                        builder.startArray(Integer.toString(indexShardUpgradeStatus.getShardId().id()));
                        for (ShardUpgradeStatus shardUpgradeStatus : indexShardUpgradeStatus) {
                            builder.startObject();

                            // Fix: emit this shard's own sizes; the previous code called the
                            // response-level getters, so every shard reported the global totals.
                            builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, shardUpgradeStatus.getTotalBytes());
                            builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, shardUpgradeStatus.getToUpgradeBytes());
                            builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, shardUpgradeStatus.getToUpgradeBytesAncient());

                            builder.startObject(Fields.ROUTING);
                            builder.field(Fields.STATE, shardUpgradeStatus.getShardRouting().state());
                            builder.field(Fields.PRIMARY, shardUpgradeStatus.getShardRouting().primary());
                            builder.field(Fields.NODE, shardUpgradeStatus.getShardRouting().currentNodeId());
                            if (shardUpgradeStatus.getShardRouting().relocatingNodeId() != null) {
                                builder.field(Fields.RELOCATING_NODE, shardUpgradeStatus.getShardRouting().relocatingNodeId());
                            }
                            builder.endObject();

                            builder.endObject();
                        }
                        builder.endArray();
                    }
                    builder.endObject();
                }

                builder.endObject();
            }

            builder.endObject();
        }
        return builder;
    }

    static final class Fields {
        static final XContentBuilderString INDICES = new XContentBuilderString("indices");
        static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
        static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
        static final XContentBuilderString STATE = new XContentBuilderString("state");
        static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
        static final XContentBuilderString NODE = new XContentBuilderString("node");
        static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
        static final XContentBuilderString SIZE = new XContentBuilderString("size");
        static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes");
        static final XContentBuilderString SIZE_TO_UPGRADE = new XContentBuilderString("size_to_upgrade");
        static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT = new XContentBuilderString("size_to_upgrade_ancient");
        static final XContentBuilderString SIZE_TO_UPGRADE_IN_BYTES = new XContentBuilderString("size_to_upgrade_in_bytes");
        static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = new XContentBuilderString("size_to_upgrade_ancient_in_bytes");
    }
}
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + + +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; + +/** + * + */ +final class ShardUpgradeRequest extends BroadcastShardRequest { + + private UpgradeRequest request = new UpgradeRequest(); + + ShardUpgradeRequest() { + } + + ShardUpgradeRequest(ShardId shardId, UpgradeRequest request) { + super(shardId, request); + this.request = request; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + request.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + + public UpgradeRequest upgradeRequest() { + return this.request; + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java new file mode 100644 index 00000000000..efbb19142c3 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; +import java.text.ParseException; + +/** + * + */ +class ShardUpgradeResponse extends BroadcastShardResponse { + + private org.apache.lucene.util.Version version; + + private boolean primary; + + + ShardUpgradeResponse() { + } + + ShardUpgradeResponse(ShardId shardId, boolean primary, org.apache.lucene.util.Version version) { + super(shardId); + this.primary = primary; + this.version = version; + } + + public org.apache.lucene.util.Version version() { + return this.version; + } + + public boolean primary() { + return primary; + } + + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + primary = in.readBoolean(); + try { + version = org.apache.lucene.util.Version.parse(in.readString()); + } catch (ParseException ex) { + throw new IOException("failed to parse lucene version [" + version + "]", ex); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(primary); + out.writeString(version.toString()); + } + +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java 
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.upgrade.post;

import org.apache.lucene.util.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.PrimaryMissingActionException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReferenceArray;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newHashSet;

/**
 * Upgrade index/indices action: upgrades the segments of every shard and, when
 * all primaries of an index upgraded successfully, records the index's new
 * minimum Lucene version in its settings.
 */
public class TransportUpgradeAction extends TransportBroadcastAction<UpgradeRequest, UpgradeResponse, ShardUpgradeRequest, ShardUpgradeResponse> {

    private final IndicesService indicesService;

    private final TransportUpgradeSettingsAction upgradeSettingsAction;

    @Inject
    public TransportUpgradeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                  TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                  TransportUpgradeSettingsAction upgradeSettingsAction) {
        super(settings, UpgradeAction.NAME, threadPool, clusterService, transportService, actionFilters,
                UpgradeRequest.class, ShardUpgradeRequest.class, ThreadPool.Names.OPTIMIZE);
        this.indicesService = indicesService;
        this.upgradeSettingsAction = upgradeSettingsAction;
    }

    /**
     * Aggregates per-shard results: counts successes/failures, tracks how many
     * primaries per index upgraded, and keeps the *oldest* Lucene version seen
     * for each index (the version the whole index can safely be marked as).
     */
    @Override
    protected UpgradeResponse newResponse(UpgradeRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        int successfulShards = 0;
        int failedShards = 0;
        List<ShardOperationFailedException> shardFailures = null;
        Map<String, Integer> successfulPrimaryShards = newHashMap();
        Map<String, Version> versions = newHashMap();
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // a non active shard, ignore...
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    shardFailures = newArrayList();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                successfulShards++;
                ShardUpgradeResponse shardUpgradeResponse = (ShardUpgradeResponse) shardResponse;
                String index = shardUpgradeResponse.getIndex();
                if (shardUpgradeResponse.primary()) {
                    Integer count = successfulPrimaryShards.get(index);
                    successfulPrimaryShards.put(index, count == null ? 1 : count + 1);
                }
                Version version = versions.get(index);
                // Keep the minimum version across the index's shards.
                if (version == null || shardUpgradeResponse.version().onOrAfter(version) == false) {
                    versions.put(index, shardUpgradeResponse.version());
                }
            }
        }
        Map<String, String> updatedVersions = newHashMap();
        MetaData metaData = clusterState.metaData();
        for (Map.Entry<String, Version> versionEntry : versions.entrySet()) {
            String index = versionEntry.getKey();
            Integer primaryCount = successfulPrimaryShards.get(index);
            int expectedPrimaryCount = metaData.index(index).getNumberOfShards();
            // primaryCount may be null when only replicas of this index responded
            // successfully; the old unguarded `primaryCount == int` comparison would
            // throw a NullPointerException on unboxing in that case.
            if (primaryCount != null && primaryCount == expectedPrimaryCount) {
                updatedVersions.put(index, versionEntry.getValue().toString());
            } else {
                logger.warn("Not updating settings for the index [{}] because upgrade of some primary shards failed - expected[{}], received[{}]", index,
                        expectedPrimaryCount, primaryCount == null ? 0 : primaryCount);
            }
        }

        return new UpgradeResponse(updatedVersions, shardsResponses.length(), successfulShards, failedShards, shardFailures);
    }

    @Override
    protected ShardUpgradeRequest newShardRequest(int numShards, ShardRouting shard, UpgradeRequest request) {
        return new ShardUpgradeRequest(shard.shardId(), request);
    }

    @Override
    protected ShardUpgradeResponse newShardResponse() {
        return new ShardUpgradeResponse();
    }

    @Override
    protected ShardUpgradeResponse shardOperation(ShardUpgradeRequest request) {
        IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).shardSafe(request.shardId().id());
        org.apache.lucene.util.Version version = indexShard.upgrade(request.upgradeRequest());
        return new ShardUpgradeResponse(request.shardId(), indexShard.routingEntry().primary(), version);
    }

    /**
     * The upgrade request works against *all* shards.
     */
    @Override
    protected GroupShardsIterator shards(ClusterState clusterState, UpgradeRequest request, String[] concreteIndices) {
        GroupShardsIterator iterator = clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
        Set<String> indicesWithMissingPrimaries = indicesWithMissingPrimaries(clusterState, concreteIndices);
        if (indicesWithMissingPrimaries.isEmpty()) {
            return iterator;
        }
        // If some primary shards are not available the request should fail.
        throw new PrimaryMissingActionException("Cannot upgrade indices because the following indices are missing primary shards " + indicesWithMissingPrimaries);
    }

    /**
     * Finds all indices that have not all primaries available
     */
    private Set<String> indicesWithMissingPrimaries(ClusterState clusterState, String[] concreteIndices) {
        Set<String> indices = newHashSet();
        RoutingTable routingTable = clusterState.routingTable();
        for (String index : concreteIndices) {
            IndexRoutingTable indexRoutingTable = routingTable.index(index);
            if (indexRoutingTable.allPrimaryShardsActive() == false) {
                indices.add(index);
            }
        }
        return indices;
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, UpgradeRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, UpgradeRequest request, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices);
    }

    /**
     * Wraps the caller's listener so that, once the broadcast completes, any
     * recorded per-index versions are persisted to the index settings before
     * the original listener is notified.
     */
    @Override
    protected void doExecute(UpgradeRequest request, final ActionListener<UpgradeResponse> listener) {
        ActionListener<UpgradeResponse> settingsUpdateListener = new ActionListener<UpgradeResponse>() {
            @Override
            public void onResponse(UpgradeResponse upgradeResponse) {
                try {
                    if (upgradeResponse.versions().isEmpty()) {
                        listener.onResponse(upgradeResponse);
                    } else {
                        updateSettings(upgradeResponse, listener);
                    }
                } catch (Throwable t) {
                    listener.onFailure(t);
                }
            }

            @Override
            public void onFailure(Throwable e) {
                listener.onFailure(e);
            }
        };
        super.doExecute(request, settingsUpdateListener);
    }

    /** Persists the upgraded versions via the settings action, then completes the listener. */
    private void updateSettings(final UpgradeResponse upgradeResponse, final ActionListener<UpgradeResponse> listener) {
        UpgradeSettingsRequest upgradeSettingsRequest = new UpgradeSettingsRequest(upgradeResponse.versions());
        upgradeSettingsAction.execute(upgradeSettingsRequest, new ActionListener<UpgradeSettingsResponse>() {
            @Override
            public void onResponse(UpgradeSettingsResponse updateSettingsResponse) {
                listener.onResponse(upgradeResponse);
            }

            @Override
            public void onFailure(Throwable e) {
                listener.onFailure(e);
            }
        });
    }

}
+ */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/** + * + */ +public class TransportUpgradeSettingsAction extends TransportMasterNodeAction { + + private final MetaDataUpdateSettingsService updateSettingsService; + + @Inject + public TransportUpgradeSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, + MetaDataUpdateSettingsService updateSettingsService, ActionFilters actionFilters) { + super(settings, UpgradeSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, UpgradeSettingsRequest.class); + this.updateSettingsService = updateSettingsService; + } + + @Override + protected String executor() { + // we go async right away.... 
+ return ThreadPool.Names.SAME; + } + + @Override + protected ClusterBlockException checkBlock(UpgradeSettingsRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected UpgradeSettingsResponse newResponse() { + return new UpgradeSettingsResponse(); + } + + @Override + protected void masterOperation(final UpgradeSettingsRequest request, final ClusterState state, final ActionListener listener) { + UpgradeSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpgradeSettingsClusterStateUpdateRequest() + .ackTimeout(request.timeout()) + .versions(request.versions()) + .masterNodeTimeout(request.masterNodeTimeout()); + + updateSettingsService.upgradeIndexSettings(clusterStateUpdateRequest, new ActionListener() { + @Override + public void onResponse(ClusterStateUpdateResponse response) { + listener.onResponse(new UpgradeSettingsResponse(response.isAcknowledged())); + } + + @Override + public void onFailure(Throwable t) { + logger.debug("failed to upgrade minimum compatibility version settings on indices [{}]", t, request.versions().keySet()); + listener.onFailure(t); + } + }); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java new file mode 100644 index 00000000000..908a8a0d283 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Upgrade index/indices action. + */ +public class UpgradeAction extends Action { + + public static final UpgradeAction INSTANCE = new UpgradeAction(); + public static final String NAME = "indices:admin/upgrade"; + + private UpgradeAction() { + super(NAME); + } + + @Override + public UpgradeResponse newResponse() { + return new UpgradeResponse(); + } + + @Override + public UpgradeRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new UpgradeRequestBuilder(client, this); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequest.java new file mode 100644 index 00000000000..af328ce21ad --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequest.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.support.broadcast.BroadcastRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * A request to upgrade one or more indices. In order to upgrade all the indices, pass an empty array or + * null for the indices. + *

    + * @see org.elasticsearch.client.Requests#upgradeRequest(String...) + * @see org.elasticsearch.client.IndicesAdminClient#upgrade(UpgradeRequest) + * @see UpgradeResponse + */ +public class UpgradeRequest extends BroadcastRequest { + + public static final class Defaults { + public static final boolean UPGRADE_ONLY_ANCIENT_SEGMENTS = false; + } + + private boolean upgradeOnlyAncientSegments = Defaults.UPGRADE_ONLY_ANCIENT_SEGMENTS; + + /** + * Constructs an upgrade request over one or more indices. + * + * @param indices The indices to upgrade, no indices passed means all indices will be upgraded. + */ + public UpgradeRequest(String... indices) { + super(indices); + } + + public UpgradeRequest() { + + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + upgradeOnlyAncientSegments = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(upgradeOnlyAncientSegments); + } + + /** + * Should the upgrade apply only to the ancient (older major version of Lucene) segments? + * Defaults to false. 
+ */ + public boolean upgradeOnlyAncientSegments() { + return upgradeOnlyAncientSegments; + } + + /** + * See {@link #upgradeOnlyAncientSegments()} + */ + public UpgradeRequest upgradeOnlyAncientSegments(boolean upgradeOnlyAncientSegments) { + this.upgradeOnlyAncientSegments = upgradeOnlyAncientSegments; + return this; + } + + @Override + public String toString() { + return "UpgradeRequest{" + + "upgradeOnlyAncientSegments=" + upgradeOnlyAncientSegments + + '}'; + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequestBuilder.java new file mode 100644 index 00000000000..adc8ea5510a --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeRequestBuilder.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * A request to upgrade one or more indices. 
In order to optimize on all the indices, pass an empty array or + * null for the indices. + */ +public class UpgradeRequestBuilder extends BroadcastOperationRequestBuilder { + + public UpgradeRequestBuilder(ElasticsearchClient client, UpgradeAction action) { + super(client, action, new UpgradeRequest()); + } + + /** + * Should the upgrade only the ancient (older major version of Lucene) segments? + */ + public UpgradeRequestBuilder setUpgradeOnlyAncientSegments(boolean upgradeOnlyAncientSegments) { + request.upgradeOnlyAncientSegments(upgradeOnlyAncientSegments); + return this; + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeResponse.java new file mode 100644 index 00000000000..04e377dd75d --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeResponse.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static com.google.common.collect.Maps.newHashMap; + +/** + * A response for the upgrade action. + * + * + */ +public class UpgradeResponse extends BroadcastResponse { + + private Map versions; + + UpgradeResponse() { + + } + + UpgradeResponse(Map versions, int totalShards, int successfulShards, int failedShards, List shardFailures) { + super(totalShards, successfulShards, failedShards, shardFailures); + this.versions = versions; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + versions = newHashMap(); + for (int i=0; i entry : versions.entrySet()) { + out.writeString(entry.getKey()); + out.writeString(entry.getValue()); + } + } + + public Map versions() { + return versions; + } +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java new file mode 100644 index 00000000000..5257b50132d --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +/** + */ +public class UpgradeSettingsAction extends Action { + + public static final UpgradeSettingsAction INSTANCE = new UpgradeSettingsAction(); + public static final String NAME = "internal:indices/admin/upgrade"; + + private UpgradeSettingsAction() { + super(NAME); + } + + @Override + public UpgradeSettingsResponse newResponse() { + return new UpgradeSettingsResponse(); + } + + @Override + public UpgradeSettingsRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new UpgradeSettingsRequestBuilder(client, this); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java new file mode 100644 index 00000000000..7067f2f61ec --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; + +import java.util.Map; + +/** + * Cluster state update request that allows to change minimum compatibility settings for some indices + */ +public class UpgradeSettingsClusterStateUpdateRequest extends ClusterStateUpdateRequest { + + private Map versions; + + public UpgradeSettingsClusterStateUpdateRequest() { + + } + + /** + * Returns the index to version map for indices that should be updated + */ + public Map versions() { + return versions; + } + + /** + * Sets the index to version map for indices that should be updated + */ + public UpgradeSettingsClusterStateUpdateRequest versions(Map versions) { + this.versions = versions; + return this; + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java new file mode 100644 index 00000000000..b191fa53539 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Map; + +import static com.google.common.collect.Maps.newHashMap; +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request for an update index settings action + */ +public class UpgradeSettingsRequest extends AcknowledgedRequest { + + + private Map versions; + + UpgradeSettingsRequest() { + } + + /** + * Constructs a new request to update minimum compatible version settings for one or more indices + */ + public UpgradeSettingsRequest(Map versions) { + this.versions = versions; + } + + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (versions.isEmpty()) { + validationException = addValidationError("no indices to update", validationException); + } + return validationException; + } + + + Map versions() { + return versions; + } + + /** + * Sets the index versions to be updated + */ + public UpgradeSettingsRequest versions(Map versions) { + this.versions = versions; + return this; + } + + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + versions = newHashMap(); + for (int i=0; i entry : 
versions.entrySet()) { + out.writeString(entry.getKey()); + out.writeString(entry.getValue()); + } + writeTimeout(out); + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java new file mode 100644 index 00000000000..74c42a5fe80 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +import java.util.Map; + +/** + * Builder for an update index settings request + */ +public class UpgradeSettingsRequestBuilder extends AcknowledgedRequestBuilder { + + public UpgradeSettingsRequestBuilder(ElasticsearchClient client, UpgradeSettingsAction action) { + super(client, action, new UpgradeSettingsRequest()); + } + + /** + * Sets the index versions to be updated + */ + public UpgradeSettingsRequestBuilder setVersions(Map versions) { + request.versions(versions); + return this; + } +} diff --git a/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsResponse.java new file mode 100644 index 00000000000..0918af6f418 --- /dev/null +++ b/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsResponse.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.upgrade.post; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * A response for an update index settings action + */ +public class UpgradeSettingsResponse extends AcknowledgedResponse { + + UpgradeSettingsResponse() { + } + + UpgradeSettingsResponse(boolean acknowledged) { + super(acknowledged); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } +} diff --git a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index c54aaece7f4..ae16d7b36d2 100644 --- a/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -96,6 +96,12 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResp import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequestBuilder; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse; import 
org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; @@ -406,6 +412,53 @@ public interface IndicesAdminClient extends ElasticsearchClient { */ OptimizeRequestBuilder prepareOptimize(String... indices); + + /** + * Explicitly upgrade one or more indices + * + * @param request The upgrade request + * @return A result future + * @see org.elasticsearch.client.Requests#upgradeRequest(String...) + */ + ActionFuture upgrade(UpgradeRequest request); + + /** + * Explicitly upgrade one or more indices + * + * @param request The upgrade request + * @param listener A listener to be notified with a result + * @see org.elasticsearch.client.Requests#upgradeRequest(String...) + */ + void upgrade(UpgradeRequest request, ActionListener listener); + + /** + * Check the upgrade status of one or more indices + */ + UpgradeStatusRequestBuilder prepareUpgradeStatus(String... indices); + + /** + * Check upgrade status of one or more indices + * + * @param request The upgrade request + * @return A result future + * @see org.elasticsearch.client.Requests#upgradeRequest(String...) + */ + ActionFuture upgradeStatus(UpgradeStatusRequest request); + + /** + * Check upgrade status of one or more indices + * + * @param request The upgrade request + * @param listener A listener to be notified with a result + * @see org.elasticsearch.client.Requests#upgradeRequest(String...) + */ + void upgradeStatus(UpgradeStatusRequest request, ActionListener listener); + + /** + * Explicitly upgrade one or more indices + */ + UpgradeRequestBuilder prepareUpgrade(String... 
indices); + /** * Get the complete mappings of one or more types */ diff --git a/src/main/java/org/elasticsearch/client/Requests.java b/src/main/java/org/elasticsearch/client/Requests.java index bc2a778f570..8a70c18b374 100644 --- a/src/main/java/org/elasticsearch/client/Requests.java +++ b/src/main/java/org/elasticsearch/client/Requests.java @@ -49,6 +49,7 @@ import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.count.CountRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -291,6 +292,17 @@ public class Requests { return new OptimizeRequest(indices); } + /** + * Creates an upgrade request. + * + * @param indices The indices to upgrade. Use null or _all to execute against all indices + * @return The upgrade request + * @see org.elasticsearch.client.IndicesAdminClient#upgrade(UpgradeRequest) + */ + public static UpgradeRequest upgradeRequest(String... indices) { + return new UpgradeRequest(indices); + } + /** * Creates a clean indices cache request. 
* diff --git a/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 75e8ada560b..625a469470d 100644 --- a/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -196,6 +196,14 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateActio import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequestBuilder; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequestBuilder; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; @@ -1415,6 +1423,36 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new OptimizeRequestBuilder(this, OptimizeAction.INSTANCE).setIndices(indices); } + @Override + public ActionFuture upgrade(final UpgradeRequest request) { + return execute(UpgradeAction.INSTANCE, request); + } + + @Override + public void upgrade(final UpgradeRequest request, final 
ActionListener listener) { + execute(UpgradeAction.INSTANCE, request, listener); + } + + @Override + public UpgradeRequestBuilder prepareUpgrade(String... indices) { + return new UpgradeRequestBuilder(this, UpgradeAction.INSTANCE).setIndices(indices); + } + + + @Override + public ActionFuture upgradeStatus(final UpgradeStatusRequest request) { + return execute(UpgradeStatusAction.INSTANCE, request); + } + + @Override + public void upgradeStatus(final UpgradeStatusRequest request, final ActionListener listener) { + execute(UpgradeStatusAction.INSTANCE, request, listener); + } + + @Override + public UpgradeStatusRequestBuilder prepareUpgradeStatus(String... indices) { + return new UpgradeStatusRequestBuilder(this, UpgradeStatusAction.INSTANCE).setIndices(indices); + } @Override public ActionFuture refresh(final RefreshRequest request) { return execute(RefreshAction.INSTANCE, request); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index c9c7bbabb00..07703bca591 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -49,6 +49,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import java.io.IOException; +import java.text.ParseException; import java.util.EnumSet; import java.util.HashMap; import java.util.Locale; @@ -159,6 +160,7 @@ public class IndexMetaData implements Diffable { public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; public static final String SETTING_VERSION_CREATED = "index.version.created"; + public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded"; public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String 
SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_UUID = "index.uuid"; @@ -192,7 +194,8 @@ public class IndexMetaData implements Diffable { private final DiscoveryNodeFilters excludeFilters; private final Version indexCreatedVersion; - private final Version indexMinimumCompatibleVersion; + private final Version indexUpgradedVersion; + private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; private final HashFunction routingHashFunction; private final boolean useTypeForRouting; @@ -227,7 +230,17 @@ public class IndexMetaData implements Diffable { excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); } indexCreatedVersion = Version.indexCreated(settings); - indexMinimumCompatibleVersion = settings.getAsVersion(SETTING_VERSION_MINIMUM_COMPATIBLE, indexCreatedVersion); + indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); + String stringLuceneVersion = settings.get(SETTING_VERSION_MINIMUM_COMPATIBLE); + if (stringLuceneVersion != null) { + try { + this.minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); + } catch (ParseException ex) { + throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE +"] setting", ex); + } + } else { + this.minimumCompatibleLuceneVersion = null; + } final Class hashFunctionClass = settings.getAsClass(SETTING_LEGACY_ROUTING_HASH_FUNCTION, null); if (hashFunctionClass == null) { routingHashFunction = MURMUR3_HASH_FUNCTION; @@ -280,8 +293,6 @@ public class IndexMetaData implements Diffable { /** * Return the {@link Version} on which this index has been created. This * information is typically useful for backward compatibility. - * - * Returns null if the index was created before 0.19.0.RC1. 
*/ public Version creationVersion() { return indexCreatedVersion; @@ -292,17 +303,22 @@ public class IndexMetaData implements Diffable { } /** - * Return the {@link Version} of that created the oldest segment in the index. - * - * If the index was created before v1.6 and didn't go through upgrade API the creation verion is returned. - * Returns null if the index was created before 0.19.0.RC1. + * Return the {@link Version} on which this index has been upgraded. This + * information is typically useful for backward compatibility. */ - public Version minimumCompatibleVersion() { - return indexMinimumCompatibleVersion; + public Version upgradeVersion() { + return indexUpgradedVersion; } - public Version getMinimumCompatibleVersion() { - return minimumCompatibleVersion(); + public Version getUpgradeVersion() { + return upgradeVersion(); + } + + /** + * Return the {@link org.apache.lucene.util.Version} of the oldest lucene segment in the index + */ + public org.apache.lucene.util.Version getMinimumCompatibleVersion() { + return minimumCompatibleLuceneVersion; } /** diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 5265cfabd5d..ea9f7f7a611 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings; /** * This service is responsible for upgrading legacy index metadata to the current version - * + *

    * Every time an existing index is introduced into cluster this service should be used * to upgrade the existing index metadata to the latest version of the cluster. It typically * occurs during cluster upgrade, when dangling indices are imported into the cluster or indices @@ -64,7 +64,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { pre20HashFunction = DjbHashFunction.class; } pre20UseType = settings.getAsBoolean(DEPRECATED_SETTING_ROUTING_USE_TYPE, null); - if (hasCustomPre20HashFunction|| pre20UseType != null) { + if (hasCustomPre20HashFunction || pre20UseType != null) { logger.warn("Settings [{}] and [{}] are deprecated. Index settings from your old indices have been updated to record the fact that they " + "used some custom routing logic, you can now remove these settings from your `elasticsearch.yml` file", DEPRECATED_SETTING_ROUTING_HASH_FUNCTION, DEPRECATED_SETTING_ROUTING_USE_TYPE); } @@ -72,7 +72,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { /** * Checks that the index can be upgraded to the current version of the master node. - * + *

    * If the index does need upgrade it returns the index metadata unchanged, otherwise it returns a modified index metadata. If index cannot be * updated the method throws an exception. */ @@ -101,8 +101,16 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { * Returns true if this index can be supported by the current version of elasticsearch */ private static boolean isSupportedVersion(IndexMetaData indexMetaData) { - return indexMetaData.minimumCompatibleVersion() != null && - indexMetaData.minimumCompatibleVersion().luceneVersion.onOrAfter(Version.V_0_90_0_Beta1.luceneVersion); + if (indexMetaData.creationVersion().onOrAfter(Version.V_0_90_0_Beta1)) { + // The index was created with elasticsearch that was using Lucene 4.0 + return true; + } + if (indexMetaData.getMinimumCompatibleVersion() != null && + indexMetaData.getMinimumCompatibleVersion().onOrAfter(org.apache.lucene.util.Version.LUCENE_4_0_0)) { + //The index was upgraded we can work with it + return true; + } + return false; } /** diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 4b5d793356d..2f40335116e 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -20,8 +20,10 @@ package org.elasticsearch.cluster.metadata; import com.google.common.collect.Sets; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -40,6 +42,8 @@ import 
org.elasticsearch.index.settings.IndexDynamicSettings; import java.util.*; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + /** * Service responsible for submitting update index settings requests */ @@ -307,4 +311,37 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } }); } + + public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener listener) { + + + clusterService.submitStateUpdateTask("update-index-compatibility-versions", Priority.URGENT, new AckedClusterStateUpdateTask(request, listener) { + + @Override + protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { + return new ClusterStateUpdateResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) { + MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); + for (Map.Entry entry : request.versions().entrySet()) { + String index = entry.getKey(); + IndexMetaData indexMetaData = metaDataBuilder.get(index); + if (indexMetaData != null) { + if (Version.CURRENT.equals(indexMetaData.creationVersion()) == false) { + // No reason to pollute the settings, we didn't really upgrade anything + metaDataBuilder.put(IndexMetaData.builder(indexMetaData) + .settings(settingsBuilder().put(indexMetaData.settings()) + .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, entry.getValue()) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT) + ) + ); + } + } + } + return ClusterState.builder(currentState).metaData(metaDataBuilder).build(); + } + }); + } } diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index acac1c07794..e6222003651 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -33,6 +33,7 @@ import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -717,8 +718,38 @@ public class IndexShard extends AbstractIndexShardComponent { if (logger.isTraceEnabled()) { logger.trace("optimize with {}", optimize); } - engine().forceMerge(optimize.flush(), optimize.maxNumSegments(), optimize.onlyExpungeDeletes(), - optimize.upgrade(), optimize.upgradeOnlyAncientSegments()); + engine().forceMerge(optimize.flush(), optimize.maxNumSegments(), optimize.onlyExpungeDeletes(), false, false); + } + + /** + * Upgrades the shard to the current version of Lucene and returns the minimum segment version + */ + public org.apache.lucene.util.Version upgrade(UpgradeRequest upgrade) { + verifyStarted(); + if (logger.isTraceEnabled()) { + logger.trace("upgrade with {}", upgrade); + } + org.apache.lucene.util.Version previousVersion = minimumCompatibleVersion(); + // we just want to upgrade the segments, not actually optimize to a single segment + engine().forceMerge(true, // we need to flush at the end to make sure the upgrade is durable + Integer.MAX_VALUE, // we just want to upgrade the segments, not actually optimize to a single segment + false, true, upgrade.upgradeOnlyAncientSegments()); + org.apache.lucene.util.Version version = minimumCompatibleVersion(); + if (logger.isTraceEnabled()) { + logger.trace("upgraded segment {} from version {} to version {}", previousVersion, version); + } + + return version; + } + + public org.apache.lucene.util.Version minimumCompatibleVersion() { + org.apache.lucene.util.Version luceneVersion = Version.LUCENE_3_EMULATION_VERSION; + for(Segment segment : engine().segments(false)) { + if 
(luceneVersion.onOrAfter(segment.getVersion())) { + luceneVersion = segment.getVersion(); + } + } + return luceneVersion; } public SnapshotIndexCommit snapshotIndex(boolean flushFirst) throws EngineException { diff --git a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java index 6ea428bc31a..8c1b1c0458a 100644 --- a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java +++ b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java @@ -19,17 +19,14 @@ package org.elasticsearch.rest.action.admin.indices.upgrade; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; -import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse; -import org.elasticsearch.action.admin.indices.segments.*; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; +import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.index.engine.Segment; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -37,7 +34,8 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; -import java.io.IOException; + +import java.util.Map; import static 
org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -66,72 +64,36 @@ public class RestUpgradeAction extends BaseRestHandler { } } - void handleGet(RestRequest request, RestChannel channel, Client client) { - IndicesSegmentsRequest segsReq = new IndicesSegmentsRequest(Strings.splitStringByCommaToArray(request.param("index"))); - client.admin().indices().segments(segsReq, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(IndicesSegmentResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - - // TODO: getIndices().values() is what IndicesSegmentsResponse uses, but this will produce different orders with jdk8? - for (IndexSegments indexSegments : response.getIndices().values()) { - builder.startObject(indexSegments.getIndex()); - buildUpgradeStatus(indexSegments, builder); - builder.endObject(); - } - - builder.endObject(); - return new BytesRestResponse(OK, builder); - } - }); + void handleGet(final RestRequest request, RestChannel channel, Client client) { + client.admin().indices().prepareUpgradeStatus(Strings.splitStringByCommaToArray(request.param("index"))) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(UpgradeStatusResponse response, XContentBuilder builder) throws Exception { + builder.startObject(); + response.toXContent(builder, request); + builder.endObject(); + return new BytesRestResponse(OK, builder); + } + }); } - + void handlePost(final RestRequest request, RestChannel channel, Client client) { - OptimizeRequest optimizeReq = new OptimizeRequest(Strings.splitStringByCommaToArray(request.param("index"))); - optimizeReq.flush(true); - optimizeReq.upgrade(true); - optimizeReq.upgradeOnlyAncientSegments(request.paramAsBoolean("only_ancient_segments", false)); - optimizeReq.maxNumSegments(Integer.MAX_VALUE); // we just want to upgrade the segments, not actually optimize to a single 
segment - client.admin().indices().optimize(optimizeReq, new RestBuilderListener(channel) { + UpgradeRequest upgradeReq = new UpgradeRequest(Strings.splitStringByCommaToArray(request.param("index"))); + upgradeReq.upgradeOnlyAncientSegments(request.paramAsBoolean("only_ancient_segments", false)); + client.admin().indices().upgrade(upgradeReq, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(OptimizeResponse response, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(UpgradeResponse response, XContentBuilder builder) throws Exception { builder.startObject(); buildBroadcastShardsHeader(builder, request, response); + builder.startArray("upgraded_indices"); + for (Map.Entry entry : response.versions().entrySet()) { + builder.field(entry.getKey(), entry.getValue(), XContentBuilder.FieldCaseConversion.NONE); + } + builder.endObject(); builder.endObject(); return new BytesRestResponse(OK, builder); } }); } - - void buildUpgradeStatus(IndexSegments indexSegments, XContentBuilder builder) throws IOException { - long total_bytes = 0; - long to_upgrade_bytes = 0; - long to_upgrade_bytes_ancient = 0; - for (IndexShardSegments shard : indexSegments) { - for (ShardSegments segs : shard.getShards()) { - for (Segment seg : segs.getSegments()) { - total_bytes += seg.sizeInBytes; - if (seg.version.major != Version.CURRENT.luceneVersion.major) { - to_upgrade_bytes_ancient += seg.sizeInBytes; - to_upgrade_bytes += seg.sizeInBytes; - } else if (seg.version.minor != Version.CURRENT.luceneVersion.minor) { - // TODO: this comparison is bogus! 
it would cause us to upgrade even with the same format - // instead, we should check if the codec has changed - to_upgrade_bytes += seg.sizeInBytes; - } - } - } - } - builder.byteSizeField(SIZE_IN_BYTES, SIZE, total_bytes); - builder.byteSizeField(SIZE_TO_UPGRADE_IN_BYTES, SIZE_TO_UPGRADE, to_upgrade_bytes); - builder.byteSizeField(SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, SIZE_TO_UPGRADE_ANCIENT, to_upgrade_bytes_ancient); - } - - static final XContentBuilderString SIZE = new XContentBuilderString("size"); - static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes"); - static final XContentBuilderString SIZE_TO_UPGRADE = new XContentBuilderString("size_to_upgrade"); - static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT = new XContentBuilderString("size_to_upgrade_ancient"); - static final XContentBuilderString SIZE_TO_UPGRADE_IN_BYTES = new XContentBuilderString("size_to_upgrade_in_bytes"); - static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = new XContentBuilderString("size_to_upgrade_ancient_in_bytes"); } diff --git a/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 9aabab8580b..5f10e5217a6 100644 --- a/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -106,6 +106,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis .addAll(UNMODIFIABLE_SETTINGS) .add(SETTING_NUMBER_OF_REPLICAS) .add(SETTING_AUTO_EXPAND_REPLICAS) + .add(SETTING_VERSION_UPGRADED) + .add(SETTING_VERSION_MINIMUM_COMPATIBLE) .build(); private final ClusterService clusterService; diff --git a/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java b/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java index 32fef09363e..ac59615f902 100644 --- 
a/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java +++ b/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityTests.java @@ -54,7 +54,6 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.index.merge.NoMergePolicyProvider; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.Before; @@ -67,8 +66,9 @@ import java.nio.file.attribute.BasicFileAttributes; import java.util.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.matchers.JUnitMatchers.containsString; // needs at least 2 nodes since it bumps replicas to 1 @ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0) @@ -110,7 +110,6 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio @Override public Settings nodeSettings(int ord) { return Settings.builder() - .put(Node.HTTP_ENABLED, true) // for _upgrade .put(MergePolicyModule.MERGE_POLICY_TYPE_KEY, NoMergePolicyProvider.class) // disable merging so no segments will be upgraded .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 30) // increase recovery speed for small files .build(); @@ -438,13 +437,11 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio } void assertUpgradeWorks(String indexName, boolean alreadyLatest) throws Exception { - HttpRequestBuilder httpClient = httpClient(); - if (alreadyLatest == false) { - UpgradeTest.assertNotUpgraded(httpClient, indexName); + 
UpgradeTest.assertNotUpgraded(client(), indexName); } - UpgradeTest.runUpgrade(httpClient, indexName); - UpgradeTest.assertUpgraded(httpClient, indexName); + assertNoFailures(client().admin().indices().prepareUpgrade(indexName).get()); + UpgradeTest.assertUpgraded(client(), indexName); } } diff --git a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java index 10416d44868..ce2d54bb30f 100644 --- a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java +++ b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java @@ -20,20 +20,22 @@ package org.elasticsearch.rest.action.admin.indices.upgrade; import org.elasticsearch.bwcompat.StaticIndexBackwardCompatibilityTest; -import org.elasticsearch.node.Node; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; public class UpgradeReallyOldIndexTest extends StaticIndexBackwardCompatibilityTest { public void testUpgrade_0_90_6() throws Exception { String indexName = "index-0.90.6"; - loadIndex(indexName, Node.HTTP_ENABLED, true); - - UpgradeTest.assertNotUpgraded(httpClient(), indexName); - assertTrue(UpgradeTest.hasAncientSegments(httpClient(), indexName)); - UpgradeTest.runUpgrade(httpClient(), indexName, "wait_for_completion", "true", "only_ancient_segments", "true"); - assertFalse(UpgradeTest.hasAncientSegments(httpClient(), "index-0.90.6")); + + loadIndex(indexName); + UpgradeTest.assertNotUpgraded(client(), indexName); + assertTrue(UpgradeTest.hasAncientSegments(client(), indexName)); + assertNoFailures(client().admin().indices().prepareUpgrade(indexName).setUpgradeOnlyAncientSegments(true).get()); + + assertFalse(UpgradeTest.hasAncientSegments(client(), "index-0.90.6")); // This index has only ancient segments, so it should now be fully upgraded: - 
UpgradeTest.assertUpgraded(httpClient(), indexName); + UpgradeTest.assertUpgraded(client(), indexName); } } diff --git a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java index 3ea5747899c..ddf4bbe2057 100644 --- a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java +++ b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java @@ -26,26 +26,26 @@ import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; +import org.elasticsearch.action.admin.indices.upgrade.get.IndexUpgradeStatus; +import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.node.Node; import org.elasticsearch.test.ElasticsearchBackwardsCompatIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; -import org.elasticsearch.test.rest.client.http.HttpResponse; -import org.elasticsearch.test.rest.json.JsonPath; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Collection; import java.util.List; -import java.util.Map; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST) // test scope since we set cluster wide settings public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { @@ -134,20 +134,20 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { logger.info("--> Nodes upgrade complete"); logSegmentsState(); - assertNotUpgraded(httpClient(), null); + assertNotUpgraded(client(), null); final String indexToUpgrade = "test" + randomInt(numIndexes - 1); // This test fires up another node running an older version of ES, but because wire protocol changes across major ES versions, it // means we can never generate ancient segments in this test (unless Lucene major version bumps but ES major version does not): - assertFalse(hasAncientSegments(httpClient(), indexToUpgrade)); + assertFalse(hasAncientSegments(client(), indexToUpgrade)); logger.info("--> Running upgrade on index " + indexToUpgrade); - runUpgrade(httpClient(), indexToUpgrade); + assertNoFailures(client().admin().indices().prepareUpgrade(indexToUpgrade).get()); awaitBusy(new Predicate() { @Override public boolean apply(Object o) { try { - return isUpgraded(httpClient(), indexToUpgrade); + return isUpgraded(client(), indexToUpgrade); } catch (Exception e) { throw ExceptionsHelper.convertToRuntime(e); } @@ -156,48 +156,40 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { logger.info("--> Single index upgrade complete"); logger.info("--> Running upgrade on the rest of the indexes"); - runUpgrade(httpClient(), null); + assertNoFailures(client().admin().indices().prepareUpgrade().get()); logSegmentsState(); logger.info("--> Full upgrade complete"); - assertUpgraded(httpClient(), null); + assertUpgraded(client(), null); } - static String upgradePath(String 
index) { - String path = "/_upgrade"; - if (index != null) { - path = "/" + index + path; - } - return path; - } - - public static void assertNotUpgraded(HttpRequestBuilder httpClient, String index) throws Exception { - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0); + public static void assertNotUpgraded(Client client, String index) throws Exception { + for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { + assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); // TODO: it would be better for this to be strictly greater, but sometimes an extra flush // mysteriously happens after the second round of docs are indexed - assertTrue("index " + status.indexName + " should have recovered some segments from transaction log", - status.totalBytes >= status.toUpgradeBytes); - assertTrue("index " + status.indexName + " should need upgrading", status.toUpgradeBytes != 0); + assertTrue("index " + status.getIndex() + " should have recovered some segments from transaction log", + status.getTotalBytes() >= status.getToUpgradeBytes()); + assertTrue("index " + status.getIndex() + " should need upgrading", status.getToUpgradeBytes() != 0); } } - public static void assertNoAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0); + public static void assertNoAncientSegments(Client client, String index) throws Exception { + for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { + assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); // TODO: it would be better for this to be strictly greater, but sometimes an extra flush // mysteriously 
happens after the second round of docs are indexed - assertTrue("index " + status.indexName + " should not have any ancient segments", - status.toUpgradeBytesAncient == 0); - assertTrue("index " + status.indexName + " should have recovered some segments from transaction log", - status.totalBytes >= status.toUpgradeBytes); - assertTrue("index " + status.indexName + " should need upgrading", status.toUpgradeBytes != 0); + assertTrue("index " + status.getIndex() + " should not have any ancient segments", + status.getToUpgradeBytesAncient() == 0); + assertTrue("index " + status.getIndex() + " should have recovered some segments from transaction log", + status.getTotalBytes() >= status.getToUpgradeBytes()); + assertTrue("index " + status.getIndex() + " should need upgrading", status.getToUpgradeBytes() != 0); } } /** Returns true if there are any ancient segments. */ - public static boolean hasAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - if (status.toUpgradeBytesAncient != 0) { + public static boolean hasAncientSegments(Client client, String index) throws Exception { + for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { + if (status.getToUpgradeBytesAncient() != 0) { return true; } } @@ -205,20 +197,20 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { } /** Returns true if there are any old but not ancient segments. 
*/ - public static boolean hasOldButNotAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - if (status.toUpgradeBytes > status.toUpgradeBytesAncient) { + public static boolean hasOldButNotAncientSegments(Client client, String index) throws Exception { + for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { + if (status.getToUpgradeBytes() > status.getToUpgradeBytesAncient()) { return true; } } return false; } - public static void assertUpgraded(HttpRequestBuilder httpClient, String index) throws Exception { - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0); - assertEquals("index " + status.indexName + " should be upgraded", - 0, status.toUpgradeBytes); + public static void assertUpgraded(Client client, String index) throws Exception { + for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { + assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); + assertEquals("index " + status.getIndex() + " should be upgraded", + 0, status.getToUpgradeBytes()); } // double check using the segments api that all segments are actually upgraded @@ -242,12 +234,12 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { } } - static boolean isUpgraded(HttpRequestBuilder httpClient, String index) throws Exception { + static boolean isUpgraded(Client client, String index) throws Exception { ESLogger logger = Loggers.getLogger(UpgradeTest.class); int toUpgrade = 0; - for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { - logger.info("Index: " + status.indexName + ", total: " + status.totalBytes + ", toUpgrade: " + status.toUpgradeBytes); - toUpgrade += status.toUpgradeBytes; + for (IndexUpgradeStatus status : 
getUpgradeStatus(client, index)) { + logger.info("Index: " + status.getIndex() + ", total: " + status.getTotalBytes() + ", toUpgrade: " + status.getToUpgradeBytes()); + toUpgrade += status.getToUpgradeBytes(); } return toUpgrade == 0; } @@ -257,7 +249,7 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { public final int totalBytes; public final int toUpgradeBytes; public final int toUpgradeBytesAncient; - + public UpgradeStatus(String indexName, int totalBytes, int toUpgradeBytes, int toUpgradeBytesAncient) { this.indexName = indexName; this.totalBytes = totalBytes; @@ -266,49 +258,11 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { assert toUpgradeBytesAncient <= toUpgradeBytes; } } - - public static void runUpgrade(HttpRequestBuilder httpClient, String index, String... params) throws Exception { - assert params.length % 2 == 0; - HttpRequestBuilder builder = httpClient.method("POST").path(upgradePath(index)); - for (int i = 0; i < params.length; i += 2) { - builder.addParam(params[i], params[i + 1]); - } - HttpResponse rsp = builder.execute(); - assertNotNull(rsp); - assertEquals(200, rsp.getStatusCode()); - } @SuppressWarnings("unchecked") - static List getUpgradeStatus(HttpRequestBuilder httpClient, String path) throws Exception { - HttpResponse rsp = httpClient.method("GET").path(path).execute(); - Map data = validateAndParse(rsp); - List ret = new ArrayList<>(); - for (String index : data.keySet()) { - Map status = (Map)data.get(index); - assertTrue("missing key size_in_bytes for index " + index, status.containsKey("size_in_bytes")); - Object totalBytes = status.get("size_in_bytes"); - assertTrue("size_in_bytes for index " + index + " is not an integer", totalBytes instanceof Integer); - assertTrue("missing key size_to_upgrade_in_bytes for index " + index, status.containsKey("size_to_upgrade_in_bytes")); - Object toUpgradeBytes = status.get("size_to_upgrade_in_bytes"); - 
assertTrue("size_to_upgrade_in_bytes for index " + index + " is not an integer", toUpgradeBytes instanceof Integer); - Object toUpgradeBytesAncient = status.get("size_to_upgrade_ancient_in_bytes"); - assertTrue("size_to_upgrade_ancient_in_bytes for index " + index + " is not an integer", toUpgradeBytesAncient instanceof Integer); - ret.add(new UpgradeStatus(index, (Integer) totalBytes, (Integer) toUpgradeBytes, (Integer) toUpgradeBytesAncient)); - } - return ret; - } - - @SuppressWarnings("unchecked") - static Map validateAndParse(HttpResponse rsp) throws Exception { - assertNotNull(rsp); - assertEquals(200, rsp.getStatusCode()); - assertTrue(rsp.hasBody()); - return (Map)new JsonPath(rsp.getBody()).evaluate(""); - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true).build(); + static Collection getUpgradeStatus(Client client, String... indices) throws Exception { + UpgradeStatusResponse upgradeStatusResponse = client.admin().indices().prepareUpgradeStatus(indices).get(); + assertNoFailures(upgradeStatusResponse); + return upgradeStatusResponse.getIndices().values(); } } From f732900111ec310900b3f8d92ff76c77c19e2898 Mon Sep 17 00:00:00 2001 From: jaymode Date: Thu, 28 May 2015 11:56:09 -0400 Subject: [PATCH 051/123] Export hostname as environment variable for plugin manager In #9474, we exported the hostname in the bin/elasticsearch scripts so that it could be used as a variable in the elasticsearch.yml file but did not do the same for plugin manager. When using the hostname variable in elasticsearch.yml and trying to use the plugin manager, initialization will fail because the property could not be resolved. This change will allow the hostname to be resolved in the same manner as the service scripts. 
Closes #10902 --- bin/plugin | 2 ++ bin/plugin.bat | 2 ++ 2 files changed, 4 insertions(+) diff --git a/bin/plugin b/bin/plugin index 80d3e7d2906..c1b5a777042 100755 --- a/bin/plugin +++ b/bin/plugin @@ -103,4 +103,6 @@ if [ -e "$CONF_FILE" ]; then esac fi +export HOSTNAME=`hostname -s` + exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Xmx64m -Xms16m -Delasticsearch -Des.path.home="$ES_HOME" $properties -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginManager $args diff --git a/bin/plugin.bat b/bin/plugin.bat index 462c2d78a07..1addc161323 100644 --- a/bin/plugin.bat +++ b/bin/plugin.bat @@ -9,6 +9,8 @@ for %%I in ("%SCRIPT_DIR%..") do set ES_HOME=%%~dpfI TITLE Elasticsearch Plugin Manager ${project.version} +SET HOSTNAME=%COMPUTERNAME% + "%JAVA_HOME%\bin\java" %JAVA_OPTS% %ES_JAVA_OPTS% -Xmx64m -Xms16m -Des.path.home="%ES_HOME%" -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginManager" %* goto finally From d955461f580f0874863b1a97ddcaddbd96cbd6c6 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Thu, 28 May 2015 07:13:32 -1000 Subject: [PATCH 052/123] Tests: fix NPE in UpgradeTest --- .../rest/action/admin/indices/upgrade/UpgradeTest.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java index ddf4bbe2057..3b9f8c19b5a 100644 --- a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java +++ b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java @@ -134,7 +134,7 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { logger.info("--> Nodes upgrade complete"); logSegmentsState(); - assertNotUpgraded(client(), null); + assertNotUpgraded(client()); final String indexToUpgrade = "test" + randomInt(numIndexes - 1); // This test fires up another node running an older version of ES, but because wire 
protocol changes across major ES versions, it @@ -159,10 +159,10 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { assertNoFailures(client().admin().indices().prepareUpgrade().get()); logSegmentsState(); logger.info("--> Full upgrade complete"); - assertUpgraded(client(), null); + assertUpgraded(client()); } - public static void assertNotUpgraded(Client client, String index) throws Exception { + public static void assertNotUpgraded(Client client, String... index) throws Exception { for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); // TODO: it would be better for this to be strictly greater, but sometimes an extra flush @@ -173,7 +173,7 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { } } - public static void assertNoAncientSegments(Client client, String index) throws Exception { + public static void assertNoAncientSegments(Client client, String... index) throws Exception { for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); // TODO: it would be better for this to be strictly greater, but sometimes an extra flush @@ -206,7 +206,7 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { return false; } - public static void assertUpgraded(Client client, String index) throws Exception { + public static void assertUpgraded(Client client, String... 
index) throws Exception { for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) { assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0); assertEquals("index " + status.getIndex() + " should be upgraded", From 3db9caf7a1647d3179e0fa0665d1bc19f3b9a827 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Thu, 28 May 2015 08:54:38 -1000 Subject: [PATCH 053/123] Tests: Increase timeout waiting for snapshot to complete in batchingShardUpdateTaskTest When this test picks a large number of shards, the snapshot doesn't always manage to complete in 10 seconds. --- .../snapshots/SharedClusterSnapshotRestoreTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java index d4651e87174..1e8d45d3699 100644 --- a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java +++ b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java @@ -1907,7 +1907,7 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests { } assertThat(count, equalTo(expectedCount)); } - }); + }, 1, TimeUnit.MINUTES); } /** From 6980286ba46bb2f9e74494609c29c2ecbab62331 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Thu, 28 May 2015 09:19:13 -1000 Subject: [PATCH 054/123] Core: fix upgrade response serialization --- rest-api-spec/test/indices.upgrade/10_basic.yaml | 16 ++++++++++++++++ .../admin/indices/upgrade/RestUpgradeAction.java | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 rest-api-spec/test/indices.upgrade/10_basic.yaml diff --git a/rest-api-spec/test/indices.upgrade/10_basic.yaml b/rest-api-spec/test/indices.upgrade/10_basic.yaml new file mode 100644 index 00000000000..f4844839c41 --- /dev/null +++ b/rest-api-spec/test/indices.upgrade/10_basic.yaml @@ -0,0 +1,16 @@ +--- +"Basic 
test for upgrade indices": + + - do: + indices.create: + index: test_index + + - do: + cluster.health: + wait_for_status: yellow + + - do: + indices.upgrade: + index: test_index + + - match: {upgraded_indices.test_index: '/(\d\.)+\d/'} diff --git a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java index 8c1b1c0458a..a1c9c0b3ed6 100644 --- a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java +++ b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java @@ -85,7 +85,7 @@ public class RestUpgradeAction extends BaseRestHandler { public RestResponse buildResponse(UpgradeResponse response, XContentBuilder builder) throws Exception { builder.startObject(); buildBroadcastShardsHeader(builder, request, response); - builder.startArray("upgraded_indices"); + builder.startObject("upgraded_indices"); for (Map.Entry entry : response.versions().entrySet()) { builder.field(entry.getKey(), entry.getValue(), XContentBuilder.FieldCaseConversion.NONE); } From 790baed7551ba005732d530b1a6ccaad253ebb4c Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 28 May 2015 10:00:36 -0600 Subject: [PATCH 055/123] Fall back to reading SegmentInfos from Store if reading from commit fails In the event that reading from the latest commit fails, we should fall back to reading from the `Store` using the traditional `Directory.listAll()` Related to #11361 --- .../java/org/elasticsearch/index/engine/Engine.java | 10 +++++++++- .../org/elasticsearch/index/engine/InternalEngine.java | 2 +- .../org/elasticsearch/index/engine/ShadowEngine.java | 4 ++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/engine/Engine.java b/src/main/java/org/elasticsearch/index/engine/Engine.java index e2811bb0698..41adf2b3b45 100644 --- 
a/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -319,11 +319,19 @@ public abstract class Engine implements Closeable { /** * Read the last segments info from the commit pointed to by the searcher manager */ - protected static SegmentInfos readLastCommittedSegmentInfos(SearcherManager sm) throws IOException { + protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException { IndexSearcher searcher = sm.acquire(); try { IndexCommit latestCommit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit(); return Lucene.readSegmentInfos(latestCommit); + } catch (IOException e) { + // Fall back to reading from the store if reading from the commit fails + try { + return store. readLastCommittedSegmentsInfo(); + } catch (IOException e2) { + e2.addSuppressed(e); + throw e2; + } } finally { sm.release(searcher); } diff --git a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 393dff33907..42bab8ca7fe 100644 --- a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -275,7 +275,7 @@ public class InternalEngine extends Engine { try { final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId); searcherManager = new SearcherManager(directoryReader, searcherFactory); - lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store); success = true; return searcherManager; } catch (IOException e) { diff --git a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java index 301f6176a00..95b3810d330 100644 --- 
a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java @@ -79,7 +79,7 @@ public class ShadowEngine extends Engine { if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) { reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId); this.searcherManager = new SearcherManager(reader, searcherFactory); - this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); + this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store); success = true; } else { throw new IndexShardException(shardId, "failed to open a shadow engine after" + @@ -148,7 +148,7 @@ public class ShadowEngine extends Engine { store.incRef(); try (ReleasableLock lock = readLock.acquire()) { // reread the last committed segment infos - lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store); } catch (Throwable e) { if (isClosed.get() == false) { logger.warn("failed to read latest segment infos on flush", e); From 503f844a05c02112c852cc7a6b4ee4b52c99497a Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Thu, 28 May 2015 10:51:18 -1000 Subject: [PATCH 056/123] Tests: make randomRepoPath work with bwc tests --- .../elasticsearch/test/ElasticsearchIntegrationTest.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index 0655588e35d..0e146d37ae1 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -1847,7 +1847,12 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase * Returns path to a random directory that can be used to create a 
temporary file system repo */ public Path randomRepoPath() { - return randomRepoPath(internalCluster().getDefaultSettings()); + if (currentCluster instanceof InternalTestCluster) { + return randomRepoPath(((InternalTestCluster) currentCluster).getDefaultSettings()); + } else if (currentCluster instanceof CompositeTestCluster) { + return randomRepoPath(((CompositeTestCluster) currentCluster).internalCluster().getDefaultSettings()); + } + throw new UnsupportedOperationException("unsupported cluster type"); } /** From c695f35bcabc57223ef8480aa2d1b24f14310f33 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Thu, 28 May 2015 12:08:28 -1000 Subject: [PATCH 057/123] Tests: make REST upgrade test more reliable Make sure that all shards are started to avoid flush conflicts. --- rest-api-spec/test/indices.upgrade/10_basic.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/test/indices.upgrade/10_basic.yaml b/rest-api-spec/test/indices.upgrade/10_basic.yaml index f4844839c41..d6a38b4c168 100644 --- a/rest-api-spec/test/indices.upgrade/10_basic.yaml +++ b/rest-api-spec/test/indices.upgrade/10_basic.yaml @@ -4,10 +4,15 @@ - do: indices.create: index: test_index + body: + settings: + index: + number_of_replicas: 0 + - do: cluster.health: - wait_for_status: yellow + wait_for_status: green - do: indices.upgrade: From 5600757f3eb505bf22162c1190914d86a57dda53 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 28 May 2015 17:09:43 +0200 Subject: [PATCH 058/123] Serialization: Remove old version checks As the 2.x release does not need to be backwards compatible in terms of serialization, we can remove a fair share of the serialization checks. 
--- .../cluster/health/ClusterHealthResponse.java | 9 ++---- .../hotthreads/NodesHotThreadsRequest.java | 12 ++------ .../restore/RestoreSnapshotRequest.java | 25 +++------------- .../get/GetIndexedScriptRequest.java | 30 ------------------- .../cluster/metadata/MappingMetaData.java | 14 ++------- .../org/elasticsearch/common/Priority.java | 4 --- .../zen/fd/MasterFaultDetection.java | 9 ------ .../percolator/stats/PercolateStats.java | 11 ------- .../AbstractInternalPercentiles.java | 10 ------- 9 files changed, 10 insertions(+), 114 deletions(-) diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index 1e1a363d906..fc4567a5b53 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.health; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -229,9 +228,7 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable { private String snapshot; - private String repository; - private String[] indices = Strings.EMPTY_ARRAY; - private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); - private String renamePattern; - private String renameReplacement; - private boolean waitForCompletion; - private boolean includeGlobalState = true; - private boolean partial = false; - private boolean includeAliases = true; - private Settings settings = EMPTY_SETTINGS; - private Settings indexSettings = EMPTY_SETTINGS; - private String[] ignoreIndexSettings = Strings.EMPTY_ARRAY; 
RestoreSnapshotRequest() { @@ -638,10 +625,8 @@ public class RestoreSnapshotRequest extends MasterNodeRequest { out.writeOptionalString(timestamp().path()); out.writeString(timestamp().format()); out.writeOptionalString(timestamp().defaultTimestamp()); - // TODO Remove the test in elasticsearch 2.0.0 - if (out.getVersion().onOrAfter(Version.V_1_5_0)) { - out.writeOptionalBoolean(timestamp().ignoreMissing()); - } + out.writeOptionalBoolean(timestamp().ignoreMissing()); out.writeBoolean(hasParentField()); } @@ -619,10 +612,7 @@ public class MappingMetaData extends AbstractDiffable { String defaultTimestamp = in.readOptionalString(); Boolean ignoreMissing = null; - // TODO Remove the test in elasticsearch 2.0.0 - if (in.getVersion().onOrAfter(Version.V_1_5_0)) { - ignoreMissing = in.readOptionalBoolean(); - } + ignoreMissing = in.readOptionalBoolean(); final Timestamp timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing); final boolean hasParentField = in.readBoolean(); diff --git a/src/main/java/org/elasticsearch/common/Priority.java b/src/main/java/org/elasticsearch/common/Priority.java index 658a7e5e9e2..19c2024555d 100644 --- a/src/main/java/org/elasticsearch/common/Priority.java +++ b/src/main/java/org/elasticsearch/common/Priority.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,9 +34,6 @@ public final class Priority implements Comparable { public static void writeTo(Priority priority, StreamOutput output) throws IOException { byte b = priority.value; - if (output.getVersion().before(Version.V_1_1_0)) { - b = (byte) Math.max(URGENT.value, b); - } output.writeByte(b); } diff --git a/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java index 1159f1c4e6a..9dfaef728b3 100644 --- 
a/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java +++ b/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.fd; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -428,19 +427,11 @@ public class MasterFaultDetection extends FaultDetection { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - if (in.getVersion().onOrBefore(Version.V_1_4_0_Beta1)) { - // old listedOnMaster - in.readBoolean(); - } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().onOrBefore(Version.V_1_4_0_Beta1)) { - // old listedOnMaster - out.writeBoolean(true); - } } } } diff --git a/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java b/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java index e75813772de..4ae854a4c40 100644 --- a/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java +++ b/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.percolator.stats; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -152,11 +151,6 @@ public class PercolateStats implements Streamable, ToXContent { percolateCount = in.readVLong(); percolateTimeInMillis = in.readVLong(); current = in.readVLong(); - if (in.getVersion().before(Version.V_1_1_0)) { - in.readVLong(); - } else { - in.readLong(); - } numQueries = in.readVLong(); } @@ -165,11 +159,6 @@ public class PercolateStats implements Streamable, ToXContent { out.writeVLong(percolateCount); 
out.writeVLong(percolateTimeInMillis); out.writeVLong(current); - if (out.getVersion().before(Version.V_1_1_0)) { - out.writeVLong(0); - } else { - out.writeLong(-1); - } out.writeVLong(numQueries); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractInternalPercentiles.java b/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractInternalPercentiles.java index 05bc0f95683..6d6de6b4346 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractInternalPercentiles.java +++ b/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractInternalPercentiles.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; -import org.elasticsearch.Version; import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -79,12 +78,6 @@ abstract class AbstractInternalPercentiles extends InternalNumericMetricsAggrega @Override protected void doReadFrom(StreamInput in) throws IOException { valueFormatter = ValueFormatterStreams.readOptional(in); - if (in.getVersion().before(Version.V_1_2_0)) { - final byte id = in.readByte(); - if (id != 0) { - throw new IllegalArgumentException("Unexpected percentiles aggregator id [" + id + "]"); - } - } keys = new double[in.readInt()]; for (int i = 0; i < keys.length; ++i) { keys[i] = in.readDouble(); @@ -96,9 +89,6 @@ abstract class AbstractInternalPercentiles extends InternalNumericMetricsAggrega @Override protected void doWriteTo(StreamOutput out) throws IOException { ValueFormatterStreams.writeOptional(valueFormatter, out); - if (out.getVersion().before(Version.V_1_2_0)) { - out.writeByte((byte) 0); - } out.writeInt(keys.length); for (int i = 0 ; i < keys.length; ++i) { out.writeDouble(keys[i]); From 5cd6ced7eefaea10882ea510d9b1f473c7f45860 Mon Sep 17 00:00:00 2001 From: Simon 
Willnauer Date: Fri, 29 May 2015 09:44:41 +0200 Subject: [PATCH 059/123] Close ShardFilterCache after Store is closed The ShardFilterCache relies on the fact that it's closed once the last reader on the shard is closed. This is only guaranteed once the Store and all its references are closed. This commit moves the closing into the internal callback mechanism we use for deleting shard data etc. to close the cache once we have all searchers released. --- .../org/elasticsearch/index/IndexService.java | 25 ++++++++++++++----- .../index/cache/filter/ShardFilterCache.java | 11 ++++---- .../cache/filter/ShardFilterCacheModule.java | 8 +++++- .../elasticsearch/index/shard/IndexShard.java | 4 +-- .../test/InternalTestCluster.java | 9 +++---- 5 files changed, 37 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/IndexService.java b/src/main/java/org/elasticsearch/index/IndexService.java index 6276e682123..09335126c73 100644 --- a/src/main/java/org/elasticsearch/index/IndexService.java +++ b/src/main/java/org/elasticsearch/index/IndexService.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCacheModule; +import org.elasticsearch.index.cache.filter.ShardFilterCache; import org.elasticsearch.index.cache.filter.ShardFilterCacheModule; import org.elasticsearch.index.cache.query.ShardQueryCacheModule; import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule; @@ -69,6 +70,7 @@ import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesLifecycle; +import org.elasticsearch.indices.cache.filter.IndicesFilterCache; import org.elasticsearch.plugins.PluginsService; import 
org.elasticsearch.plugins.ShardsPluginsModule; @@ -298,11 +300,11 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings); - logger.debug("creating shard_id {}", shardId); // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); + final ShardFilterCache shardFilterCache = new ShardFilterCache(shardId, injector.getInstance(IndicesFilterCache.class)); ModulesBuilder modules = new ModulesBuilder(); modules.add(new ShardsPluginsModule(indexSettings, pluginsService)); modules.add(new IndexShardModule(shardId, primary, indexSettings)); @@ -310,11 +312,11 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone modules.add(new ShardSearchModule()); modules.add(new ShardGetModule()); modules.add(new StoreModule(injector.getInstance(IndexStore.class).shardDirectory(), lock, - new StoreCloseListener(shardId, canDeleteShardContent), path)); + new StoreCloseListener(shardId, canDeleteShardContent, shardFilterCache), path)); modules.add(new DeletionPolicyModule(indexSettings)); modules.add(new MergePolicyModule(indexSettings)); modules.add(new MergeSchedulerModule(indexSettings)); - modules.add(new ShardFilterCacheModule()); + modules.add(new ShardFilterCacheModule(shardFilterCache)); modules.add(new ShardQueryCacheModule()); modules.add(new ShardBitsetFilterCacheModule()); modules.add(new ShardFieldDataModule()); @@ -465,16 +467,27 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone private class StoreCloseListener implements Store.OnClose { private final ShardId shardId; private final boolean ownsShard; + private final Closeable[] toClose; - public StoreCloseListener(ShardId shardId, boolean ownsShard) { + public 
StoreCloseListener(ShardId shardId, boolean ownsShard, Closeable... toClose) { this.shardId = shardId; this.ownsShard = ownsShard; + this.toClose = toClose; } @Override public void handle(ShardLock lock) { - assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId(); - onShardClose(lock, ownsShard); + try { + assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId(); + onShardClose(lock, ownsShard); + } finally { + try { + IOUtils.close(toClose); + } catch (IOException ex) { + logger.debug("failed to close resource", ex); + } + } + } } diff --git a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCache.java b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCache.java index 97f75094580..550c25bdbf6 100644 --- a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCache.java +++ b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCache.java @@ -28,17 +28,18 @@ import org.elasticsearch.indices.cache.filter.IndicesFilterCache; import java.io.Closeable; import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.ReentrantReadWriteLock; /** */ -public class ShardFilterCache extends AbstractIndexShardComponent implements Closeable { - +public class ShardFilterCache implements Closeable { final IndicesFilterCache cache; + final ShardId shardId; - @Inject - public ShardFilterCache(ShardId shardId, @IndexSettings Settings indexSettings, IndicesFilterCache cache) { - super(shardId, indexSettings); + public ShardFilterCache(ShardId shardId, IndicesFilterCache cache) { this.cache = cache; + this.shardId = shardId; } public FilterCacheStats stats() { diff --git a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java index 749fd8e392c..37bcb805768 100644 
--- a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java +++ b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java @@ -25,8 +25,14 @@ import org.elasticsearch.common.inject.AbstractModule; */ public class ShardFilterCacheModule extends AbstractModule { + private final ShardFilterCache shardFilterCache; + + public ShardFilterCacheModule(ShardFilterCache shardFilterCache) { + this.shardFilterCache = shardFilterCache; + } + @Override protected void configure() { - bind(ShardFilterCache.class).asEagerSingleton(); + bind(ShardFilterCache.class).toInstance(shardFilterCache); } } diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 07b4bf1f940..06400a3bf82 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -161,7 +161,6 @@ public class IndexShard extends AbstractIndexShardComponent { private final SnapshotDeletionPolicy deletionPolicy; private final SimilarityService similarityService; private final MergePolicyProvider mergePolicyProvider; - private final BigArrays bigArrays; private final EngineConfig engineConfig; private final TranslogConfig translogConfig; @@ -212,7 +211,6 @@ public class IndexShard extends AbstractIndexShardComponent { this.deletionPolicy = deletionPolicy; this.similarityService = similarityService; this.mergePolicyProvider = mergePolicyProvider; - this.bigArrays = bigArrays; Preconditions.checkNotNull(store, "Store must be provided to the index shard"); Preconditions.checkNotNull(deletionPolicy, "Snapshot deletion policy must be provided to the index shard"); this.engineFactory = factory; @@ -794,7 +792,7 @@ public class IndexShard extends AbstractIndexShardComponent { engine.flushAndClose(); } } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times - 
IOUtils.close(engine, shardFilterCache); + IOUtils.close(engine); } } } diff --git a/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 079008ef8cb..285463a176b 100644 --- a/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -801,12 +801,11 @@ public final class InternalTestCluster extends TestCluster { } void resetClient() throws IOException { - if (closed.get()) { - throw new RuntimeException("already closed"); + if (closed.get() == false) { + Releasables.close(nodeClient, transportClient); + nodeClient = null; + transportClient = null; } - Releasables.close(nodeClient, transportClient); - nodeClient = null; - transportClient = null; } void closeNode() { From e98b68a665c6c236dc5721c13b4df85f1da34f47 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 29 May 2015 10:29:28 +0200 Subject: [PATCH 060/123] Prevent changing the number of replicas on a closed index Setting the number of replicas on a closed index can leave the index in an unopenable state since we might not be able to recover a quorum. This commit simply prevents updating this setting on a closed index. 
Closes #9566 --- .../metadata/MetaDataUpdateSettingsService.java | 8 +++++++- .../indices/IndicesOptionsIntegrationTests.java | 2 +- .../indices/settings/UpdateSettingsTests.java | 11 +++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 2f40335116e..07e67f11e54 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -231,9 +231,15 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } } + if (closeIndices.size() > 0 && closeSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { + throw new IllegalArgumentException(String.format(Locale.ROOT, + "Can't update [%s] on closed indices [%s] - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS, + closeIndices + )); + } if (!removedSettings.isEmpty() && !openIndices.isEmpty()) { throw new IllegalArgumentException(String.format(Locale.ROOT, - "Can't update non dynamic settings[%s] for open indices[%s]", + "Can't update non dynamic settings[%s] for open indices [%s]", removedSettings, openIndices )); diff --git a/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationTests.java b/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationTests.java index 253aad86a4a..baec5760b7b 100644 --- a/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationTests.java +++ b/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationTests.java @@ -773,7 +773,7 @@ public class IndicesOptionsIntegrationTests extends ElasticsearchIntegrationTest try { verify(client().admin().indices().prepareUpdateSettings("barbaz").setSettings(Settings.builder().put("e", "f")), false); } catch (IllegalArgumentException e) { - 
assertThat(e.getMessage(), equalTo("Can't update non dynamic settings[[index.e]] for open indices[[barbaz]]")); + assertThat(e.getMessage(), equalTo("Can't update non dynamic settings[[index.e]] for open indices [[barbaz]]")); } verify(client().admin().indices().prepareUpdateSettings("baz*").setSettings(Settings.builder().put("a", "b")), true); } diff --git a/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsTests.java b/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsTests.java index a1b2fde2955..e1ca345b68d 100644 --- a/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsTests.java +++ b/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsTests.java @@ -92,6 +92,17 @@ public class UpdateSettingsTests extends ElasticsearchIntegrationTest { client().admin().indices().prepareClose("test").execute().actionGet(); + try { + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.settingsBuilder() + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + ) + .execute().actionGet(); + fail("can't change number of replicas on a closed index"); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Can't update [index.number_of_replicas] on closed indices [[test]] - can leave index in an unopenable state"); + // expected + } client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", "1s") // this one can change From 521f804c7dcb3173efca2abbeabedb901b2bd2d7 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 2 Apr 2015 13:30:25 +0200 Subject: [PATCH 061/123] Mapping: add an assertion to verify consistent serialization We recently ran into two issues where mappings weren't serialized in a consistent manner (#10302 and #10318). We rely on this consistency to do a byte level check that mappings we get from the master are identical to the one we have locally. Mistakes here can cause endless refresh mapping loops.
This commit adds an assert that verifies this upon every update from the master. --- .../elasticsearch/index/mapper/MapperService.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 85a3aae5bd4..b63df2d6cc4 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -267,6 +267,7 @@ public class MapperService extends AbstractIndexComponent { } } fieldDataService.onMappingUpdate(); + assert assertSerialization(oldMapper); return oldMapper; } else { List newObjectMappers = new ArrayList<>(); @@ -284,11 +285,25 @@ public class MapperService extends AbstractIndexComponent { typeListener.beforeCreate(mapper); } mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map(); + assert assertSerialization(mapper); return mapper; } } } + private boolean assertSerialization(DocumentMapper mapper) { + // capture the source now, it may change due to concurrent parsing + final CompressedString mappingSource = mapper.mappingSource(); + DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); + + if (newMapper.mappingSource().equals(mappingSource) == false) { + throw new IllegalStateException("DocumentMapper serialization result is different from source. 
\n--> Source [" + + mappingSource + "]\n--> Result [" + + newMapper.mappingSource() + "]"); + } + return true; + } + protected void addObjectMappers(Collection objectMappers) { assert mappingLock.isWriteLockedByCurrentThread(); ImmutableOpenMap.Builder fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers); From a031232c484542c0a61362e650f9e61d120aa6af Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 29 May 2015 11:39:20 +0200 Subject: [PATCH 062/123] [doc] remove reference to seal, was removed in #11336 --- docs/reference/indices.asciidoc | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/reference/indices.asciidoc b/docs/reference/indices.asciidoc index 06a24821440..c9af8c12271 100644 --- a/docs/reference/indices.asciidoc +++ b/docs/reference/indices.asciidoc @@ -107,8 +107,6 @@ include::indices/refresh.asciidoc[] include::indices/optimize.asciidoc[] -include::indices/seal.asciidoc[] - include::indices/shadow-replicas.asciidoc[] include::indices/upgrade.asciidoc[] From c3c79290c6f99fb7ba956f5b733a4a89dff6ceaf Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 29 May 2015 11:59:27 +0200 Subject: [PATCH 063/123] Close lock even if we fail to obtain --- .../java/org/elasticsearch/common/lucene/Lucene.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/src/main/java/org/elasticsearch/common/lucene/Lucene.java index e3d787779c7..7154f83c71d 100644 --- a/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -189,8 +189,16 @@ public class Lucene { @SuppressForbidden(reason = "this method uses trappy Directory#makeLock API") public static Lock acquireLock(Directory directory, String lockName, long timeout) throws IOException { final Lock writeLock = directory.makeLock(lockName); - if (writeLock.obtain(timeout) == false) { - throw new LockObtainFailedException("failed to 
obtain lock: " + writeLock); + boolean success = false; + try { + if (writeLock.obtain(timeout) == false) { + throw new LockObtainFailedException("failed to obtain lock: " + writeLock); + } + success = true; + } finally { + if (success == false) { + writeLock.close(); + } } return writeLock; } From 08ee4a87b3b4cc7fff1204623014b359861628a7 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 May 2015 12:36:44 +0200 Subject: [PATCH 064/123] Internal: tighten up our compression framework. We have a compression framework that we use internally, mainly to compress some xcontent bytes. However it is quite lenient: for instance it relies on the assumption that detection of the compression format can only be called on either some compressed xcontent bytes or some raw xcontent bytes, but nothing checks this. By the way, we are misusing it in BinaryFieldMapper so that if someone indexes a binary field which happens to have the same header as a LZF stream, then at read time, we will try to decompress it. It also simplifies the API by removing block compression (only streaming) and some code duplication caused by some methods accepting a byte[] and other methods a BytesReference. 
--- .../TransportReplicationAction.java | 4 +- .../elasticsearch/cluster/ClusterState.java | 4 +- .../cluster/metadata/AliasMetaData.java | 22 ++-- .../cluster/metadata/IndexMetaData.java | 4 +- .../metadata/IndexTemplateMetaData.java | 24 ++-- .../cluster/metadata/MappingMetaData.java | 20 +-- .../metadata/MetaDataCreateIndexService.java | 8 +- .../metadata/MetaDataMappingService.java | 16 +-- .../common/bytes/PagedBytesReference.java | 13 +- .../common/compress/CompressedIndexInput.java | 6 +- .../compress/CompressedStreamInput.java | 13 +- .../compress/CompressedStreamOutput.java | 6 +- ...sedString.java => CompressedXContent.java} | 39 +++--- .../common/compress/Compressor.java | 24 ++-- .../common/compress/CompressorFactory.java | 68 +++++----- ...ntext.java => NotCompressedException.java} | 12 +- ...Context.java => NotXContentException.java} | 14 +- .../compress/lzf/LZFCompressedIndexInput.java | 4 +- .../lzf/LZFCompressedStreamInput.java | 4 +- .../lzf/LZFCompressedStreamOutput.java | 4 +- .../common/compress/lzf/LZFCompressor.java | 19 --- .../io/stream/InputStreamStreamInput.java | 10 ++ .../common/xcontent/XContentFactory.java | 121 ++++++++++-------- .../common/xcontent/XContentHelper.java | 75 +++-------- .../gateway/MetaDataStateFormat.java | 16 ++- .../index/aliases/IndexAlias.java | 8 +- .../index/aliases/IndexAliasesService.java | 8 +- .../index/mapper/DocumentMapper.java | 8 +- .../index/mapper/DocumentMapperParser.java | 8 +- .../index/mapper/MapperService.java | 8 +- .../index/mapper/core/BinaryFieldMapper.java | 15 ++- .../mapper/internal/SourceFieldMapper.java | 10 +- .../cluster/IndicesClusterStateService.java | 8 +- .../blobstore/BlobStoreRepository.java | 11 +- .../search/lookup/SourceLookup.java | 8 -- .../netty/MessageChannelHandler.java | 7 +- .../metadata/MappingMetaDataParserTests.java | 36 +++--- ...ests.java => CompressedXContentTests.java} | 34 ++--- .../common/xcontent/XContentFactoryTests.java | 2 +- 
.../aliases/IndexAliasesServiceTests.java | 7 +- .../fielddata/ParentChildFieldDataTests.java | 6 +- .../mapper/binary/BinaryMappingTests.java | 4 +- .../mapper/merge/TestMergeMapperTests.java | 6 +- .../mapper/multifield/MultiFieldTests.java | 2 +- .../source/CompressSourceMappingTests.java | 9 +- .../source/DefaultSourceMappingTests.java | 8 +- .../timestamp/TimestampMappingTests.java | 10 +- .../index/mapper/ttl/TTLMappingTests.java | 43 ++++--- .../mapper/update/UpdateMappingTests.java | 28 ++-- ...QueryParserFilterDateRangeFormatTests.java | 4 +- ...eryParserFilterDateRangeTimezoneTests.java | 4 +- .../query/SimpleIndexQueryParserTests.java | 4 +- .../search/child/AbstractChildTests.java | 6 +- .../template/SimpleIndexTemplateTests.java | 4 + .../bucket/nested/NestedAggregatorTest.java | 4 +- 55 files changed, 436 insertions(+), 434 deletions(-) rename src/main/java/org/elasticsearch/common/compress/{CompressedString.java => CompressedXContent.java} (74%) rename src/main/java/org/elasticsearch/common/compress/{CompressorContext.java => NotCompressedException.java} (73%) rename src/main/java/org/elasticsearch/common/compress/{lzf/LZFCompressorContext.java => NotXContentException.java} (69%) rename src/test/java/org/elasticsearch/common/compress/{CompressedStringTests.java => CompressedXContentTests.java} (72%) diff --git a/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 2e2a9e7abf3..d3f7a5b9356 100644 --- a/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -43,7 +43,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; -import 
org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; @@ -1080,7 +1080,7 @@ public abstract class TransportReplicationAction { builder.endObject(); builder.startObject("mappings"); - for (ObjectObjectCursor cursor1 : templateMetaData.mappings()) { + for (ObjectObjectCursor cursor1 : templateMetaData.mappings()) { byte[] mappingSource = cursor1.value.uncompressed(); XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource); Map mapping = parser.map(); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index 0f7e55c8087..fb640eedc5a 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableSet; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -45,7 +45,7 @@ public class AliasMetaData extends AbstractDiffable { private final String alias; - private final CompressedString filter; + private final CompressedXContent filter; private final String indexRouting; @@ -53,7 +53,7 @@ public class AliasMetaData extends AbstractDiffable { private final Set searchRoutingValues; - private AliasMetaData(String alias, CompressedString filter, String indexRouting, String searchRouting) { + private 
AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting) { this.alias = alias; this.filter = filter; this.indexRouting = indexRouting; @@ -77,11 +77,11 @@ public class AliasMetaData extends AbstractDiffable { return alias(); } - public CompressedString filter() { + public CompressedXContent filter() { return filter; } - public CompressedString getFilter() { + public CompressedXContent getFilter() { return filter(); } @@ -176,9 +176,9 @@ public class AliasMetaData extends AbstractDiffable { @Override public AliasMetaData readFrom(StreamInput in) throws IOException { String alias = in.readString(); - CompressedString filter = null; + CompressedXContent filter = null; if (in.readBoolean()) { - filter = CompressedString.readCompressedString(in); + filter = CompressedXContent.readCompressedString(in); } String indexRouting = null; if (in.readBoolean()) { @@ -195,7 +195,7 @@ public class AliasMetaData extends AbstractDiffable { private final String alias; - private CompressedString filter; + private CompressedXContent filter; private String indexRouting; @@ -217,7 +217,7 @@ public class AliasMetaData extends AbstractDiffable { return alias; } - public Builder filter(CompressedString filter) { + public Builder filter(CompressedXContent filter) { this.filter = filter; return this; } @@ -244,7 +244,7 @@ public class AliasMetaData extends AbstractDiffable { } try { XContentBuilder builder = XContentFactory.jsonBuilder().map(filter); - this.filter = new CompressedString(builder.bytes()); + this.filter = new CompressedXContent(builder.bytes()); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -324,7 +324,7 @@ public class AliasMetaData extends AbstractDiffable { } } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { if ("filter".equals(currentFieldName)) { - builder.filter(new CompressedString(parser.binaryValue())); + builder.filter(new 
CompressedXContent(parser.binaryValue())); } } else if (token == XContentParser.Token.VALUE_STRING) { if ("routing".equals(currentFieldName)) { diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 07703bca591..2f3f6c889f8 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -35,7 +35,7 @@ import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -874,7 +874,7 @@ public class IndexMetaData implements Diffable { if ("mappings".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { - builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue()))); + builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue()))); } else { Map mapping = parser.mapOrdered(); if (mapping.size() == 1) { diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index ae555a54e75..d91d0817cfc 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -24,7 +24,7 @@ import com.google.common.collect.Sets; import org.elasticsearch.cluster.AbstractDiffable; import 
org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -54,13 +54,13 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings; + private final ImmutableOpenMap mappings; private final ImmutableOpenMap aliases; private final ImmutableOpenMap customs; - public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap mappings, + public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs) { this.name = name; this.order = order; @@ -103,11 +103,11 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings() { + public ImmutableOpenMap mappings() { return this.mappings; } - public ImmutableOpenMap getMappings() { + public ImmutableOpenMap getMappings() { return this.mappings; } @@ -170,7 +170,7 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : mappings) { + for (ObjectObjectCursor cursor : mappings) { out.writeString(cursor.key); cursor.value.writeTo(out); } @@ -223,7 +223,7 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings; + private final ImmutableOpenMap.Builder mappings; private final ImmutableOpenMap.Builder aliases; @@ -276,13 +276,13 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { byte[] mappingSource = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource); Map mapping = parser.map(); @@ -341,7 +341,7 @@ public class 
IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { byte[] data = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(data).createParser(data); Map mapping = parser.mapOrderedAndClose(); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index e6067c46817..2d8054d748f 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -276,7 +276,7 @@ public class MappingMetaData extends AbstractDiffable { private final String type; - private final CompressedString source; + private final CompressedXContent source; private Id id; private Routing routing; @@ -294,9 +294,9 @@ public class MappingMetaData extends AbstractDiffable { this.hasParentField = docMapper.parentFieldMapper().active(); } - public MappingMetaData(CompressedString mapping) throws IOException { + public MappingMetaData(CompressedXContent mapping) throws IOException { this.source = mapping; - Map mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose(); + Map mappingMap = XContentHelper.createParser(mapping.compressedReference()).mapOrderedAndClose(); if (mappingMap.size() != 1) { throw new 
IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } @@ -311,7 +311,7 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData(String type, Map mapping) throws IOException { this.type = type; XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); - this.source = new CompressedString(mappingBuilder.bytes()); + this.source = new CompressedXContent(mappingBuilder.bytes()); Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); @@ -322,7 +322,7 @@ public class MappingMetaData extends AbstractDiffable { private MappingMetaData() { this.type = ""; try { - this.source = new CompressedString(""); + this.source = new CompressedXContent("{}"); } catch (IOException ex) { throw new IllegalStateException("Cannot create MappingMetaData prototype", ex); } @@ -393,7 +393,7 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { + public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { this.type = type; this.source = source; this.id = id; @@ -418,7 +418,7 @@ public class MappingMetaData extends AbstractDiffable { return this.type; } - public CompressedString source() { + public CompressedXContent source() { return this.source; } @@ -430,7 +430,7 @@ public class MappingMetaData extends AbstractDiffable { * Converts the serialized compressed form of the mappings into a parsed map. 
*/ public Map sourceAsMap() throws IOException { - Map mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2(); + Map mapping = XContentHelper.convertToMap(source.compressedReference(), true).v2(); if (mapping.size() == 1 && mapping.containsKey(type())) { // the type name is the root value, reduce it mapping = (Map) mapping.get(type()); @@ -599,7 +599,7 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData readFrom(StreamInput in) throws IOException { String type = in.readString(); - CompressedString source = CompressedString.readCompressedString(in); + CompressedXContent source = CompressedXContent.readCompressedString(in); // id Id id = new Id(in.readBoolean() ? in.readString() : null); // routing diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 41e310a95ad..612989bb451 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -46,7 +46,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.regex.Regex; @@ -254,7 +254,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // apply templates, merging the mappings into the request mapping if exists for (IndexTemplateMetaData template : templates) { templateNames.add(template.getName()); - for (ObjectObjectCursor cursor : template.mappings()) { + for (ObjectObjectCursor cursor : 
template.mappings()) { if (mappings.containsKey(cursor.key)) { XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string())); } else { @@ -357,7 +357,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { try { - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false); } catch (Exception e) { removalReason = "failed on parsing default mapping on index creation"; throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e); @@ -369,7 +369,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } try { // apply the default here, its the first time we parse it - mapperService.merge(entry.getKey(), new CompressedString(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true); + mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true); } catch (Exception e) { removalReason = "failed on parsing mappings on index creation"; throw new MapperParsingException("mapping [" + entry.getKey() + "]", e); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 0e39e7a613d..4406decfe0c 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -32,7 +32,7 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.common.Priority; import 
org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; @@ -91,11 +91,11 @@ public class MetaDataMappingService extends AbstractComponent { static class UpdateTask extends MappingTask { final String type; - final CompressedString mappingSource; + final CompressedXContent mappingSource; final String nodeId; // null fr unknown final ActionListener listener; - UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId, ActionListener listener) { + UpdateTask(String index, String indexUUID, String type, CompressedXContent mappingSource, String nodeId, ActionListener listener) { super(index, indexUUID); this.type = type; this.mappingSource = mappingSource; @@ -254,7 +254,7 @@ public class MetaDataMappingService extends AbstractComponent { UpdateTask updateTask = (UpdateTask) task; try { String type = updateTask.type; - CompressedString mappingSource = updateTask.mappingSource; + CompressedXContent mappingSource = updateTask.mappingSource; MappingMetaData mappingMetaData = builder.mapping(type); if (mappingMetaData != null && mappingMetaData.source().equals(mappingSource)) { @@ -376,9 +376,9 @@ public class MetaDataMappingService extends AbstractComponent { DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings. 
Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), false); + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); } else { - newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null); + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); if (existingMapper != null) { // first, simulate MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true); @@ -415,12 +415,12 @@ public class MetaDataMappingService extends AbstractComponent { continue; } - CompressedString existingSource = null; + CompressedXContent existingSource = null; if (existingMappers.containsKey(entry.getKey())) { existingSource = existingMappers.get(entry.getKey()).mappingSource(); } DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false); - CompressedString updatedSource = mergedMapper.mappingSource(); + CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { if (existingSource.equals(updatedSource)) { diff --git a/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java b/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java index 1bc370cd894..add383b75fa 100644 --- a/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java +++ b/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java @@ -352,6 +352,7 @@ public class PagedBytesReference implements BytesReference { private final int offset; private final int length; private int pos; + private int mark; public PagedBytesReferenceStreamInput(ByteArray bytearray, int offset, int length) { this.bytearray = bytearray; @@ -420,9 +421,19 @@ public class PagedBytesReference implements BytesReference { 
return copiedBytes; } + @Override + public boolean markSupported() { + return true; + } + + @Override + public void mark(int readlimit) { + this.mark = pos; + } + @Override public void reset() throws IOException { - pos = 0; + pos = mark; } @Override diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java b/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java index 06ec2a2f48f..12094108932 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java @@ -30,10 +30,9 @@ import java.io.IOException; * @deprecated Used only for backward comp. to read old compressed files, since we now use codec based compression */ @Deprecated -public abstract class CompressedIndexInput extends IndexInput { +public abstract class CompressedIndexInput extends IndexInput { private IndexInput in; - protected final T context; private int version; private long totalUncompressedLength; @@ -48,10 +47,9 @@ public abstract class CompressedIndexInput extends private int currentOffsetIdx; private long currentUncompressedChunkPointer; - public CompressedIndexInput(IndexInput in, T context) throws IOException { + public CompressedIndexInput(IndexInput in) throws IOException { super("compressed(" + in.toString() + ")"); this.in = in; - this.context = context; readHeader(in); this.version = in.readInt(); long metaDataPosition = in.readLong(); diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java b/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java index 3df98a7f718..82eefe13a4c 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java @@ -27,10 +27,9 @@ import java.io.IOException; /** */ -public abstract class CompressedStreamInput extends StreamInput { +public abstract class 
CompressedStreamInput extends StreamInput { private final StreamInput in; - protected final CompressorContext context; private boolean closed; @@ -38,9 +37,8 @@ public abstract class CompressedStreamInput extends private int position = 0; private int valid = 0; - public CompressedStreamInput(StreamInput in, T context) throws IOException { + public CompressedStreamInput(StreamInput in) throws IOException { this.in = in; - this.context = context; super.setVersion(in.getVersion()); readHeader(in); } @@ -51,13 +49,6 @@ public abstract class CompressedStreamInput extends return super.setVersion(version); } - /** - * Expert!, resets to buffer start, without the need to decompress it again. - */ - public void resetToBufferStart() { - this.position = 0; - } - /** * Method is overridden to report number of bytes that can now be read * from decoded data buffer, without reading bytes from the underlying diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java b/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java index 009fddc074c..9e0763816a8 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java @@ -26,10 +26,9 @@ import java.io.IOException; /** */ -public abstract class CompressedStreamOutput extends StreamOutput { +public abstract class CompressedStreamOutput extends StreamOutput { private final StreamOutput out; - protected final T context; protected byte[] uncompressed; protected int uncompressedLength; @@ -37,9 +36,8 @@ public abstract class CompressedStreamOutput extend private boolean closed; - public CompressedStreamOutput(StreamOutput out, T context) throws IOException { + public CompressedStreamOutput(StreamOutput out) throws IOException { this.out = out; - this.context = context; super.setVersion(out.getVersion()); writeHeader(out); } diff --git 
a/src/main/java/org/elasticsearch/common/compress/CompressedString.java b/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java similarity index 74% rename from src/main/java/org/elasticsearch/common/compress/CompressedString.java rename to src/main/java/org/elasticsearch/common/compress/CompressedXContent.java index aca1d45f86d..09ced0e29b2 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedString.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.compress; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,33 +35,32 @@ import java.util.Arrays; * memory. Note that the compressed string might still sometimes need to be * decompressed in order to perform equality checks or to compute hash codes. */ -public final class CompressedString { +public final class CompressedXContent { private final byte[] bytes; private int hashCode; - public CompressedString(BytesReference data) throws IOException { + public CompressedXContent(BytesReference data) throws IOException { Compressor compressor = CompressorFactory.compressor(data); if (compressor != null) { // already compressed... 
this.bytes = data.toBytes(); } else { - BytesArray bytesArray = data.toBytesArray(); - this.bytes = CompressorFactory.defaultCompressor().compress(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length()); - assert CompressorFactory.compressor(bytes) != null; + BytesStreamOutput out = new BytesStreamOutput(); + try (StreamOutput compressedOutput = CompressorFactory.defaultCompressor().streamOutput(out)) { + data.writeTo(compressedOutput); + } + this.bytes = out.bytes().toBytes(); + assert CompressorFactory.compressor(new BytesArray(bytes)) != null; } } - public CompressedString(byte[] data, int offset, int length) throws IOException { - this(new BytesArray(data, offset, length)); + public CompressedXContent(byte[] data) throws IOException { + this(new BytesArray(data)); } - public CompressedString(byte[] data) throws IOException { - this(data, 0, data.length); - } - - public CompressedString(String str) throws IOException { + public CompressedXContent(String str) throws IOException { this(new BytesArray(new BytesRef(str))); } @@ -69,12 +69,15 @@ public final class CompressedString { return this.bytes; } + /** Return the compressed bytes as a {@link BytesReference}. */ + public BytesReference compressedReference() { + return new BytesArray(bytes); + } + /** Return the uncompressed bytes. 
*/ public byte[] uncompressed() { - Compressor compressor = CompressorFactory.compressor(bytes); - assert compressor != null; try { - return compressor.uncompress(bytes, 0, bytes.length); + return CompressorFactory.uncompress(new BytesArray(bytes)).toBytes(); } catch (IOException e) { throw new IllegalStateException("Cannot decompress compressed string", e); } @@ -84,10 +87,10 @@ public final class CompressedString { return new BytesRef(uncompressed()).utf8ToString(); } - public static CompressedString readCompressedString(StreamInput in) throws IOException { + public static CompressedXContent readCompressedString(StreamInput in) throws IOException { byte[] bytes = new byte[in.readVInt()]; in.readBytes(bytes, 0, bytes.length); - return new CompressedString(bytes); + return new CompressedXContent(bytes); } public void writeTo(StreamOutput out) throws IOException { @@ -100,7 +103,7 @@ public final class CompressedString { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CompressedString that = (CompressedString) o; + CompressedXContent that = (CompressedXContent) o; if (Arrays.equals(compressed(), that.compressed())) { return true; diff --git a/src/main/java/org/elasticsearch/common/compress/Compressor.java b/src/main/java/org/elasticsearch/common/compress/Compressor.java index 8d0199703ac..d8f0ae82bf6 100644 --- a/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -38,26 +38,18 @@ public interface Compressor { boolean isCompressed(BytesReference bytes); - boolean isCompressed(byte[] data, int offset, int length); - boolean isCompressed(ChannelBuffer buffer); + StreamInput streamInput(StreamInput in) throws IOException; + + StreamOutput streamOutput(StreamOutput out) throws IOException; + + /** + * @deprecated Used for backward comp. since we now use Lucene compressed codec. 
+ */ + @Deprecated boolean isCompressed(IndexInput in) throws IOException; - /** - * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. - */ - byte[] uncompress(byte[] data, int offset, int length) throws IOException; - - /** - * Compresses the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. - */ - byte[] compress(byte[] data, int offset, int length) throws IOException; - - CompressedStreamInput streamInput(StreamInput in) throws IOException; - - CompressedStreamOutput streamOutput(StreamOutput out) throws IOException; - /** * @deprecated Used for backward comp. since we now use Lucene compressed codec. */ diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java index 9eb9c9d7212..9873123e558 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java @@ -21,9 +21,9 @@ package org.elasticsearch.common.compress; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; + import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.lzf.LZFCompressor; @@ -32,6 +32,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; @@ -92,14 +94,6 @@ public class CompressorFactory { 
return compressor(bytes) != null; } - public static boolean isCompressed(byte[] data) { - return compressor(data, 0, data.length) != null; - } - - public static boolean isCompressed(byte[] data, int offset, int length) { - return compressor(data, offset, length) != null; - } - public static boolean isCompressed(IndexInput in) throws IOException { return compressor(in) != null; } @@ -108,35 +102,29 @@ public class CompressorFactory { public static Compressor compressor(BytesReference bytes) { for (Compressor compressor : compressors) { if (compressor.isCompressed(bytes)) { + // bytes should be either detected as compressed or as xcontent, + // if we have bytes that can be either detected as compressed or + // as a xcontent, we have a problem + assert XContentFactory.xContentType(bytes) == null; return compressor; } } - return null; - } - @Nullable - public static Compressor compressor(byte[] data) { - return compressor(data, 0, data.length); - } - - @Nullable - public static Compressor compressor(byte[] data, int offset, int length) { - for (Compressor compressor : compressors) { - if (compressor.isCompressed(data, offset, length)) { - return compressor; - } + XContentType contentType = XContentFactory.xContentType(bytes); + if (contentType == null) { + throw new NotXContentException("Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes"); } + return null; } - @Nullable public static Compressor compressor(ChannelBuffer buffer) { for (Compressor compressor : compressors) { if (compressor.isCompressed(buffer)) { return compressor; } } - return null; + throw new NotCompressedException(); } @Nullable @@ -158,16 +146,30 @@ public class CompressorFactory { */ public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException { Compressor compressor = compressor(bytes); + BytesReference uncompressed; if (compressor != null) { - if (bytes.hasArray()) { - return new BytesArray(compressor.uncompress(bytes.array(), 
bytes.arrayOffset(), bytes.length())); - } - StreamInput compressed = compressor.streamInput(bytes.streamInput()); - BytesStreamOutput bStream = new BytesStreamOutput(); - Streams.copy(compressed, bStream); - compressed.close(); - return bStream.bytes(); + uncompressed = uncompress(bytes, compressor); + } else { + uncompressed = bytes; } - return bytes; + + return uncompressed; + } + + /** Decompress the provided {@link BytesReference}. */ + public static BytesReference uncompress(BytesReference bytes) throws IOException { + Compressor compressor = compressor(bytes); + if (compressor == null) { + throw new IllegalArgumentException("Bytes are not compressed"); + } + return uncompress(bytes, compressor); + } + + private static BytesReference uncompress(BytesReference bytes, Compressor compressor) throws IOException { + StreamInput compressed = compressor.streamInput(bytes.streamInput()); + BytesStreamOutput bStream = new BytesStreamOutput(); + Streams.copy(compressed, bStream); + compressed.close(); + return bStream.bytes(); } } diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorContext.java b/src/main/java/org/elasticsearch/common/compress/NotCompressedException.java similarity index 73% rename from src/main/java/org/elasticsearch/common/compress/CompressorContext.java rename to src/main/java/org/elasticsearch/common/compress/NotCompressedException.java index 9ad70554046..653483fc586 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorContext.java +++ b/src/main/java/org/elasticsearch/common/compress/NotCompressedException.java @@ -19,7 +19,13 @@ package org.elasticsearch.common.compress; -/** - */ -public interface CompressorContext { +/** Exception indicating that we were expecting something compressed, which + * was not compressed or corrupted so that the compression format could not + * be detected. 
*/ +public class NotCompressedException extends RuntimeException { + + public NotCompressedException() { + super(); + } + } diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java similarity index 69% rename from src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java rename to src/main/java/org/elasticsearch/common/compress/NotXContentException.java index 89c7b182c47..bca35c317d8 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java +++ b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.common.compress.lzf; +package org.elasticsearch.common.compress; -import org.elasticsearch.common.compress.CompressorContext; +/** Exception indicating that we were expecting something compressed, which + * was not compressed or corrupted so that the compression format could not + * be detected. 
*/ +public class NotXContentException extends RuntimeException { -/** - */ -public class LZFCompressorContext implements CompressorContext { + public NotXContentException(String message) { + super(message); + } - public static final LZFCompressorContext INSTANCE = new LZFCompressorContext(); } diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java index 326eceb77c4..93bd583662b 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java @@ -32,14 +32,14 @@ import java.util.Arrays; /** */ @Deprecated -public class LZFCompressedIndexInput extends CompressedIndexInput { +public class LZFCompressedIndexInput extends CompressedIndexInput { private final ChunkDecoder decoder; // scratch area buffer private byte[] inputBuffer; public LZFCompressedIndexInput(IndexInput in, ChunkDecoder decoder) throws IOException { - super(in, LZFCompressorContext.INSTANCE); + super(in); this.decoder = decoder; this.uncompressed = new byte[LZFChunk.MAX_CHUNK_LEN]; diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java index caaaadbeb3e..baefcaa8928 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java @@ -29,7 +29,7 @@ import java.io.IOException; /** */ -public class LZFCompressedStreamInput extends CompressedStreamInput { +public class LZFCompressedStreamInput extends CompressedStreamInput { private final BufferRecycler recycler; @@ -39,7 +39,7 @@ public class LZFCompressedStreamInput extends CompressedStreamInput { +public class LZFCompressedStreamOutput extends CompressedStreamOutput { private final 
BufferRecycler recycler; private final ChunkEncoder encoder; public LZFCompressedStreamOutput(StreamOutput out) throws IOException { - super(out, LZFCompressorContext.INSTANCE); + super(out); this.recycler = BufferRecycler.instance(); this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN); this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN; diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java index 57771a1e3a8..c5c937ccbb9 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.compress.lzf; import com.ning.compress.lzf.ChunkDecoder; import com.ning.compress.lzf.LZFChunk; -import com.ning.compress.lzf.LZFEncoder; import com.ning.compress.lzf.util.ChunkDecoderFactory; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; @@ -69,14 +68,6 @@ public class LZFCompressor implements Compressor { (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); } - @Override - public boolean isCompressed(byte[] data, int offset, int length) { - return length >= 3 && - data[offset] == LZFChunk.BYTE_Z && - data[offset + 1] == LZFChunk.BYTE_V && - (data[offset + 2] == LZFChunk.BLOCK_TYPE_COMPRESSED || data[offset + 2] == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); - } - @Override public boolean isCompressed(ChannelBuffer buffer) { int offset = buffer.readerIndex(); @@ -103,16 +94,6 @@ public class LZFCompressor implements Compressor { return true; } - @Override - public byte[] uncompress(byte[] data, int offset, int length) throws IOException { - return decoder.decode(data, offset, length); - } - - @Override - public byte[] compress(byte[] data, int offset, int length) throws IOException { - return LZFEncoder.safeEncode(data, 
offset, length); - } - @Override public CompressedStreamInput streamInput(StreamInput in) throws IOException { return new LZFCompressedStreamInput(in, decoder); diff --git a/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java index ffe8d297ba4..e9aa52cf4d0 100644 --- a/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java +++ b/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java @@ -59,6 +59,16 @@ public class InputStreamStreamInput extends StreamInput { is.reset(); } + @Override + public boolean markSupported() { + return is.markSupported(); + } + + @Override + public void mark(int readlimit) { + is.mark(readlimit); + } + @Override public void close() throws IOException { is.close(); diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java b/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java index 9ae1a03a67d..75e57509948 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.smile.SmileConstants; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -163,6 +164,9 @@ public class XContentFactory { if (c == '{') { return XContentType.JSON; } + if (Character.isWhitespace(c) == false) { + break; + } } return null; } @@ -204,65 +208,76 @@ public class XContentFactory { } /** - * Guesses the content type based on the provided input stream. + * Guesses the content type based on the provided input stream without consuming it. 
*/ public static XContentType xContentType(InputStream si) throws IOException { - final int firstInt = si.read(); // this must be an int since we need to respect the method contract - if (firstInt == -1) { - return null; + if (si.markSupported() == false) { + throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); } - - final int secondInt = si.read(); // this must be an int since we need to respect the method contract - if (secondInt == -1) { - return null; - } - final byte first = (byte) (0xff & firstInt); - final byte second = (byte) (0xff & secondInt); - if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) { - int third = si.read(); - if (third == SmileConstants.HEADER_BYTE_3) { - return XContentType.SMILE; - } - } - if (first == '{' || second == '{') { - return XContentType.JSON; - } - if (first == '-' && second == '-') { - int third = si.read(); - if (third == '-') { - return XContentType.YAML; - } - } - // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){ - return XContentType.CBOR; - } - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) { - // Actually, specific "self-describe tag" is a very good indicator - int third = si.read(); - if (third == -1) { + si.mark(GUESS_HEADER_LENGTH); + try { + final int firstInt = si.read(); // this must be an int since we need to respect the method contract + if (firstInt == -1) { return null; } - if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) { - return XContentType.CBOR; - } - } - // for small objects, some encoders just encode as major type object, we can safely - // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { - return XContentType.CBOR; - } - for (int i = 2; i < GUESS_HEADER_LENGTH; i++) { - int val = si.read(); - if (val 
== -1) { - return null; + final int secondInt = si.read(); // this must be an int since we need to respect the method contract + if (secondInt == -1) { + return null; } - if (val == '{') { + final byte first = (byte) (0xff & firstInt); + final byte second = (byte) (0xff & secondInt); + if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) { + int third = si.read(); + if (third == SmileConstants.HEADER_BYTE_3) { + return XContentType.SMILE; + } + } + if (first == '{' || second == '{') { return XContentType.JSON; } + if (first == '-' && second == '-') { + int third = si.read(); + if (third == '-') { + return XContentType.YAML; + } + } + // CBOR logic similar to CBORFactory#hasCBORFormat + if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){ + return XContentType.CBOR; + } + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) { + // Actually, specific "self-describe tag" is a very good indicator + int third = si.read(); + if (third == -1) { + return null; + } + if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) { + return XContentType.CBOR; + } + } + // for small objects, some encoders just encode as major type object, we can safely + // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { + return XContentType.CBOR; + } + + for (int i = 2; i < GUESS_HEADER_LENGTH; i++) { + int val = si.read(); + if (val == -1) { + return null; + } + if (val == '{') { + return XContentType.JSON; + } + if (Character.isWhitespace(val) == false) { + break; + } + } + return null; + } finally { + si.reset(); } - return null; } /** @@ -284,7 +299,7 @@ public class XContentFactory { * Guesses the content type based on the provided bytes. */ public static XContentType xContentType(BytesReference bytes) { - int length = bytes.length() < GUESS_HEADER_LENGTH ? 
bytes.length() : GUESS_HEADER_LENGTH; + int length = bytes.length(); if (length == 0) { return null; } @@ -316,9 +331,13 @@ public class XContentFactory { // a last chance for JSON for (int i = 0; i < length; i++) { - if (bytes.get(i) == '{') { + byte b = bytes.get(i); + if (b == '{') { return XContentType.JSON; } + if (Character.isWhitespace(b) == false) { + break; + } } return null; } diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index d196d459fbd..5325950e202 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -28,14 +28,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedStreamInput; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent.Params; +import java.io.BufferedInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -49,45 +49,30 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; public class XContentHelper { public static XContentParser createParser(BytesReference bytes) throws IOException { - if (bytes.hasArray()) { - return createParser(bytes.array(), bytes.arrayOffset(), bytes.length()); - } Compressor compressor = CompressorFactory.compressor(bytes); if (compressor != null) { - CompressedStreamInput compressedInput = compressor.streamInput(bytes.streamInput()); + InputStream compressedInput = 
compressor.streamInput(bytes.streamInput()); + if (compressedInput.markSupported() == false) { + compressedInput = new BufferedInputStream(compressedInput); + } XContentType contentType = XContentFactory.xContentType(compressedInput); - compressedInput.resetToBufferStart(); return XContentFactory.xContent(contentType).createParser(compressedInput); } else { return XContentFactory.xContent(bytes).createParser(bytes.streamInput()); } } - - public static XContentParser createParser(byte[] data, int offset, int length) throws IOException { - Compressor compressor = CompressorFactory.compressor(data, offset, length); - if (compressor != null) { - CompressedStreamInput compressedInput = compressor.streamInput(StreamInput.wrap(data, offset, length)); - XContentType contentType = XContentFactory.xContentType(compressedInput); - compressedInput.resetToBufferStart(); - return XContentFactory.xContent(contentType).createParser(compressedInput); - } else { - return XContentFactory.xContent(data, offset, length).createParser(data, offset, length); - } - } - public static Tuple> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException { - if (bytes.hasArray()) { - return convertToMap(bytes.array(), bytes.arrayOffset(), bytes.length(), ordered); - } try { XContentParser parser; XContentType contentType; Compressor compressor = CompressorFactory.compressor(bytes); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(bytes.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(bytes.streamInput()); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput); } else { contentType = XContentFactory.xContentType(bytes); @@ 
-103,34 +88,6 @@ public class XContentHelper { } } - public static Tuple> convertToMap(byte[] data, boolean ordered) throws ElasticsearchParseException { - return convertToMap(data, 0, data.length, ordered); - } - - public static Tuple> convertToMap(byte[] data, int offset, int length, boolean ordered) throws ElasticsearchParseException { - try { - XContentParser parser; - XContentType contentType; - Compressor compressor = CompressorFactory.compressor(data, offset, length); - if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(StreamInput.wrap(data, offset, length)); - contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); - parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput); - } else { - contentType = XContentFactory.xContentType(data, offset, length); - parser = XContentFactory.xContent(contentType).createParser(data, offset, length); - } - if (ordered) { - return Tuple.tuple(contentType, parser.mapOrderedAndClose()); - } else { - return Tuple.tuple(contentType, parser.mapAndClose()); - } - } catch (IOException e) { - throw new ElasticsearchParseException("Failed to parse content to map", e); - } - } - public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException { return convertToJson(bytes, reformatJson, false); } @@ -426,9 +383,11 @@ public class XContentHelper { public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } 
XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); if (contentType == rawBuilder.contentType()) { Streams.copy(compressedStreamInput, rawBuilder.stream()); } else { @@ -457,9 +416,11 @@ public class XContentHelper { public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } if (contentType == builder.contentType()) { builder.rawField(field, compressedStreamInput); } else { diff --git a/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 9d88d84f64a..9ea7cf5e60b 100644 --- a/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -21,16 +21,26 @@ package org.elasticsearch.gateway; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; + import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import 
org.apache.lucene.store.OutputStreamIndexOutput; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.io.OutputStream; @@ -280,7 +290,7 @@ public abstract class MetaDataStateFormat { logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); continue; } - parser = XContentHelper.createParser(data, 0, data.length); + parser = XContentHelper.createParser(new BytesArray(data)); state = fromXContent(parser); if (state == null) { logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); diff --git a/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java b/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java index 3d02731dbfa..48ebc4239ac 100644 --- a/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java +++ b/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.aliases; import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; /** * @@ -30,11 +30,11 @@ public class IndexAlias { private final String alias; - private final CompressedString filter; + private final CompressedXContent filter; private final Query parsedFilter; - public IndexAlias(String alias, @Nullable 
CompressedString filter, @Nullable Query parsedFilter) { + public IndexAlias(String alias, @Nullable CompressedXContent filter, @Nullable Query parsedFilter) { this.alias = alias; this.filter = filter; this.parsedFilter = parsedFilter; @@ -45,7 +45,7 @@ public class IndexAlias { } @Nullable - public CompressedString filter() { + public CompressedXContent filter() { return filter; } diff --git a/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java b/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java index a097a01675b..21d6582e03f 100644 --- a/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java +++ b/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java @@ -23,7 +23,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -63,11 +63,11 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera return aliases.get(alias); } - public IndexAlias create(String alias, @Nullable CompressedString filter) { + public IndexAlias create(String alias, @Nullable CompressedXContent filter) { return new IndexAlias(alias, filter, parse(alias, filter)); } - public void add(String alias, @Nullable CompressedString filter) { + public void add(String alias, @Nullable CompressedXContent filter) { add(new IndexAlias(alias, filter, parse(alias, filter))); } @@ -120,7 +120,7 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera aliases.remove(alias); } - private Query parse(String alias, CompressedString filter) { + private Query parse(String alias, 
CompressedXContent filter) { if (filter == null) { return null; } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 45da4be320b..2c4bd053251 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -35,7 +35,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; @@ -150,7 +150,7 @@ public class DocumentMapper implements ToXContent { private final String type; private final StringAndBytesText typeText; - private volatile CompressedString mappingSource; + private volatile CompressedXContent mappingSource; private final Mapping mapping; @@ -235,7 +235,7 @@ public class DocumentMapper implements ToXContent { return mapping.meta; } - public CompressedString mappingSource() { + public CompressedXContent mappingSource() { return this.mappingSource; } @@ -473,7 +473,7 @@ public class DocumentMapper implements ToXContent { toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); builder.close(); - mappingSource = new CompressedString(bStream.bytes()); + mappingSource = new CompressedXContent(bStream.bytes()); } catch (Exception e) { throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 9084e17d60b..4bb9e8d830e 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -194,15 +194,15 @@ public class DocumentMapperParser extends AbstractIndexComponent { return parse(type, mapping, defaultSource); } - public DocumentMapper parseCompressed(@Nullable String type, CompressedString source) throws MapperParsingException { + public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException { return parseCompressed(type, source, null); } @SuppressWarnings({"unchecked"}) - public DocumentMapper parseCompressed(@Nullable String type, CompressedString source, String defaultSource) throws MapperParsingException { + public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { Map mapping = null; if (source != null) { - Map root = XContentHelper.convertToMap(source.compressed(), true).v2(); + Map root = XContentHelper.convertToMap(source.compressedReference(), true).v2(); Tuple> t = extractMapping(type, root); type = t.v1(); mapping = t.v2(); diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b63df2d6cc4..8847550c6c9 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -43,7 +43,7 @@ import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; @@ -214,7 +214,7 @@ public class MapperService extends AbstractIndexComponent { typeListeners.remove(listener); } - public DocumentMapper merge(String type, CompressedString mappingSource, boolean applyDefault) { + public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); @@ -293,7 +293,7 @@ public class MapperService extends AbstractIndexComponent { private boolean assertSerialization(DocumentMapper mapper) { // capture the source now, it may change due to concurrent parsing - final CompressedString mappingSource = mapper.mappingSource(); + final CompressedXContent mappingSource = mapper.mappingSource(); DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); if (newMapper.mappingSource().equals(mappingSource) == false) { @@ -328,7 +328,7 @@ public class MapperService extends AbstractIndexComponent { this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers); } - public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException { + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { String defaultMappingSource; if (PercolatorService.TYPE_NAME.equals(mappingType)) { defaultMappingSource = this.defaultPercolatorMappingSource; diff --git 
a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 9972ca45cbd..91375efed47 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -36,6 +36,9 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotXContentException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; @@ -144,10 +147,16 @@ public class BinaryFieldMapper extends AbstractFieldMapper { } try { if (indexCreatedBefore2x) { - return CompressorFactory.uncompressIfNeeded(bytes); - } else { - return bytes; + try { + return CompressorFactory.uncompressIfNeeded(bytes); + } catch (NotXContentException e) { + // This is a BUG! We try to decompress by detecting a header in + // the stored bytes but since we accept arbitrary bytes, we have + // no guarantee that uncompressed bytes will be detected as + // compressed! 
+ } } + return bytes; } catch (IOException e) { throw new ElasticsearchParseException("failed to decompress source", e); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 7c8ae58d5fd..d5d745f263f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; + import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StoredField; @@ -31,7 +32,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedStreamInput; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -53,7 +53,9 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import java.io.BufferedInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.Arrays; import java.util.Iterator; import java.util.List; @@ -324,9 +326,11 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper // see if we need to convert the content type Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); + if (compressedStreamInput.markSupported() == 
false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); if (contentType != formatContentType) { // we need to reread and store back, compressed.... BytesStreamOutput bStream = new BytesStreamOutput(); diff --git a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 77832bb052d..6995ecae587 100644 --- a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -41,7 +41,7 @@ import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -369,7 +369,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent cursor : indexMetaData.mappings().values()) { MappingMetaData mappingMd = cursor.value; String mappingType = mappingMd.type(); - CompressedString mappingSource = mappingMd.source(); + CompressedXContent mappingSource = mappingMd.source(); if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first continue; } @@ -396,7 +396,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent(index, mappingType))) { seenMappings.put(new Tuple<>(index, mappingType), true); } @@ -484,7 +484,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent cursor : aliases) { AliasMetaData aliasMd = cursor.value; 
String alias = aliasMd.alias(); - CompressedString filter = aliasMd.filter(); + CompressedXContent filter = aliasMd.filter(); try { if (!indexAliasesService.hasAlias(alias)) { if (logger.isDebugEnabled()) { diff --git a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 180f6595521..83baf8d1e2d 100644 --- a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; + import org.apache.lucene.store.RateLimiter; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -35,6 +35,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressorFactory; @@ -407,7 +408,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent snapshots = new ArrayList<>(); - try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { + try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) { if (parser.nextToken() == XContentParser.Token.START_OBJECT) { if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { String currentFieldName = parser.currentName(); diff --git 
a/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java b/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java index ee7018d397e..7c3a2aa4e2d 100644 --- a/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java @@ -91,14 +91,6 @@ public class SourceLookup implements Map { return sourceAsMapAndType(source).v2(); } - public static Tuple> sourceAsMapAndType(byte[] bytes, int offset, int length) throws ElasticsearchParseException { - return XContentHelper.convertToMap(bytes, offset, length, false); - } - - public static Map sourceAsMap(byte[] bytes, int offset, int length) throws ElasticsearchParseException { - return sourceAsMapAndType(bytes, offset, length).v2(); - } - public void setSegmentAndDocument(LeafReaderContext context, int docId) { if (this.reader == context.reader() && this.docId == docId) { // if we are called with the same document, don't invalidate source diff --git a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index b0689df6f2c..aa2d69dac6b 100644 --- a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotCompressedException; import org.elasticsearch.common.io.ThrowableObjectInputStream; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; @@ -91,8 +92,10 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { StreamInput wrappedStream; if (TransportStatus.isCompress(status) && hasMessageBytesToRead && 
buffer.readable()) { - Compressor compressor = CompressorFactory.compressor(buffer); - if (compressor == null) { + Compressor compressor; + try { + compressor = CompressorFactory.compressor(buffer); + } catch (NotCompressedException ex) { int maxToRead = Math.min(buffer.readableBytes(), 10); int offset = buffer.readerIndex(); StringBuilder sb = new StringBuilder("stream marked as compressed, but no compressor found, first [").append(maxToRead).append("] content bytes out of [").append(buffer.readableBytes()).append("] readable bytes with message size [").append(size).append("] ").append("] are ["); diff --git a/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java b/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java index 230889699a4..18ebbc8c430 100644 --- a/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java +++ b/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -34,7 +34,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -52,7 +52,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void 
testFailIfIdIsNoValue() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -79,7 +79,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseRoutingAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -97,7 +97,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -115,11 +115,11 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampEquals() throws Exception { - MappingMetaData md1 = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - 
MappingMetaData md2 = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -128,7 +128,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestamp() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -143,7 +143,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -161,7 +161,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ 
-182,7 +182,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseRoutingWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -203,7 +203,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -224,7 +224,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -242,7 +242,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new 
MappingMetaData.Id("obj1.obj0.id"), new MappingMetaData.Routing(true, "obj1.obj2.routing"), new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -271,7 +271,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -291,7 +291,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { // @Test public void testParseIdRoutingTimestampWithRepeatedField() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("field1"), new MappingMetaData.Routing(true, "field1.field1"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -314,7 +314,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "field1.field1.field2"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -337,7 +337,7 @@ public class MappingMetaDataParserTests extends 
ElasticsearchTestCase { @Test public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id(null), new MappingMetaData.Routing(true, "field1.field2"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); diff --git a/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java b/src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java similarity index 72% rename from src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java rename to src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java index 4d9de6736e7..670ad10aaf2 100644 --- a/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java +++ b/src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java @@ -37,14 +37,14 @@ import static org.hamcrest.Matchers.not; /** * */ -public class CompressedStringTests extends ElasticsearchTestCase { +public class CompressedXContentTests extends ElasticsearchTestCase { @Test public void simpleTestsLZF() throws IOException { simpleTests("lzf"); } - private void assertEquals(CompressedString s1, CompressedString s2) { + private void assertEquals(CompressedXContent s1, CompressedXContent s2) { Assert.assertEquals(s1, s2); assertArrayEquals(s1.uncompressed(), s2.uncompressed()); assertEquals(s1.hashCode(), s2.hashCode()); @@ -52,16 +52,16 @@ public class CompressedStringTests extends ElasticsearchTestCase { public void simpleTests(String compressor) throws IOException { CompressorFactory.configure(Settings.settingsBuilder().put("compress.default.type", compressor).build()); - String str = "this is a simple string"; - CompressedString cstr = new CompressedString(str); + String str = "---\nf:this is a 
simple string"; + CompressedXContent cstr = new CompressedXContent(str); assertThat(cstr.string(), equalTo(str)); - assertThat(new CompressedString(str), equalTo(cstr)); + assertThat(new CompressedXContent(str), equalTo(cstr)); - String str2 = "this is a simple string 2"; - CompressedString cstr2 = new CompressedString(str2); + String str2 = "---\nf:this is a simple string 2"; + CompressedXContent cstr2 = new CompressedXContent(str2); assertThat(cstr2.string(), not(equalTo(str))); - assertThat(new CompressedString(str2), not(equalTo(cstr))); - assertEquals(new CompressedString(str2), cstr2); + assertThat(new CompressedXContent(str2), not(equalTo(cstr))); + assertEquals(new CompressedXContent(str2), cstr2); } public void testRandom() throws IOException { @@ -70,13 +70,15 @@ public class CompressedStringTests extends ElasticsearchTestCase { Random r = getRandom(); for (int i = 0; i < 1000; i++) { String string = TestUtil.randomUnicodeString(r, 10000); - CompressedString compressedString = new CompressedString(string); - assertThat(compressedString.string(), equalTo(string)); + // hack to make it detected as YAML + string = "---\n" + string; + CompressedXContent compressedXContent = new CompressedXContent(string); + assertThat(compressedXContent.string(), equalTo(string)); } } public void testDifferentCompressedRepresentation() throws Exception { - byte[] b = "abcdefghijabcdefghij".getBytes("UTF-8"); + byte[] b = "---\nf:abcdefghijabcdefghij".getBytes("UTF-8"); CompressorFactory.defaultCompressor(); Compressor compressor = CompressorFactory.defaultCompressor(); @@ -100,14 +102,14 @@ public class CompressedStringTests extends ElasticsearchTestCase { // of different size are being used assertFalse(b1.equals(b2)); // we used the compressed representation directly and did not recompress - assertArrayEquals(b1.toBytes(), new CompressedString(b1).compressed()); - assertArrayEquals(b2.toBytes(), new CompressedString(b2).compressed()); + assertArrayEquals(b1.toBytes(), new 
CompressedXContent(b1).compressed()); + assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed()); // but compressedstring instances are still equal - assertEquals(new CompressedString(b1), new CompressedString(b2)); + assertEquals(new CompressedXContent(b1), new CompressedXContent(b2)); } public void testHashCode() throws IOException { - assertFalse(new CompressedString("a").hashCode() == new CompressedString("b").hashCode()); + assertFalse(new CompressedXContent("{\"a\":\"b\"}").hashCode() == new CompressedXContent("{\"a\":\"c\"}").hashCode()); } } diff --git a/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index e904116221f..f3d89e86ca9 100644 --- a/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -82,7 +82,7 @@ public class XContentFactoryTests extends ElasticsearchTestCase { // this if for {"foo" : 5} in python CBOR bytes = new byte[] {(byte) 0xA1, (byte) 0x63, (byte) 0x66, (byte) 0x6f, (byte) 0x6f, (byte) 0x5}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); - assertThat(((Number) XContentHelper.convertToMap(bytes, true).v2().get("foo")).intValue(), equalTo(5)); + assertThat(((Number) XContentHelper.convertToMap(new BytesArray(bytes), true).v2().get("foo")).intValue(), equalTo(5)); // also make sure major type check doesn't collide with SMILE and JSON, just in case assertThat(CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, SmileConstants.HEADER_BYTE_1), equalTo(false)); diff --git a/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java b/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java index 3115edeca26..484e5c92270 100644 --- a/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java +++ 
b/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java @@ -19,7 +19,8 @@ package org.elasticsearch.index.aliases; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -47,11 +48,11 @@ public class IndexAliasesServiceTests extends ElasticsearchSingleNodeTest { return indexService.aliasesService(); } - public static CompressedString filter(QueryBuilder filterBuilder) throws IOException { + public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - return new CompressedString(builder.string()); + return new CompressedXContent(builder.string()); } @Test diff --git a/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index 144d137f46d..bdd3b716259 100644 --- a/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.mapper.Uid; @@ -63,10 +63,10 @@ public class 
ParentChildFieldDataTests extends AbstractFieldDataTests { @Before public void before() throws Exception { mapperService.merge( - childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true + childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true ); mapperService.merge( - grandChildType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true + grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true ); Document d = new Document(); diff --git a/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index a7f8017efa7..efe22e0eaf8 100644 --- a/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -82,7 +82,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest { new BytesArray(binaryValue1).writeTo(compressed); } final byte[] binaryValue2 = out.bytes().toBytes(); - assertTrue(CompressorFactory.isCompressed(binaryValue2)); + assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes()); @@ -114,7 +114,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest { new BytesArray(original).writeTo(compressed); } final byte[] binaryValue = out.bytes().toBytes(); - assertTrue(CompressorFactory.isCompressed(binaryValue)); + assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue))); ParsedDocument doc = 
mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", binaryValue).endObject().bytes()); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); diff --git a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 620847559ee..98d17ba150d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.merge; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -160,7 +160,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { public void testConcurrentMergeTest() throws Throwable { final MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("test", new CompressedString("{\"test\":{}}"), true); + mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true); final DocumentMapper documentMapper = mapperService.documentMapper("test"); DocumentFieldMappers dfm = documentMapper.mappers(); @@ -186,7 +186,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { Mapping update = doc.dynamicMappingsUpdate(); assert update != null; lastIntroducedFieldName.set(fieldName); - mapperService.merge("test", new CompressedString(update.toString()), false); + mapperService.merge("test", new CompressedXContent(update.toString()), false); } } catch (Throwable t) { error.set(t); diff --git 
a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index e7df72c3dcd..89e88469913 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -432,7 +432,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); Arrays.sort(multiFieldNames); - Map sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressed(), true).v2(); + Map sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2(); @SuppressWarnings("unchecked") Map multiFields = (Map) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap); assertThat(multiFields.size(), equalTo(multiFieldNames.length)); diff --git a/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java index 90da64b3fbd..e08562cfb78 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.source; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -51,7 +52,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .field("field2", "value2") .endObject().bytes()); BytesRef bytes = 
doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false)); } @Test @@ -68,7 +69,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true)); } @Test @@ -84,7 +85,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false)); doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() .field("field1", "value1") @@ -95,6 +96,6 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true)); } } diff --git a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 0760255c2a8..10f33c9025d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; 
import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -193,7 +193,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1(); assertThat(mapper.type(), equalTo("my_type")); @@ -206,12 +206,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true); String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - mapperService.merge("my_type", new CompressedString(mapping), true); + mapperService.merge("my_type", new CompressedXContent(mapping), true); DocumentMapper mapper = mapperService.documentMapper("my_type"); assertThat(mapper.type(), equalTo("my_type")); diff --git a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index e415ea76c07..c5e17a7fe6b 100644 --- 
a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.joda.Joda; @@ -450,7 +450,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -467,7 +467,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -484,7 +484,7 @@ public class TimestampMappingTests extends 
ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -652,7 +652,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject().endObject().string(); // This was causing a NPE - new MappingMetaData(new CompressedString(mapping)); + new MappingMetaData(new CompressedXContent(mapping)); } @Test diff --git a/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index dca10c636fe..32b75094a8d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -27,7 +27,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -196,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", 
Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean()); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean()); assertFalse(mergeResult.hasConflicts()); } @@ -204,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean()); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean()); assertFalse(mergeResult.hasConflicts()); } @@ -213,23 +214,23 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false); + 
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); - CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @Test public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); - CompressedString mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterCreation, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false); + MergeResult mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); - CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @Test @@ -238,12 +239,12 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { //check if default ttl changed when simulate set to true XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl); - CompressedString mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), true); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied - CompressedString mappingAfterMerge = 
indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); client().admin().indices().prepareDelete("testindex").get(); @@ -252,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled(); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); @@ -264,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true); 
assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); @@ -275,21 +276,21 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { mappingWithoutTtl = getMappingWithTtlDisabled("6d"); indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); client().admin().indices().prepareDelete("testindex").get(); // check if switching simulate flag off works if nothing was applied in the beginning indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false); + mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -348,4 +349,4 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject(); } -} \ No newline at end of file +} diff --git a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 1090f6df85a..35ae3dbb8c4 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.mapper.update; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -80,11 +80,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, 
XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), false); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false); // assure we have no conflicts assertThat(mergeResult.buildConflicts().length, equalTo(0)); // make sure mappings applied - CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); } @@ -102,13 +102,13 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); - CompressedString mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), true); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new 
CompressedXContent(mappingUpdate.bytes()), true).mapping(), true); // assure we have conflicts assertThat(mergeResult.buildConflicts().length, equalTo(1)); // make sure simulate flag actually worked - no mappings applied - CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); } @@ -124,9 +124,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); - documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); } @@ -146,11 +146,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); - documentMapper = indexService.mapperService().parse("type", new 
CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); @@ -168,10 +168,10 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.sizeFieldMapper().fieldType().stored()); - documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled)); } @@ -179,9 +179,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { public void testSizeTimestampIndexParsing() throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build()); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json"); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(mapping), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true); assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - documentMapper = 
indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); } diff --git a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java index 9431f8a8cf6..5111b36969e 100644 --- a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java +++ b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java @@ -23,7 +23,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -57,7 +57,7 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git 
a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java index 6d1a3dbb344..fde771c5457 100644 --- a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java +++ b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java @@ -23,7 +23,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -58,7 +58,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 992f1777a2c..b42a622fcf1 100644 --- a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ 
b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -70,7 +70,7 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.lucene.search.Queries; @@ -209,7 +209,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest { MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git a/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java b/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java index d6aa83c341b..8eabad9735d 100644 --- a/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java +++ b/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java @@ -31,7 +31,7 @@ import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; @@ -67,8 +67,8 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeTest { MapperService mapperService = indexService.mapperService(); // Parent/child parsers require that the parent and child type to be presented in mapping // Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used - mapperService.merge(parentType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true); - mapperService.merge(childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true); + mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? 
"type=nested" : "type=object").string()), true); + mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true); return createSearchContext(indexService); } diff --git a/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java b/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java index 6907736d3cb..131e8ad73df 100644 --- a/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java +++ b/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.template; import com.google.common.collect.Lists; import com.google.common.collect.Sets; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; @@ -32,6 +33,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; @@ -42,6 +44,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.junit.Test; +import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Set; @@ -668,4 +671,5 @@ public class SimpleIndexTemplateTests extends ElasticsearchIntegrationTest { assertThat(response.getItems()[0].isFailed(), equalTo(true)); assertThat(response.getItems()[0].getFailureMessage(), containsString("failed to parse filter for alias [alias4]")); } + } diff --git 
a/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java b/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java index 855f21de852..f07d7790ff4 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java +++ b/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -117,7 +117,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest { IndexSearcher searcher = new IndexSearcher(directoryReader); IndexService indexService = createIndex("test"); - indexService.mapperService().merge("test", new CompressedString(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true); + indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true); SearchContext searchContext = createSearchContext(indexService); AggregationContext context = new AggregationContext(searchContext); From 18c8394fbd74257f215f4aa6980b21c10d792134 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 29 May 2015 12:30:43 +0200 Subject: [PATCH 065/123] REST spec: Renamed indices.flush.synced to indices.flush_synced --- rest-api-spec/api/indices.flush_synced.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/api/indices.flush_synced.json 
b/rest-api-spec/api/indices.flush_synced.json index 28cad291e6a..e2d40a9c402 100644 --- a/rest-api-spec/api/indices.flush_synced.json +++ b/rest-api-spec/api/indices.flush_synced.json @@ -1,5 +1,5 @@ { - "indices.flush.synced": { + "indices.flush_synced": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-flush.html", "methods": ["POST", "GET"], "url": { From 784a26321bc1b116939adb32ffc1324e85b4ad89 Mon Sep 17 00:00:00 2001 From: Spyros Kapnissis Date: Wed, 27 May 2015 09:43:33 +0300 Subject: [PATCH 066/123] Query DSL: throw an exception if array passed to `term` query. Closes #11246 Closes #11384 --- .../org/elasticsearch/index/query/TermQueryParser.java | 2 ++ .../index/query/SimpleIndexQueryParserTests.java | 7 +++++++ .../org/elasticsearch/index/query/term-array-invalid.json | 5 +++++ 3 files changed, 14 insertions(+) create mode 100644 src/test/java/org/elasticsearch/index/query/term-array-invalid.json diff --git a/src/main/java/org/elasticsearch/index/query/TermQueryParser.java b/src/main/java/org/elasticsearch/index/query/TermQueryParser.java index 5e6babdbaeb..c0755289f06 100644 --- a/src/main/java/org/elasticsearch/index/query/TermQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TermQueryParser.java @@ -89,6 +89,8 @@ public class TermQueryParser implements QueryParser { fieldName = currentFieldName; value = parser.objectBytes(); } + } else if (token == XContentParser.Token.START_ARRAY) { + throw new QueryParsingException(parseContext, "[term] query does not support array of values"); } } diff --git a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 992f1777a2c..88c3b9d2f90 100644 --- a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -516,6 +516,13 @@ public class 
SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest { assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); } + @Test(expected = QueryParsingException.class) + public void testTermQueryArrayInvalid() throws IOException { + IndexQueryParserService queryParser = queryParser(); + String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-array-invalid.json"); + unwrapTermQuery(queryParser.parse(query).query()); + } + private static TermQuery unwrapTermQuery(Query q) { assertThat(q, instanceOf(TermQuery.class)); return (TermQuery) q; diff --git a/src/test/java/org/elasticsearch/index/query/term-array-invalid.json b/src/test/java/org/elasticsearch/index/query/term-array-invalid.json new file mode 100644 index 00000000000..a198bc2dafc --- /dev/null +++ b/src/test/java/org/elasticsearch/index/query/term-array-invalid.json @@ -0,0 +1,5 @@ +{ + "term": { + "age": [34, 35] + } +} From d23449ee85256d3200c5e99c11f999211da87759 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 29 May 2015 13:47:43 +0200 Subject: [PATCH 067/123] [test] fix name in synced flush test --- rest-api-spec/test/indices.flush/10_basic.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/test/indices.flush/10_basic.yaml b/rest-api-spec/test/indices.flush/10_basic.yaml index f85458da69e..3c8d34be31a 100644 --- a/rest-api-spec/test/indices.flush/10_basic.yaml +++ b/rest-api-spec/test/indices.flush/10_basic.yaml @@ -8,7 +8,7 @@ cluster.health: wait_for_status: yellow - do: - indices.flush.synced: + indices.flush_synced: index: testing - is_false: _shards.failed From 5a9694783b657e58ee2c3e3c84b9696a0fdd4eeb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 29 May 2015 13:52:26 +0200 Subject: [PATCH 068/123] Consolidate shard level modules without logic into IndexShardModule We have a lot of module classes that don't contain any actual logic, only declarative bind actions. 
These classes are unnecessary and can be consolidated into the already existing IndexShardModule --- .../metadata/MetaDataCreateIndexService.java | 4 +- .../org/elasticsearch/index/IndexService.java | 39 +++++++----------- .../bitset/ShardBitsetFilterCacheModule.java | 32 --------------- .../cache/filter/ShardFilterCacheModule.java | 38 ----------------- .../cache/query/ShardQueryCacheModule.java | 32 --------------- .../gateway/IndexShardGatewayModule.java | 35 ---------------- .../index/get/ShardGetModule.java | 32 --------------- .../index/indexing/ShardIndexingModule.java | 34 --------------- .../percolator/PercolatorShardModule.java | 34 --------------- .../index/search/stats/ShardSearchModule.java | 34 --------------- .../index/shard/IndexShardModule.java | 41 ++++++++++++++++++- .../snapshots/IndexShardSnapshotModule.java | 33 --------------- .../index/store/StoreModule.java | 2 - .../termvectors/ShardTermVectorsModule.java | 32 --------------- .../index/shard/IndexShardModuleTests.java | 8 ++-- 15 files changed, 59 insertions(+), 371 deletions(-) delete mode 100644 src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java delete mode 100644 src/main/java/org/elasticsearch/index/get/ShardGetModule.java delete mode 100644 src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java delete mode 100644 src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java delete mode 100644 src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java delete mode 100644 src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java delete mode 100644
src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 41e310a95ad..b16edc21dd7 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -87,7 +87,6 @@ public class MetaDataCreateIndexService extends AbstractComponent { public final static int MAX_INDEX_NAME_BYTES = 255; private static final DefaultIndexTemplateFilter DEFAULT_INDEX_TEMPLATE_FILTER = new DefaultIndexTemplateFilter(); - private final Environment environment; private final ThreadPool threadPool; private final ClusterService clusterService; private final IndicesService indicesService; @@ -100,12 +99,11 @@ public class MetaDataCreateIndexService extends AbstractComponent { private final NodeEnvironment nodeEnv; @Inject - public MetaDataCreateIndexService(Settings settings, Environment environment, ThreadPool threadPool, ClusterService clusterService, + public MetaDataCreateIndexService(Settings settings, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, MetaDataService metaDataService, Version version, @RiverIndexName String riverIndexName, AliasValidator aliasValidator, Set indexTemplateFilters, NodeEnvironment nodeEnv) { super(settings); - this.environment = environment; this.threadPool = threadPool; this.clusterService = clusterService; this.indicesService = indicesService; diff --git a/src/main/java/org/elasticsearch/index/IndexService.java b/src/main/java/org/elasticsearch/index/IndexService.java index 09335126c73..e6ff7f232f2 100644 --- a/src/main/java/org/elasticsearch/index/IndexService.java +++ b/src/main/java/org/elasticsearch/index/IndexService.java @@ -36,36 +36,37 @@ import 
org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCacheModule; +import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; import org.elasticsearch.index.cache.filter.ShardFilterCache; -import org.elasticsearch.index.cache.filter.ShardFilterCacheModule; -import org.elasticsearch.index.cache.query.ShardQueryCacheModule; +import org.elasticsearch.index.cache.query.ShardQueryCache; import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.fielddata.ShardFieldDataModule; -import org.elasticsearch.index.gateway.IndexShardGatewayModule; +import org.elasticsearch.index.fielddata.ShardFieldData; +import org.elasticsearch.index.gateway.IndexShardGateway; import org.elasticsearch.index.gateway.IndexShardGatewayService; -import org.elasticsearch.index.get.ShardGetModule; -import org.elasticsearch.index.indexing.ShardIndexingModule; +import org.elasticsearch.index.get.ShardGetService; +import org.elasticsearch.index.indexing.ShardIndexingService; +import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.policy.MergePolicyModule; import org.elasticsearch.index.merge.policy.MergePolicyProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule; import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.percolator.PercolatorShardModule; +import org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.IndexQueryParserService; -import 
org.elasticsearch.index.search.stats.ShardSearchModule; +import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; +import org.elasticsearch.index.search.stats.ShardSearchService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; import org.elasticsearch.index.shard.*; import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.index.snapshots.IndexShardSnapshotModule; +import org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreModule; -import org.elasticsearch.index.suggest.SuggestShardModule; -import org.elasticsearch.index.termvectors.ShardTermVectorsModule; +import org.elasticsearch.index.suggest.stats.ShardSuggestService; +import org.elasticsearch.index.termvectors.ShardTermVectorsService; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; @@ -307,24 +308,12 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone final ShardFilterCache shardFilterCache = new ShardFilterCache(shardId, injector.getInstance(IndicesFilterCache.class)); ModulesBuilder modules = new ModulesBuilder(); modules.add(new ShardsPluginsModule(indexSettings, pluginsService)); - modules.add(new IndexShardModule(shardId, primary, indexSettings)); - modules.add(new ShardIndexingModule()); - modules.add(new ShardSearchModule()); - modules.add(new ShardGetModule()); + modules.add(new IndexShardModule(shardId, primary, indexSettings, shardFilterCache)); modules.add(new StoreModule(injector.getInstance(IndexStore.class).shardDirectory(), lock, new StoreCloseListener(shardId, canDeleteShardContent, shardFilterCache), path)); modules.add(new DeletionPolicyModule(indexSettings)); modules.add(new 
MergePolicyModule(indexSettings)); modules.add(new MergeSchedulerModule(indexSettings)); - modules.add(new ShardFilterCacheModule(shardFilterCache)); - modules.add(new ShardQueryCacheModule()); - modules.add(new ShardBitsetFilterCacheModule()); - modules.add(new ShardFieldDataModule()); - modules.add(new IndexShardGatewayModule()); - modules.add(new PercolatorShardModule()); - modules.add(new ShardTermVectorsModule()); - modules.add(new IndexShardSnapshotModule()); - modules.add(new SuggestShardModule()); try { shardInjector = modules.createChildInjector(injector); } catch (CreationException e) { diff --git a/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java b/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java deleted file mode 100644 index c0087119f66..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.bitset; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardBitsetFilterCacheModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardBitsetFilterCache.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java deleted file mode 100644 index 37bcb805768..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.filter; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardFilterCacheModule extends AbstractModule { - - private final ShardFilterCache shardFilterCache; - - public ShardFilterCacheModule(ShardFilterCache shardFilterCache) { - this.shardFilterCache = shardFilterCache; - } - - @Override - protected void configure() { - bind(ShardFilterCache.class).toInstance(shardFilterCache); - } -} diff --git a/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java b/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java deleted file mode 100644 index 938f016a8c3..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.query; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardQueryCacheModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardQueryCache.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java b/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java deleted file mode 100644 index 11ff2cf717e..00000000000 --- a/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.gateway; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class IndexShardGatewayModule extends AbstractModule { - - - @Override - protected void configure() { - bind(IndexShardGateway.class).asEagerSingleton(); - bind(IndexShardGatewayService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/get/ShardGetModule.java b/src/main/java/org/elasticsearch/index/get/ShardGetModule.java deleted file mode 100644 index bc1df275dc5..00000000000 --- a/src/main/java/org/elasticsearch/index/get/ShardGetModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.get; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardGetModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardGetService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java b/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java deleted file mode 100644 index 4d5e29daca3..00000000000 --- a/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.indexing; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; - -/** - */ -public class ShardIndexingModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardIndexingService.class).asEagerSingleton(); - bind(ShardSlowLogIndexingService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java b/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java deleted file mode 100644 index aba7e10fb2e..00000000000 --- a/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.percolator; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; - -/** - * - */ -public class PercolatorShardModule extends AbstractModule { - - @Override - protected void configure() { - bind(PercolatorQueriesRegistry.class).asEagerSingleton(); - bind(ShardPercolateService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java b/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java deleted file mode 100644 index 28f8c09c8c2..00000000000 --- a/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.stats; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; - -/** - */ -public class ShardSearchModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardSearchService.class).asEagerSingleton(); - bind(ShardSlowLogSearchService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java index fc44f11eab9..672b63bfb1c 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java @@ -22,8 +22,27 @@ package org.elasticsearch.index.shard; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.ShardLock; +import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; +import org.elasticsearch.index.cache.filter.ShardFilterCache; +import org.elasticsearch.index.cache.query.ShardQueryCache; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.fielddata.ShardFieldData; +import org.elasticsearch.index.gateway.IndexShardGateway; +import org.elasticsearch.index.gateway.IndexShardGatewayService; +import org.elasticsearch.index.get.ShardGetService; +import org.elasticsearch.index.indexing.ShardIndexingService; +import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.percolator.stats.ShardPercolateService; +import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; +import org.elasticsearch.index.search.stats.ShardSearchService; +import 
org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService; +import org.elasticsearch.index.store.DirectoryService; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.suggest.stats.ShardSuggestService; +import org.elasticsearch.index.termvectors.ShardTermVectorsService; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.index.warmer.ShardIndexWarmerService; @@ -43,9 +62,11 @@ public class IndexShardModule extends AbstractModule { private final ShardId shardId; private final Settings settings; private final boolean primary; + private final ShardFilterCache shardFilterCache; - public IndexShardModule(ShardId shardId, boolean primary, Settings settings) { + public IndexShardModule(ShardId shardId, boolean primary, Settings settings, ShardFilterCache shardFilterCache) { this.settings = settings; + this.shardFilterCache = shardFilterCache; this.shardId = shardId; this.primary = primary; if (settings.get("index.translog.type") != null) { @@ -69,7 +90,25 @@ public class IndexShardModule extends AbstractModule { } bind(EngineFactory.class).to(settings.getAsClass(ENGINE_FACTORY, DEFAULT_ENGINE_FACTORY_CLASS, ENGINE_PREFIX, ENGINE_SUFFIX)); + bind(ShardIndexWarmerService.class).asEagerSingleton(); + bind(ShardIndexingService.class).asEagerSingleton(); + bind(ShardSlowLogIndexingService.class).asEagerSingleton(); + bind(ShardSearchService.class).asEagerSingleton(); + bind(ShardSlowLogSearchService.class).asEagerSingleton(); + bind(ShardGetService.class).asEagerSingleton(); + bind(ShardFilterCache.class).toInstance(shardFilterCache); + bind(ShardQueryCache.class).asEagerSingleton(); + bind(ShardBitsetFilterCache.class).asEagerSingleton(); + bind(ShardFieldData.class).asEagerSingleton(); + bind(IndexShardGateway.class).asEagerSingleton(); + bind(IndexShardGatewayService.class).asEagerSingleton(); + bind(PercolatorQueriesRegistry.class).asEagerSingleton(); + bind(ShardPercolateService.class).asEagerSingleton(); 
+ bind(ShardTermVectorsService.class).asEagerSingleton(); + bind(IndexShardSnapshotAndRestoreService.class).asEagerSingleton(); + bind(ShardSuggestService.class).asEagerSingleton(); } + } \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java b/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java deleted file mode 100644 index c0cf9788400..00000000000 --- a/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.snapshots; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * This shard-level module configures {@link IndexShardSnapshotAndRestoreService} - */ -public class IndexShardSnapshotModule extends AbstractModule { - - @Override - protected void configure() { - bind(IndexShardSnapshotAndRestoreService.class).asEagerSingleton(); - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/index/store/StoreModule.java b/src/main/java/org/elasticsearch/index/store/StoreModule.java index fe0e314ba16..fccd2de2e43 100644 --- a/src/main/java/org/elasticsearch/index/store/StoreModule.java +++ b/src/main/java/org/elasticsearch/index/store/StoreModule.java @@ -27,8 +27,6 @@ import org.elasticsearch.index.shard.ShardPath; * */ public class StoreModule extends AbstractModule { - - private final ShardLock lock; private final Store.OnClose closeCallback; private final ShardPath path; diff --git a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java deleted file mode 100644 index 45a7d14b703..00000000000 --- a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.termvectors; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class ShardTermVectorsModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardTermVectorsService.class).asEagerSingleton(); - } -} diff --git a/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java b/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java index b5ac0cce405..8238c159df2 100644 --- a/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java +++ b/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java @@ -41,10 +41,10 @@ public class IndexShardModuleTests extends ElasticsearchTestCase { .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .build(); - IndexShardModule ism1 = new IndexShardModule(shardId, true, regularSettings); - IndexShardModule ism2 = new IndexShardModule(shardId, false, regularSettings); - IndexShardModule ism3 = new IndexShardModule(shardId, true, shadowSettings); - IndexShardModule ism4 = new IndexShardModule(shardId, false, shadowSettings); + IndexShardModule ism1 = new IndexShardModule(shardId, true, regularSettings, null); + IndexShardModule ism2 = new IndexShardModule(shardId, false, regularSettings, null); + IndexShardModule ism3 = new IndexShardModule(shardId, true, shadowSettings, null); + IndexShardModule ism4 = new IndexShardModule(shardId, false, shadowSettings, null); assertFalse("no shadow replicas for normal settings", ism1.useShadowEngine()); assertFalse("no shadow replicas for normal settings", ism2.useShadowEngine()); From c7ca64cc080ba9426e82e9223f71d0e9177a7868 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 May 2015 16:14:54 +0200 Subject: [PATCH 069/123] Fix typed parameters in IndexRequestBuilder and CreateIndexRequestBuilder 
IndexRequestBuilder#setSource as well as CreateIndexRequestBuilder#setSettings and CreateIndexRequestBuilder#setSouce() will not work with Map argument although the API looks like it should. This PR fixes the problem introducing correct wildcard parameters and adds tests. Closes #10825 --- .../indices/create/CreateIndexRequest.java | 4 +- .../create/CreateIndexRequestBuilder.java | 4 +- .../action/index/IndexRequestBuilder.java | 4 +- .../create/CreateIndexRequestBuilderTest.java | 112 ++++++++++++++++++ .../action/index/IndexRequestBuilderTest.java | 87 ++++++++++++++ .../HeadersAndContextCopyClientTests.java | 52 +++----- .../org/elasticsearch/rest/NoOpClient.java | 54 +++++++++ 7 files changed, 276 insertions(+), 41 deletions(-) create mode 100644 src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java create mode 100644 src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java create mode 100644 src/test/java/org/elasticsearch/rest/NoOpClient.java diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 873e6e9e8ca..3a174484ef9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -377,9 +377,9 @@ public class CreateIndexRequest extends AcknowledgedRequest * Sets the settings and mappings as a single source. 
*/ @SuppressWarnings("unchecked") - public CreateIndexRequest source(Map source) { + public CreateIndexRequest source(Map source) { boolean found = false; - for (Map.Entry entry : source.entrySet()) { + for (Map.Entry entry : source.entrySet()) { String name = entry.getKey(); if (name.equals("settings")) { found = true; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 12648db563a..637c6d7ba08 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -93,7 +93,7 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder source) { + public CreateIndexRequestBuilder setSettings(Map source) { request.settings(source); return this; } @@ -223,7 +223,7 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder source) { + public CreateIndexRequestBuilder setSource(Map source) { request.source(source); return this; } diff --git a/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index cf494358628..5b6674e38a1 100644 --- a/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -90,7 +90,7 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder source) { + public IndexRequestBuilder setSource(Map source) { request.source(source); return this; } @@ -100,7 +100,7 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder source, XContentType contentType) { + public IndexRequestBuilder setSource(Map source, XContentType contentType) { request.source(source, contentType); return this; } diff --git 
a/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java b/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java new file mode 100644 index 00000000000..31576c38d06 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.create; + +import org.elasticsearch.action.index.IndexRequestBuilderTest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class CreateIndexRequestBuilderTest extends ElasticsearchTestCase { + + private static final String KEY = "my.settings.key"; + private static final String VALUE = "my.settings.value"; + private NoOpClient testClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.testClient = new NoOpClient(getTestName()); + } + + @Override + @After + public void tearDown() throws Exception { + this.testClient.close(); + super.tearDown(); + } + + /** + * test setting the source with available setters + */ + @Test + public void testSetSource() throws IOException { + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + builder.setSource("{\""+KEY+"\" : \""+VALUE+"\"}"); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + XContentBuilder xContent = XContentFactory.jsonBuilder().startObject().field(KEY, VALUE).endObject(); + xContent.close(); + builder.setSource(xContent); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + ByteArrayOutputStream docOut = new ByteArrayOutputStream(); + XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field(KEY, VALUE).endObject(); + doc.close(); + builder.setSource(docOut.toByteArray()); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + Map settingsMap = new HashMap<>(); + settingsMap.put(KEY, 
VALUE); + builder.setSettings(settingsMap); + assertEquals(VALUE, builder.request().settings().get(KEY)); + } + + /** + * test setting the settings with available setters + */ + @Test + public void testSetSettings() throws IOException { + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + builder.setSettings(KEY, VALUE); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings("{\""+KEY+"\" : \""+VALUE+"\"}"); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings(Settings.builder().put(KEY, VALUE)); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings(Settings.builder().put(KEY, VALUE).build()); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + Map settingsMap = new HashMap<>(); + settingsMap.put(KEY, VALUE); + builder.setSettings(settingsMap); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + XContentBuilder xContent = XContentFactory.jsonBuilder().startObject().field(KEY, VALUE).endObject(); + xContent.close(); + builder.setSettings(xContent); + assertEquals(VALUE, builder.request().settings().get(KEY)); + } + +} diff --git a/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java b/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java new file mode 100644 index 00000000000..478e12051d6 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.index; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; +import java.util.Map; + +public class IndexRequestBuilderTest extends ElasticsearchTestCase { + + private static final String EXPECTED_SOURCE = "{\"SomeKey\":\"SomeValue\"}"; + private NoOpClient testClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.testClient = new NoOpClient(getTestName()); + } + + @Override + @After + public void tearDown() throws Exception { + this.testClient.close(); + super.tearDown(); + } + + /** + * test setting the source for the request with different available setters + */ + @Test + public void testSetSource() throws Exception { + IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient, IndexAction.INSTANCE); + Map source = new HashMap<>(); + source.put("SomeKey", "SomeValue"); + indexRequestBuilder.setSource(source); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + indexRequestBuilder.setSource(source, XContentType.JSON); + assertEquals(EXPECTED_SOURCE, 
XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + indexRequestBuilder.setSource("SomeKey", "SomeValue"); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + // force the Object... setter + indexRequestBuilder.setSource((Object) "SomeKey", "SomeValue"); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + ByteArrayOutputStream docOut = new ByteArrayOutputStream(); + XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field("SomeKey", "SomeValue").endObject(); + doc.close(); + indexRequestBuilder.setSource(docOut.toByteArray()); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + doc = XContentFactory.jsonBuilder().startObject().field("SomeKey", "SomeValue").endObject(); + doc.close(); + indexRequestBuilder.setSource(doc); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + } +} diff --git a/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java b/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java index 9d87de9a354..6a110cd5da4 100644 --- a/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java +++ b/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java @@ -20,8 +20,9 @@ package org.elasticsearch.rest; import com.google.common.collect.Maps; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.*; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; @@ -31,24 +32,26 @@ import 
org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.*; -import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.client.support.Headers; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.Requests; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.threadpool.ThreadPool; import org.junit.Test; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.is; public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { @@ -108,7 +111,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { SearchRequest searchRequest = Requests.searchRequest(); putHeaders(searchRequest, transportHeaders); @@ -154,7 +157,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new 
NoOpClient(), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) { ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest(); putHeaders(clusterHealthRequest, transportHeaders); @@ -200,7 +203,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { CreateIndexRequest createIndexRequest = Requests.createIndexRequest("test"); putHeaders(createIndexRequest, transportHeaders); @@ -246,7 +249,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.prepareIndex("index", "type"), @@ -287,7 +290,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.admin().cluster().prepareNodesInfo(), @@ -327,7 +330,7 @@ public 
class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.admin().indices().prepareValidateQuery(), @@ -420,25 +423,4 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { } } } - - private class NoOpClient extends AbstractClient { - - public NoOpClient() { - super(Settings.EMPTY, new ThreadPool(getTestName()), Headers.EMPTY); - } - - @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { - listener.onResponse(null); - } - - @Override - public void close() { - try { - terminate(threadPool()); - } catch (Throwable t) { - throw new ElasticsearchException(t.getMessage(), t); - } - } - } } diff --git a/src/test/java/org/elasticsearch/rest/NoOpClient.java b/src/test/java/org/elasticsearch/rest/NoOpClient.java new file mode 100644 index 00000000000..245bdb96a33 --- /dev/null +++ b/src/test/java/org/elasticsearch/rest/NoOpClient.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.support.AbstractClient; +import org.elasticsearch.client.support.Headers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.TimeUnit; + +public class NoOpClient extends AbstractClient { + + public NoOpClient(String testName) { + super(Settings.EMPTY, new ThreadPool(testName), Headers.EMPTY); + } + + @Override + protected > void doExecute(Action action, Request request, ActionListener listener) { + listener.onResponse(null); + } + + @Override + public void close() { + try { + ThreadPool.terminate(threadPool(), 10, TimeUnit.SECONDS); + } catch (Throwable t) { + throw new ElasticsearchException(t.getMessage(), t); + } + } +} \ No newline at end of file From b6a3952036b1e3297ae56de385aecf7adeab8f37 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 May 2015 17:56:51 +0200 Subject: [PATCH 070/123] Internal: Use DEFLATE instead of LZF for compression. LZF only stays for backward-compatibility reasons and can only read, not write. DEFLATE is configured to use level=3, which is a nice trade-off between speed and compression ratio and is the same as we use for Lucene's high compression codec. 
--- .../client/transport/TransportClient.java | 11 +- .../common/compress/Compressor.java | 5 - .../common/compress/CompressorFactory.java | 64 ++----- .../common/compress/NotXContentException.java | 7 +- .../compress/deflate/DeflateCompressor.java | 156 ++++++++++++++++++ .../common/compress/lzf/LZFCompressor.java | 22 +-- .../common/xcontent/XContentHelper.java | 2 +- .../publish/PublishClusterStateAction.java | 20 +-- .../index/mapper/DocumentMapper.java | 10 +- .../index/mapper/core/BinaryFieldMapper.java | 12 +- .../java/org/elasticsearch/node/Node.java | 2 - .../blobstore/BlobStoreRepository.java | 68 +++++--- ...ava => AbstractCompressedStreamTests.java} | 48 +++--- ...a => AbstractCompressedXContentTests.java} | 63 +++---- .../deflate/DeflateCompressedStreamTests.java | 30 ++++ .../deflate/DeflateXContentTests.java | 30 ++++ .../compress/lzf}/CompressedStreamOutput.java | 2 +- .../lzf/LZFCompressedStreamOutput.java | 2 +- .../lzf/LZFCompressedStreamTests.java | 30 ++++ .../compress/lzf/LZFTestCompressor.java | 34 ++++ .../common/compress/lzf/LZFXContentTests.java | 30 ++++ .../compress/SearchSourceCompressTests.java | 16 +- 22 files changed, 480 insertions(+), 184 deletions(-) create mode 100644 src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java rename src/test/java/org/elasticsearch/common/compress/{CompressedStreamTests.java => AbstractCompressedStreamTests.java} (97%) rename src/test/java/org/elasticsearch/common/compress/{CompressedXContentTests.java => AbstractCompressedXContentTests.java} (62%) create mode 100644 src/test/java/org/elasticsearch/common/compress/deflate/DeflateCompressedStreamTests.java create mode 100644 src/test/java/org/elasticsearch/common/compress/deflate/DeflateXContentTests.java rename src/{main/java/org/elasticsearch/common/compress => test/java/org/elasticsearch/common/compress/lzf}/CompressedStreamOutput.java (98%) rename src/{main => 
test}/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamOutput.java (97%) create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamTests.java create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFTestCompressor.java create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFXContentTests.java diff --git a/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 9e3cb1f0f80..d63e94d2ffe 100644 --- a/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -20,8 +20,14 @@ package org.elasticsearch.client.transport; import com.google.common.collect.ImmutableList; + import org.elasticsearch.Version; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.client.support.Headers; @@ -30,7 +36,6 @@ import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.network.NetworkModule; @@ -122,8 +127,6 @@ public class TransportClient extends AbstractClient { Version version = Version.CURRENT; - CompressorFactory.configure(this.settings); - final ThreadPool threadPool = new 
ThreadPool(settings); boolean success = false; diff --git a/src/main/java/org/elasticsearch/common/compress/Compressor.java b/src/main/java/org/elasticsearch/common/compress/Compressor.java index d8f0ae82bf6..252fad09807 100644 --- a/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -23,7 +23,6 @@ import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; @@ -32,10 +31,6 @@ import java.io.IOException; */ public interface Compressor { - String type(); - - void configure(Settings settings); - boolean isCompressed(BytesReference bytes); boolean isCompressed(ChannelBuffer buffer); diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java index 9873123e558..72c57a97a01 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java @@ -19,70 +19,36 @@ package org.elasticsearch.common.compress; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.compress.deflate.DeflateCompressor; import org.elasticsearch.common.compress.lzf.LZFCompressor; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.Loggers; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; -import java.util.List; -import java.util.Locale; /** */ public class CompressorFactory { - private static final LZFCompressor LZF = new LZFCompressor(); - private static final Compressor[] compressors; - private static final ImmutableMap compressorsByType; - private static Compressor defaultCompressor; + private static volatile Compressor defaultCompressor; static { - List compressorsX = Lists.newArrayList(); - compressorsX.add(LZF); - - compressors = compressorsX.toArray(new Compressor[compressorsX.size()]); - MapBuilder compressorsByTypeX = MapBuilder.newMapBuilder(); - for (Compressor compressor : compressors) { - compressorsByTypeX.put(compressor.type(), compressor); - } - compressorsByType = compressorsByTypeX.immutableMap(); - - defaultCompressor = LZF; + compressors = new Compressor[] { + new LZFCompressor(), + new DeflateCompressor() + }; + defaultCompressor = new DeflateCompressor(); } - public static synchronized void configure(Settings settings) { - for (Compressor compressor : compressors) { - compressor.configure(settings); - } - String defaultType = settings.get("compress.default.type", "lzf").toLowerCase(Locale.ENGLISH); - boolean found = false; - for (Compressor compressor : compressors) { - if (defaultType.equalsIgnoreCase(compressor.type())) { - defaultCompressor = compressor; - found = true; - break; - } - } - if (!found) { - Loggers.getLogger(CompressorFactory.class).warn("failed to find default type [{}]", defaultType); - } - } - - public static synchronized void setDefaultCompressor(Compressor defaultCompressor) { + public static void setDefaultCompressor(Compressor defaultCompressor) { CompressorFactory.defaultCompressor = defaultCompressor; } @@ -94,6 +60,10 @@ public class CompressorFactory { return 
compressor(bytes) != null; } + /** + * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs + */ + @Deprecated public static boolean isCompressed(IndexInput in) throws IOException { return compressor(in) != null; } @@ -127,6 +97,10 @@ public class CompressorFactory { throw new NotCompressedException(); } + /** + * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs + */ + @Deprecated @Nullable public static Compressor compressor(IndexInput in) throws IOException { for (Compressor compressor : compressors) { @@ -137,10 +111,6 @@ public class CompressorFactory { return null; } - public static Compressor compressor(String type) { - return compressorsByType.get(type); - } - /** * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. */ @@ -160,7 +130,7 @@ public class CompressorFactory { public static BytesReference uncompress(BytesReference bytes) throws IOException { Compressor compressor = compressor(bytes); if (compressor == null) { - throw new IllegalArgumentException("Bytes are not compressed"); + throw new NotCompressedException(); } return uncompress(bytes, compressor); } diff --git a/src/main/java/org/elasticsearch/common/compress/NotXContentException.java b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java index bca35c317d8..68bbf4da81c 100644 --- a/src/main/java/org/elasticsearch/common/compress/NotXContentException.java +++ b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java @@ -19,9 +19,10 @@ package org.elasticsearch.common.compress; -/** Exception indicating that we were expecting something compressed, which - * was not compressed or corrupted so that the compression format could not - * be detected. */ +import org.elasticsearch.common.xcontent.XContent; + +/** Exception indicating that we were expecting some {@link XContent} but could + * not detect its type. 
*/ public class NotXContentException extends RuntimeException { public NotXContentException(String message) { diff --git a/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java b/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java new file mode 100644 index 00000000000..b2aea1fa0ce --- /dev/null +++ b/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.compress.deflate; + +import org.apache.lucene.store.IndexInput; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedIndexInput; +import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.jboss.netty.buffer.ChannelBuffer; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.zip.Deflater; +import java.util.zip.DeflaterOutputStream; +import java.util.zip.Inflater; +import java.util.zip.InflaterInputStream; + +/** + * {@link Compressor} implementation based on the DEFLATE compression algorithm. 
+ */ +public class DeflateCompressor implements Compressor { + + // An arbitrary header that we use to identify compressed streams + // It needs to be different from other compressors and to not be specific + // enough so that no stream starting with these bytes could be detected as + // a XContent + private static final byte[] HEADER = new byte[] { 'D', 'F', 'L', '\0' }; + // 3 is a good trade-off between speed and compression ratio + private static final int LEVEL = 3; + // We use buffering on the input and ouput of in/def-laters in order to + // limit the number of JNI calls + private static final int BUFFER_SIZE = 4096; + + @Override + public boolean isCompressed(BytesReference bytes) { + if (bytes.length() < HEADER.length) { + return false; + } + for (int i = 0; i < HEADER.length; ++i) { + if (bytes.get(i) != HEADER[i]) { + return false; + } + } + return true; + } + + @Override + public boolean isCompressed(ChannelBuffer buffer) { + if (buffer.readableBytes() < HEADER.length) { + return false; + } + final int offset = buffer.readerIndex(); + for (int i = 0; i < HEADER.length; ++i) { + if (buffer.getByte(offset + i) != HEADER[i]) { + return false; + } + } + return true; + } + + @Override + public StreamInput streamInput(StreamInput in) throws IOException { + final byte[] headerBytes = new byte[HEADER.length]; + int len = 0; + while (len < headerBytes.length) { + final int read = in.read(headerBytes, len, headerBytes.length - len); + if (read == -1) { + break; + } + len += read; + } + if (len != HEADER.length || Arrays.equals(headerBytes, HEADER) == false) { + throw new IllegalArgumentException("Input stream is not compressed with DEFLATE!"); + } + + final boolean nowrap = true; + final Inflater inflater = new Inflater(nowrap); + InputStream decompressedIn = new InflaterInputStream(in, inflater, BUFFER_SIZE); + decompressedIn = new BufferedInputStream(decompressedIn, BUFFER_SIZE); + return new InputStreamStreamInput(decompressedIn) { + private boolean closed = 
false; + + public void close() throws IOException { + try { + super.close(); + } finally { + if (closed == false) { + // important to release native memory + inflater.end(); + closed = true; + } + } + } + }; + } + + @Override + public StreamOutput streamOutput(StreamOutput out) throws IOException { + out.writeBytes(HEADER); + final boolean nowrap = true; + final Deflater deflater = new Deflater(LEVEL, nowrap); + final boolean syncFlush = true; + OutputStream compressedOut = new DeflaterOutputStream(out, deflater, BUFFER_SIZE, syncFlush); + compressedOut = new BufferedOutputStream(compressedOut, BUFFER_SIZE); + return new OutputStreamStreamOutput(compressedOut) { + private boolean closed = false; + + public void close() throws IOException { + try { + super.close(); + } finally { + if (closed == false) { + // important to release native memory + deflater.end(); + closed = true; + } + } + } + }; + } + + @Override + public boolean isCompressed(IndexInput in) throws IOException { + return false; + } + + @Override + public CompressedIndexInput indexInput(IndexInput in) throws IOException { + throw new UnsupportedOperationException(); + } +} diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java index c5c937ccbb9..3646595f724 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java @@ -25,25 +25,23 @@ import com.ning.compress.lzf.util.ChunkDecoderFactory; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedIndexInput; -import org.elasticsearch.common.compress.CompressedStreamInput; -import org.elasticsearch.common.compress.CompressedStreamOutput; import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.compress.deflate.DeflateCompressor; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; /** + * @deprecated Use {@link DeflateCompressor} instead */ +@Deprecated public class LZFCompressor implements Compressor { static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0}; - public static final String TYPE = "lzf"; - private ChunkDecoder decoder; public LZFCompressor() { @@ -52,14 +50,6 @@ public class LZFCompressor implements Compressor { this.decoder.getClass().getSimpleName()); } - @Override - public String type() { - return TYPE; - } - - @Override - public void configure(Settings settings) {} - @Override public boolean isCompressed(BytesReference bytes) { return bytes.length() >= 3 && @@ -95,13 +85,13 @@ public class LZFCompressor implements Compressor { } @Override - public CompressedStreamInput streamInput(StreamInput in) throws IOException { + public StreamInput streamInput(StreamInput in) throws IOException { return new LZFCompressedStreamInput(in, decoder); } @Override - public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException { - return new LZFCompressedStreamOutput(out); + public StreamOutput streamOutput(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("LZF is only here for back compat, no write support"); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 5325950e202..4efd18e8fa9 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -417,10 +417,10 @@ public class XContentHelper { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { InputStream 
compressedStreamInput = compressor.streamInput(source.streamInput()); - XContentType contentType = XContentFactory.xContentType(compressedStreamInput); if (compressedStreamInput.markSupported() == false) { compressedStreamInput = new BufferedInputStream(compressedStreamInput); } + XContentType contentType = XContentFactory.xContentType(compressedStreamInput); if (contentType == builder.contentType()) { builder.rawField(field, compressedStreamInput); } else { diff --git a/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 92d5bad4bf6..7fd585a6a41 100644 --- a/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -227,21 +227,21 @@ public class PublishClusterStateAction extends AbstractComponent { public static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); - StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); - stream.setVersion(nodeVersion); - stream.writeBoolean(true); - clusterState.writeTo(stream); - stream.close(); + try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) { + stream.setVersion(nodeVersion); + stream.writeBoolean(true); + clusterState.writeTo(stream); + } return bStream.bytes(); } public static BytesReference serializeDiffClusterState(Diff diff, Version nodeVersion) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); - StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); - stream.setVersion(nodeVersion); - stream.writeBoolean(false); - diff.writeTo(stream); - stream.close(); + try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) { + 
stream.setVersion(nodeVersion); + stream.writeBoolean(false); + diff.writeTo(stream); + } return bStream.bytes(); } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 2c4bd053251..7f8bb8ffa0a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -468,11 +468,11 @@ public class DocumentMapper implements ToXContent { private void refreshSource() throws ElasticsearchGenerationException { try { BytesStreamOutput bStream = new BytesStreamOutput(); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream)); - builder.startObject(); - toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - builder.close(); + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream))) { + builder.startObject(); + toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + } mappingSource = new CompressedXContent(bStream.bytes()); } catch (Exception e) { throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 91375efed47..cda0877fdae 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -37,8 +37,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.NotXContentException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; 
-import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; @@ -150,10 +148,12 @@ public class BinaryFieldMapper extends AbstractFieldMapper { try { return CompressorFactory.uncompressIfNeeded(bytes); } catch (NotXContentException e) { - // This is a BUG! We try to decompress by detecting a header in - // the stored bytes but since we accept arbitrary bytes, we have - // no guarantee that uncompressed bytes will be detected as - // compressed! + // NOTE: previous versions of Elasticsearch used to try to detect if + // data was compressed. However this could cause decompression failures + // as a user may have submitted arbitrary data which looks like it is + // compressed to elasticsearch but is not. So we removed the ability to + // compress binary fields and keep this empty catch block for backward + // compatibility with 1.x } } return bytes; diff --git a/src/main/java/org/elasticsearch/node/Node.java b/src/main/java/org/elasticsearch/node/Node.java index 820c3a84534..355bea50643 100644 --- a/src/main/java/org/elasticsearch/node/Node.java +++ b/src/main/java/org/elasticsearch/node/Node.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.lease.Releasable; @@ -151,7 +150,6 @@ public class Node implements Releasable { // create the environment based on the finalized (processed) view of the settings this.environment = new Environment(this.settings()); - CompressorFactory.configure(settings); final NodeEnvironment nodeEnvironment; try { 
nodeEnvironment = new NodeEnvironment(this.settings, this.environment); diff --git a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 83baf8d1e2d..2cf35a9905d 100644 --- a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -26,12 +26,13 @@ import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; import org.apache.lucene.store.RateLimiter; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.SnapshotId; -import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -39,13 +40,19 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; 
+import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.shard.IndexShardException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -55,14 +62,21 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.RepositoryVerificationException; -import org.elasticsearch.snapshots.*; +import org.elasticsearch.snapshots.InvalidSnapshotNameException; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotCreationException; +import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotMissingException; +import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.NoSuchFileException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import static com.google.common.collect.Lists.newArrayList; @@ -230,19 +244,15 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent Date: Fri, 29 May 2015 17:07:04 +0200 Subject: [PATCH 071/123] Mappings: Refactor core index/query time properties into FieldType Mappers are currently used at both index and query time for deciding how to "use" a field. For #8871, we need the index wide view of mappings to have a unified set of settings for each field of a given name within the index. This change moves all the current settings (and methods defining query time behavior) into subclasses of FieldType. 
In a future PR, this will allow storing the field type at the index level, instead of mappers (which can still have settings that differ per document type). The change is quite large (I'm sorry). I could not see a way to migrate to this in a more piecemeal way. I did leave out cutting over callers of the query methods to using the field type, as that can be done in a follow up. --- .../classic/MapperQueryParser.java | 8 +- .../analyze/TransportAnalyzeAction.java | 4 +- .../TransportGetFieldMappingsIndexAction.java | 16 +- .../cluster/metadata/MappingMetaData.java | 2 +- .../common/geo/builders/ShapeBuilder.java | 2 +- .../index/fielddata/FieldDataType.java | 2 +- .../index/fielddata/IndexFieldData.java | 3 +- .../index/fielddata/IndexFieldDataCache.java | 5 +- .../fielddata/IndexFieldDataService.java | 9 +- .../index/fielddata/ShardFieldData.java | 6 +- .../GlobalOrdinalsIndexFieldData.java | 7 +- .../InternalGlobalOrdinalsIndexFieldData.java | 3 +- .../plain/AbstractIndexFieldData.java | 7 +- .../plain/AbstractIndexGeoPointFieldData.java | 2 +- .../plain/AbstractIndexOrdinalsFieldData.java | 2 +- .../plain/BinaryDVIndexFieldData.java | 2 +- .../plain/BinaryDVNumericIndexFieldData.java | 2 +- .../plain/BytesBinaryDVIndexFieldData.java | 6 +- .../plain/DisabledIndexFieldData.java | 4 +- .../plain/DocValuesIndexFieldData.java | 17 +- .../plain/DoubleArrayIndexFieldData.java | 5 +- .../plain/FSTBytesIndexFieldData.java | 5 +- .../plain/FloatArrayIndexFieldData.java | 5 +- .../plain/GeoPointBinaryDVIndexFieldData.java | 7 +- .../GeoPointCompressedIndexFieldData.java | 7 +- .../GeoPointDoubleArrayIndexFieldData.java | 5 +- .../fielddata/plain/IndexIndexFieldData.java | 5 +- .../plain/NumericDVIndexFieldData.java | 2 +- .../plain/PackedArrayIndexFieldData.java | 5 +- .../plain/PagedBytesIndexFieldData.java | 5 +- .../plain/ParentChildIndexFieldData.java | 9 +- .../plain/SortedNumericDVIndexFieldData.java | 2 +- .../SortedSetDVOrdinalsIndexFieldData.java | 2 +- 
.../fieldvisitor/SingleFieldsVisitor.java | 2 +- .../index/mapper/DocumentFieldMappers.java | 6 +- .../index/mapper/FieldMapper.java | 152 +----- .../index/mapper/FieldMappersLookup.java | 22 +- .../index/mapper/MappedFieldType.java | 368 +++++++++++++ .../index/mapper/MapperService.java | 8 +- .../mapper/core/AbstractFieldMapper.java | 373 +++++-------- .../index/mapper/core/BinaryFieldMapper.java | 126 +++-- .../index/mapper/core/BooleanFieldMapper.java | 152 +++--- .../index/mapper/core/ByteFieldMapper.java | 174 ++++--- .../mapper/core/CompletionFieldMapper.java | 79 +-- .../index/mapper/core/DateFieldMapper.java | 491 +++++++++--------- .../index/mapper/core/DoubleFieldMapper.java | 189 +++---- .../index/mapper/core/FloatFieldMapper.java | 189 +++---- .../index/mapper/core/IntegerFieldMapper.java | 167 +++--- .../index/mapper/core/LongFieldMapper.java | 168 +++--- .../index/mapper/core/Murmur3FieldMapper.java | 33 +- .../index/mapper/core/NumberFieldMapper.java | 167 +++--- .../index/mapper/core/ShortFieldMapper.java | 174 ++++--- .../index/mapper/core/StringFieldMapper.java | 120 ++--- .../mapper/core/TokenCountFieldMapper.java | 43 +- .../index/mapper/core/TypeParsers.java | 2 +- .../index/mapper/geo/GeoPointFieldMapper.java | 381 ++++++++------ .../index/mapper/geo/GeoShapeFieldMapper.java | 186 ++++--- .../index/mapper/internal/AllFieldMapper.java | 84 +-- .../internal/FieldNamesFieldMapper.java | 67 ++- .../index/mapper/internal/IdFieldMapper.java | 197 +++---- .../mapper/internal/IndexFieldMapper.java | 62 ++- .../mapper/internal/ParentFieldMapper.java | 223 ++++---- .../mapper/internal/RoutingFieldMapper.java | 55 +- .../mapper/internal/SizeFieldMapper.java | 31 +- .../mapper/internal/SourceFieldMapper.java | 71 ++- .../index/mapper/internal/TTLFieldMapper.java | 75 ++- .../mapper/internal/TimestampFieldMapper.java | 122 +++-- .../mapper/internal/TypeFieldMapper.java | 86 +-- .../index/mapper/internal/UidFieldMapper.java | 63 ++- 
.../mapper/internal/VersionFieldMapper.java | 49 +- .../index/mapper/ip/IpFieldMapper.java | 182 ++++--- .../index/query/CommonTermsQueryParser.java | 4 +- .../index/query/ExistsQueryParser.java | 2 +- .../query/FieldMaskingSpanQueryParser.java | 2 +- .../index/query/GeoShapeQueryParser.java | 4 +- .../index/query/GeohashCellQuery.java | 7 +- .../index/query/MissingQueryParser.java | 2 +- .../index/query/MoreLikeThisQueryParser.java | 2 +- .../index/query/QueryParseContext.java | 8 +- .../index/query/RangeQueryParser.java | 2 +- .../index/query/SimpleQueryStringParser.java | 2 +- .../index/query/SpanTermQueryParser.java | 2 +- .../index/query/TermsQueryParser.java | 2 +- .../index/query/WildcardQueryParser.java | 2 +- .../functionscore/DecayFunctionParser.java | 2 +- .../index/search/MatchQuery.java | 2 +- .../index/search/MultiMatchQuery.java | 2 +- .../geo/IndexedGeoBoundingBoxQuery.java | 12 +- .../index/similarity/SimilarityService.java | 2 +- .../termvectors/ShardTermVectorsService.java | 2 +- .../cache/IndicesFieldDataCache.java | 7 +- .../cache/IndicesFieldDataCacheListener.java | 6 +- .../SingleDocumentPercolatorIndex.java | 4 +- .../elasticsearch/search/SearchService.java | 26 +- .../bucket/children/ChildrenParser.java | 2 +- .../support/AggregationContext.java | 2 +- .../support/format/ValueFormat.java | 2 +- .../support/format/ValueFormatter.java | 2 +- .../support/format/ValueParser.java | 2 +- .../search/fetch/FetchPhase.java | 2 +- .../highlight/FastVectorHighlighter.java | 6 +- .../search/highlight/HighlightUtils.java | 6 +- .../search/highlight/PlainHighlighter.java | 6 +- .../search/highlight/PostingsHighlighter.java | 2 +- .../FragmentBuilderHelper.java | 4 +- .../SourceScoreOrderFragmentsBuilder.java | 4 +- .../SourceSimpleFragmentsBuilder.java | 4 +- .../search/lookup/FieldLookup.java | 4 +- .../search/lookup/LeafFieldsLookup.java | 4 +- .../search/sort/SortParseElement.java | 2 +- .../AnalyzingCompletionLookupProvider.java | 28 +- 
.../suggest/phrase/PhraseSuggestParser.java | 4 +- .../index/analysis/PreBuiltAnalyzerTests.java | 4 +- .../NoOrdinalsStringFieldDataTests.java | 2 +- .../index/mapper/FieldMappersLookupTests.java | 11 +- .../mapper/all/SimpleAllMapperTests.java | 4 +- .../mapper/core/Murmur3FieldMapperTests.java | 2 +- .../mapper/externalvalues/ExternalMapper.java | 24 +- .../mapper/geo/GeoShapeFieldMapperTests.java | 34 +- .../geo/GeohashMappingGeoPointTests.java | 8 +- .../internal/FieldNamesFieldMapperTests.java | 2 +- .../mapper/merge/TestMergeMapperTests.java | 8 +- .../mapper/multifield/MultiFieldTests.java | 20 +- .../MultiFieldsIntegrationTests.java | 1 + .../mapper/simple/SimpleMapperTests.java | 14 +- .../string/SimpleStringMappingTests.java | 17 +- .../timestamp/TimestampMappingTests.java | 28 +- .../mapper/update/UpdateMappingTests.java | 4 +- .../index/similarity/SimilarityTests.java | 24 +- .../warmer/SimpleIndicesWarmerTests.java | 2 +- .../search/child/ParentFieldLoadingTest.java | 18 +- .../child/SimpleChildQuerySearchTests.java | 2 +- .../search/geo/GeoShapeIntegrationTests.java | 4 +- .../AnalyzingCompletionLookupProviderV1.java | 28 +- .../CompletionPostingsFormatTest.java | 35 +- .../test/ElasticsearchIntegrationTest.java | 5 +- 136 files changed, 3247 insertions(+), 2558 deletions(-) create mode 100644 src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java diff --git a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 84fcc62af81..593c8aa80cd 100644 --- a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -260,7 +260,7 @@ public class MapperQueryParser extends QueryParser { } } if (query == null) { - query = super.getFieldQuery(currentMapper.names().indexName(), queryText, quoted); + query = 
super.getFieldQuery(currentMapper.fieldType().names().indexName(), queryText, quoted); } return query; } @@ -372,7 +372,7 @@ public class MapperQueryParser extends QueryParser { Query rangeQuery; if (currentMapper instanceof DateFieldMapper && settings.timeZone() != null) { DateFieldMapper dateFieldMapper = (DateFieldMapper) this.currentMapper; - rangeQuery = dateFieldMapper.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext); + rangeQuery = dateFieldMapper.fieldType().rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext); } else { rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext); } @@ -508,7 +508,7 @@ public class MapperQueryParser extends QueryParser { query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext); } if (query == null) { - query = getPossiblyAnalyzedPrefixQuery(currentMapper.names().indexName(), termStr); + query = getPossiblyAnalyzedPrefixQuery(currentMapper.fieldType().names().indexName(), termStr); } return query; } @@ -644,7 +644,7 @@ public class MapperQueryParser extends QueryParser { if (!forcedAnalyzer) { setAnalyzer(parseContext.getSearchAnalyzer(currentMapper)); } - indexedNameField = currentMapper.names().indexName(); + indexedNameField = currentMapper.fieldType().names().indexName(); return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); } return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index bf18ee1ab8a..0b44c9484ce 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -113,8 +113,8 @@ public class TransportAnalyzeAction 
extends TransportSingleCustomOperationAction if (fieldMapper.isNumeric()) { throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields"); } - analyzer = fieldMapper.indexAnalyzer(); - field = fieldMapper.names().indexName(); + analyzer = fieldMapper.fieldType().indexAnalyzer(); + field = fieldMapper.fieldType().names().indexName(); } } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 5ded196f0f3..e9ef1538df2 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -179,7 +179,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { - addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { // go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name. 
@@ -187,22 +187,22 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO Collection remainingFieldMappers = Lists.newLinkedList(allFieldMappers); for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().fullName())) { - addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) { + addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().indexName())) { - addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) { + addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().shortName())) { - addFieldMapper(fieldMapper.names().shortName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().shortName())) { + addFieldMapper(fieldMapper.fieldType().names().shortName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } @@ -229,7 +229,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO builder.startObject(); fieldMapper.toXContent(builder, includeDefaults ? 
includeDefaultsParams : ToXContent.EMPTY_PARAMS); builder.endObject(); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes())); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes())); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index e6067c46817..4c376465b66 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -289,7 +289,7 @@ public class MappingMetaData extends AbstractDiffable { this.id = new Id(docMapper.idFieldMapper().path()); this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path()); this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), - docMapper.timestampFieldMapper().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), + docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); } diff --git a/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 07ceaf1b762..3e733dbd619 100644 --- a/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -728,7 +728,7 @@ public abstract class ShapeBuilder implements ToXContent { Distance radius = null; CoordinateNode node = null; GeometryCollectionBuilder geometryCollections = 
null; - Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.orientation(); + Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.fieldType().orientation(); XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java b/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java index f42ba96b762..371b802dc0c 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java +++ b/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; /** */ diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index daca8e1bfbc..3070c1e56ad 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexComponent; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -77,7 +78,7 @@ public interface IndexFieldData extends IndexCompone /** * The field name. */ - FieldMapper.Names getFieldNames(); + MappedFieldType.Names getFieldNames(); /** * The field data type. 
diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java index a2b73221d91..76d9c24da29 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.util.Accountable; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; /** * A simple field data cache abstraction on the *index* level. @@ -47,9 +48,9 @@ public interface IndexFieldDataCache { interface Listener { - void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage); + void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage); - void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); + void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); } class None implements IndexFieldDataCache { diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 847fa59df48..c3fb6309907 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.plain.*; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; 
import org.elasticsearch.index.mapper.internal.ParentFieldMapper; @@ -46,6 +47,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; +import static org.elasticsearch.index.mapper.MappedFieldType.Names; + /** */ public class IndexFieldDataService extends AbstractIndexComponent { @@ -226,12 +229,12 @@ public class IndexFieldDataService extends AbstractIndexComponent { @SuppressWarnings("unchecked") public > IFD getForField(FieldMapper mapper) { - final FieldMapper.Names fieldNames = mapper.names(); - final FieldDataType type = mapper.fieldDataType(); + final Names fieldNames = mapper.fieldType().names(); + final FieldDataType type = mapper.fieldType().fieldDataType(); if (type == null) { throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]"); } - final boolean docValues = mapper.hasDocValues(); + final boolean docValues = mapper.fieldType().hasDocValues(); final String key = fieldNames.indexName(); IndexFieldData fieldData = loadedFieldData.get(key); if (fieldData == null) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java index 70d8bb18534..f5edf6e21de 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.ShardId; @@ -62,7 +62,7 @@ public class ShardFieldData extends 
AbstractIndexShardComponent implements Index } @Override - public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { totalMetric.inc(ramUsage.ramBytesUsed()); String keyFieldName = fieldNames.indexName(); CounterMetric total = perFieldTotals.get(keyFieldName); @@ -79,7 +79,7 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index } @Override - public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (wasEvicted) { evictionsMetric.inc(); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java index bf06cb3433d..3b4db994a8e 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.MultiValueMode; import java.util.Collection; @@ -41,11 +42,11 @@ import java.util.Collections; */ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable { - private final FieldMapper.Names fieldNames; + private final MappedFieldType.Names fieldNames; private final FieldDataType fieldDataType; private final long 
memorySizeInBytes; - protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) { + protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) { super(index, settings); this.fieldNames = fieldNames; this.fieldDataType = fieldDataType; @@ -68,7 +69,7 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen } @Override - public FieldMapper.Names getFieldNames() { + public MappedFieldType.Names getFieldNames() { return fieldNames; } diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java index 69a39465df6..b91d98f4a62 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import java.util.Collection; @@ -38,7 +39,7 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel private final Atomic[] atomicReaders; - InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) { + InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, 
OrdinalMap ordinalMap, long memorySizeInBytes) { super(index, settings, fieldNames, fieldDataType, memorySizeInBytes); this.atomicReaders = new Atomic[segmentAfd.length]; for (int i = 0; i < segmentAfd.length; i++) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java index c78a10d4786..f5038c2a17c 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.settings.IndexSettings; import java.io.IOException; @@ -38,11 +39,11 @@ import java.io.IOException; */ public abstract class AbstractIndexFieldData extends AbstractIndexComponent implements IndexFieldData { - private final FieldMapper.Names fieldNames; + private final MappedFieldType.Names fieldNames; protected final FieldDataType fieldDataType; protected final IndexFieldDataCache cache; - public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { + public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { super(index, indexSettings); this.fieldNames = fieldNames; this.fieldDataType = fieldDataType; @@ -50,7 +51,7 @@ public abstract class AbstractIndexFieldData extends } @Override - public FieldMapper.Names getFieldNames() { + public MappedFieldType.Names getFieldNames() { return this.fieldNames; } diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index 1759067f780..b225ba2e6d2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java index 74e77d6e921..893efa69ceb 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java index 
f731cd8eb29..2e03b74a41f 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java index 4404ae461df..c78da7c6446 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java @@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java index feabfce0092..369682f377c 100644 --- 
a/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -67,8 +67,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - final Names fieldNames = mapper.names(); - return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + final Names fieldNames = mapper.fieldType().names(); + return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java index c1dfe339131..e0c82e2f7c2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; -import 
org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.search.MultiValueMode; @@ -42,7 +42,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - return new DisabledIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache); + return new DisabledIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java index d3bea6283eb..d2343b36bf5 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java @@ -31,7 +31,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -93,8 +94,8 @@ public abstract class DocValuesIndexFieldData { public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, 
CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - final FieldMapper.Names fieldNames = mapper.names(); - final Settings fdSettings = mapper.fieldDataType().getSettings(); + final Names fieldNames = mapper.fieldType().names(); + final Settings fdSettings = mapper.fieldType().fieldDataType().getSettings(); final Map filter = fdSettings.getGroups("filter"); if (filter != null && !filter.isEmpty()) { throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]"); @@ -102,19 +103,19 @@ public abstract class DocValuesIndexFieldData { if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) { assert numericType == null; - return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) { assert !numericType.isFloatingPoint(); - return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } else if (numericType != null) { if (Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) { - return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType()); + return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType()); } else { // prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY - return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType()); + return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType()); } } else { - return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldDataType()); + return new 
SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java index 6d4b9dbc1db..6b99ad05771 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorS import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -74,11 +75,11 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new DoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new DoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); 
this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java index 3f460376e5c..1aa45a517c0 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -48,11 +49,11 @@ public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new FSTBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new FSTBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, + FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService); this.breakerService = breakerService; diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java index 8f7bee30247..b50c742e15e 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSo import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -73,11 +74,11 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new FloatArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new FloatArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java index 37a3fccd0eb..e33512a668f 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java @@ -27,7 +27,8 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -65,8 +66,8 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - final FieldMapper.Names fieldNames = mapper.names(); - return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + final Names fieldNames = mapper.fieldType().names(); + return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java index 1b48b014239..6bea9d873e5 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.fielddata.*; 
import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.settings.IndexSettings; @@ -54,7 +55,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField @Override public IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - FieldDataType type = mapper.fieldDataType(); + FieldDataType type = mapper.fieldType().fieldDataType(); final String precisionAsString = type.getSettings().get(PRECISION_KEY); final Distance precision; if (precisionAsString != null) { @@ -62,13 +63,13 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField } else { precision = DEFAULT_PRECISION_VALUE; } - return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, precision, breakerService); + return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, precision, breakerService); } } private final GeoPointFieldMapper.Encoding encoding; - public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, Distance precision, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java index 7a0beb06353..ae41404d53a 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -48,11 +49,11 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel @Override public IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index 3fb3a82eb6a..1789d2e148b 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -47,7 +48,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new IndexIndexFieldData(index, mapper.names()); + return new IndexIndexFieldData(index, mapper.fieldType().names()); } } @@ -101,7 +102,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { private final AtomicOrdinalsFieldData atomicFieldData; - private IndexIndexFieldData(Index index, FieldMapper.Names names) { + private IndexIndexFieldData(Index index, MappedFieldType.Names names) { super(index, Settings.EMPTY, names, new FieldDataType("string"), null, null); atomicFieldData = new IndexAtomicFieldData(index().name()); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java index 710ddba04a4..49e03015cba 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java @@ -31,7 +31,7 @@ import 
org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java index 50f6e631e84..71af0e0de60 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSou import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -86,14 +87,14 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PackedArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, numericType, breakerService); + return new PackedArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, numericType, breakerService); } } private final NumericType numericType; 
private final CircuitBreakerService breakerService; - public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, NumericType numericType, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 8d91132b698..91487fe3e69 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -49,11 +50,11 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PagedBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new PagedBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public PagedBytesIndexFieldData(Index index, @IndexSettings 
Settings indexSettings, FieldMapper.Names fieldNames, + public PagedBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index e748805e329..51460fa8b73 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -61,7 +61,8 @@ import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentTypeListener; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -96,7 +97,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, - mapperService, breakerService); + return new ParentChildIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, + mapperService, breakerService); } } diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java index 18995573ee4..32bd21c3759 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 7ca547f6159..9d29b3b1a8a 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.indices.breaker.CircuitBreakerService; diff --git 
a/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index 945dee616ca..74572f21bd8 100644 --- a/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -67,7 +67,7 @@ public class SingleFieldsVisitor extends FieldsVisitor { if (fieldsValues == null) { return; } - List fieldValues = fieldsValues.get(mapper.names().indexName()); + List fieldValues = fieldsValues.get(mapper.fieldType().names().indexName()); if (fieldValues == null) { return; } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index e4f61db2df1..f7166ad769a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -59,19 +59,19 @@ public final class DocumentFieldMappers implements Iterable { FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.indexAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer()); } })); FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.searchAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer()); } })); FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry 
apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.searchQuoteAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer()); } })); return new DocumentFieldMappers(fieldMappers, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer); diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 172c05c32ba..02f6459b76b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -19,9 +19,6 @@ package org.elasticsearch.index.mapper; -import com.google.common.base.Strings; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.search.MultiTermQuery; @@ -30,10 +27,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.List; @@ -45,146 +40,7 @@ public interface FieldMapper extends Mapper { String DOC_VALUES_FORMAT = "doc_values_format"; - class Names { - - private final String shortName; - - private final String indexName; - - private final String originalIndexName; - - private final String fullName; - - public Names(String name) { - this(name, name, name, name); - } - - public Names(String shortName, String indexName, String originalIndexName, String fullName) { - this.shortName = shortName; - this.indexName = indexName; - this.originalIndexName = 
originalIndexName; - this.fullName = fullName; - } - - /** - * The logical name of the field. - */ - public String shortName() { - return shortName; - } - - /** - * The indexed name of the field. This is the name under which we will - * store it in the index. - */ - public String indexName() { - return indexName; - } - - /** - * The original index name, before any "path" modifications performed on it. - */ - public String originalIndexName() { - return originalIndexName; - } - - /** - * The full name, including dot path. - */ - public String fullName() { - return fullName; - } - - @Override - public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) return false; - - Names names = (Names) o; - - if (!fullName.equals(names.fullName)) return false; - if (!indexName.equals(names.indexName)) return false; - if (!originalIndexName.equals(names.originalIndexName)) return false; - if (!shortName.equals(names.shortName)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = shortName.hashCode(); - result = 31 * result + indexName.hashCode(); - result = 31 * result + originalIndexName.hashCode(); - result = 31 * result + fullName.hashCode(); - return result; - } - } - - enum Loading { - LAZY { - @Override - public String toString() { - return LAZY_VALUE; - } - }, - EAGER { - @Override - public String toString() { - return EAGER_VALUE; - } - }, - EAGER_GLOBAL_ORDINALS { - @Override - public String toString() { - return EAGER_GLOBAL_ORDINALS_VALUE; - } - }; - - public static final String KEY = "loading"; - public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals"; - public static final String EAGER_VALUE = "eager"; - public static final String LAZY_VALUE = "lazy"; - - public static Loading parse(String loading, Loading defaultValue) { - if (Strings.isNullOrEmpty(loading)) { - return defaultValue; - } else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) { - return 
EAGER_GLOBAL_ORDINALS; - } else if (EAGER_VALUE.equalsIgnoreCase(loading)) { - return EAGER; - } else if (LAZY_VALUE.equalsIgnoreCase(loading)) { - return LAZY; - } else { - throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]"); - } - } - - } - - Names names(); - - FieldType fieldType(); - - float boost(); - - /** - * The analyzer that will be used to index the field. - */ - Analyzer indexAnalyzer(); - - /** - * The analyzer that will be used to search the field. - */ - Analyzer searchAnalyzer(); - - /** - * The analyzer that will be used for quoted search on the field. - */ - Analyzer searchQuoteAnalyzer(); - - /** - * Similarity used for scoring queries on the field - */ - SimilarityProvider similarity(); + MappedFieldType fieldType(); /** * List of fields where this field should be copied to @@ -236,18 +92,12 @@ public interface FieldMapper extends Mapper { @Nullable Query nullValueFilter(); - FieldDataType fieldDataType(); - boolean isNumeric(); boolean isSortable(); boolean supportsNullValue(); - boolean hasDocValues(); - - Loading normsLoading(Loading defaultLoading); - /** * Fields might not be available before indexing, for example _all, token_count,... * When get is called and these fields are requested, this case needs special treatment. 
diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java index d751c95910e..eda694a939d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java @@ -53,7 +53,7 @@ class FieldMappersLookup implements Iterable { CopyOnWriteHashMap map = this.mappers; for (FieldMapper mapper : newMappers) { - String key = mapper.names().fullName(); + String key = mapper.fieldType().names().fullName(); FieldMappers mappers = map.get(key); if (mappers == null) { @@ -76,13 +76,13 @@ class FieldMappersLookup implements Iterable { public FieldMappers indexName(String indexName) { FieldMappers fieldMappers = fullName(indexName); if (fieldMappers != null) { - if (fieldMappers.mapper().names().indexName().equals(indexName)) { + if (fieldMappers.mapper().fieldType().names().indexName().equals(indexName)) { return fieldMappers; } } fieldMappers = new FieldMappers(); for (FieldMapper mapper : this) { - if (mapper.names().indexName().equals(indexName)) { + if (mapper.fieldType().names().indexName().equals(indexName)) { fieldMappers = fieldMappers.concat(mapper); } } @@ -117,10 +117,10 @@ class FieldMappersLookup implements Iterable { public Collection simpleMatchToIndexNames(String pattern) { Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { - fields.add(fieldMapper.names().indexName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) { - fields.add(fieldMapper.names().indexName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { + fields.add(fieldMapper.fieldType().names().indexName()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { + fields.add(fieldMapper.fieldType().names().indexName()); } } return fields; @@ 
-132,10 +132,10 @@ class FieldMappersLookup implements Iterable { public Collection simpleMatchToFullName(String pattern) { Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { - fields.add(fieldMapper.names().fullName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) { - fields.add(fieldMapper.names().fullName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { + fields.add(fieldMapper.fieldType().names().fullName()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { + fields.add(fieldMapper.fieldType().names().fullName()); } } return fields; diff --git a/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java new file mode 100644 index 00000000000..644af16991b --- /dev/null +++ b/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -0,0 +1,368 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import com.google.common.base.Strings; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.similarity.SimilarityProvider; + +import java.io.IOException; +import java.util.List; + +/** + * This defines the core properties and functions to operate on a field. + */ +public class MappedFieldType extends FieldType { + + public static class Names { + + private final String shortName; + + private final String indexName; + + private final String originalIndexName; + + private final String fullName; + + public Names(String name) { + this(name, name, name, name); + } + + public Names(String shortName, String indexName, String originalIndexName, String fullName) { + this.shortName = shortName; + this.indexName = indexName; + this.originalIndexName = originalIndexName; + this.fullName = fullName; + } + + /** + * The logical name of the field. + */ + public String shortName() { + return shortName; + } + + /** + * The indexed name of the field. 
This is the name under which we will + * store it in the index. + */ + public String indexName() { + return indexName; + } + + /** + * The original index name, before any "path" modifications performed on it. + */ + public String originalIndexName() { + return originalIndexName; + } + + /** + * The full name, including dot path. + */ + public String fullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + + Names names = (Names) o; + + if (!fullName.equals(names.fullName)) return false; + if (!indexName.equals(names.indexName)) return false; + if (!originalIndexName.equals(names.originalIndexName)) return false; + if (!shortName.equals(names.shortName)) return false; + + return true; + } + + @Override + public int hashCode() { + int result = shortName.hashCode(); + result = 31 * result + indexName.hashCode(); + result = 31 * result + originalIndexName.hashCode(); + result = 31 * result + fullName.hashCode(); + return result; + } + } + + public enum Loading { + LAZY { + @Override + public String toString() { + return LAZY_VALUE; + } + }, + EAGER { + @Override + public String toString() { + return EAGER_VALUE; + } + }, + EAGER_GLOBAL_ORDINALS { + @Override + public String toString() { + return EAGER_GLOBAL_ORDINALS_VALUE; + } + }; + + public static final String KEY = "loading"; + public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals"; + public static final String EAGER_VALUE = "eager"; + public static final String LAZY_VALUE = "lazy"; + + public static Loading parse(String loading, Loading defaultValue) { + if (Strings.isNullOrEmpty(loading)) { + return defaultValue; + } else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) { + return EAGER_GLOBAL_ORDINALS; + } else if (EAGER_VALUE.equalsIgnoreCase(loading)) { + return EAGER; + } else if (LAZY_VALUE.equalsIgnoreCase(loading)) { + return LAZY; + } else { + throw new 
MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]"); + } + } + } + + private Names names; + private float boost; + // TODO: remove this docvalues flag and use docValuesType + private boolean docValues; + private NamedAnalyzer indexAnalyzer; + private NamedAnalyzer searchAnalyzer; + private NamedAnalyzer searchQuoteAnalyzer; + private SimilarityProvider similarity; + private Loading normsLoading; + private FieldDataType fieldDataType; + + protected MappedFieldType(MappedFieldType ref) { + super(ref); + this.names = ref.names(); + this.boost = ref.boost(); + this.docValues = ref.hasDocValues(); + this.indexAnalyzer = ref.indexAnalyzer(); + this.searchAnalyzer = ref.searchAnalyzer(); + this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer(); + this.similarity = ref.similarity(); + this.normsLoading = ref.normsLoading(); + this.fieldDataType = ref.fieldDataType(); + } + + public MappedFieldType() {} + + public MappedFieldType clone() { + return new MappedFieldType(this); + } + + public boolean isNumeric() { + return false; + } + + public boolean isSortable() { + return true; + } + + public Names names() { + return names; + } + + public void setNames(Names names) { + checkIfFrozen(); + this.names = names; + } + + public float boost() { + return boost; + } + + public void setBoost(float boost) { + checkIfFrozen(); + this.boost = boost; + } + + public FieldDataType fieldDataType() { + return fieldDataType; + } + + public void setFieldDataType(FieldDataType fieldDataType) { + checkIfFrozen(); + this.fieldDataType = fieldDataType; + } + + public boolean hasDocValues() { + return docValues; + } + + public void setHasDocValues(boolean hasDocValues) { + checkIfFrozen(); + this.docValues = hasDocValues; + } + + public Loading normsLoading() { + return normsLoading; + } + + public void setNormsLoading(Loading normsLoading) { + checkIfFrozen(); + this.normsLoading = normsLoading; + } + + public NamedAnalyzer indexAnalyzer() { + return indexAnalyzer; + } + + 
public void setIndexAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.indexAnalyzer = analyzer; + } + + public NamedAnalyzer searchAnalyzer() { + return searchAnalyzer; + } + + public void setSearchAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.searchAnalyzer = analyzer; + } + + public NamedAnalyzer searchQuoteAnalyzer() { + return searchQuoteAnalyzer == null ? searchAnalyzer : searchQuoteAnalyzer; + } + + public void setSearchQuoteAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.searchQuoteAnalyzer = analyzer; + } + + public SimilarityProvider similarity() { + return similarity; + } + + public void setSimilarity(SimilarityProvider similarity) { + checkIfFrozen(); + this.similarity = similarity; + } + + /** Returns the actual value of the field. */ + public Object value(Object value) { + return value; + } + + /** Returns the value that will be used as a result for search. Can be only of specific types... */ + public Object valueForSearch(Object value) { + return value; + } + + /** Returns the indexed value used to construct search "values". */ + public BytesRef indexedValueForSearch(Object value) { + return BytesRefs.toBytesRef(value); + } + + /** + * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this + * field in query string. 
+ */ + public boolean useTermQueryWithQueryString() { + return false; + } + + /** Creates a term associated with the field of this mapper for the given value */ + protected Term createTerm(Object value) { + return new Term(names().indexName(), indexedValueForSearch(value)); + } + + public Query termQuery(Object value, @Nullable QueryParseContext context) { + return new TermQuery(createTerm(value)); + } + + public Query termsQuery(List values, @Nullable QueryParseContext context) { + BytesRef[] bytesRefs = new BytesRef[values.size()]; + for (int i = 0; i < bytesRefs.length; i++) { + bytesRefs[i] = indexedValueForSearch(values.get(i)); + } + return new TermsQuery(names.indexName(), bytesRefs); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return new TermRangeQuery(names().indexName(), + lowerTerm == null ? null : indexedValueForSearch(lowerTerm), + upperTerm == null ? null : indexedValueForSearch(upperTerm), + includeLower, includeUpper); + } + + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions); + } + + public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + PrefixQuery query = new PrefixQuery(createTerm(value)); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } + + public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } + + /** + * @return a {@link FieldStats} instance that maps to the type of this 
field based on the provided {@link Terms} instance. + */ + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + return new FieldStats.Text( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax() + ); + } + + /** A term query to use when parsing a query string. Can return null. */ + @Nullable + public Query queryStringTermQuery(Term term) { + return null; + } +} diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b63df2d6cc4..3b223007268 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -690,8 +690,8 @@ public class MapperService extends AbstractIndexComponent { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { FieldMapper mapper = smartNameFieldMapper(fieldName); - if (mapper != null && mapper.searchAnalyzer() != null) { - return mapper.searchAnalyzer(); + if (mapper != null && mapper.fieldType().searchAnalyzer() != null) { + return mapper.fieldType().searchAnalyzer(); } return defaultAnalyzer; } @@ -709,8 +709,8 @@ public class MapperService extends AbstractIndexComponent { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { FieldMapper mapper = smartNameFieldMapper(fieldName); - if (mapper != null && mapper.searchQuoteAnalyzer() != null) { - return mapper.searchQuoteAnalyzer(); + if (mapper != null && mapper.fieldType().searchQuoteAnalyzer() != null) { + return mapper.fieldType().searchQuoteAnalyzer(); } return defaultAnalyzer; } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java index f7217a5b8e0..db0be598d91 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java +++ 
b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java @@ -25,28 +25,19 @@ import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -54,6 +45,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -74,14 +66,12 @@ import java.util.List; import java.util.Locale; import java.util.TreeMap; -/** - * - */ +import 
static org.elasticsearch.index.mapper.core.TypeParsers.DOC_VALUES; + public abstract class AbstractFieldMapper implements FieldMapper { public static class Defaults { - public static final FieldType FIELD_TYPE = new FieldType(); - public static final boolean PRE_2X_DOC_VALUES = false; + public static final MappedFieldType FIELD_TYPE = new MappedFieldType(); static { FIELD_TYPE.setTokenized(true); @@ -89,6 +79,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { FIELD_TYPE.setStoreTermVectors(false); FIELD_TYPE.setOmitNorms(false); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + FIELD_TYPE.setBoost(Defaults.BOOST); FIELD_TYPE.freeze(); } @@ -98,26 +89,21 @@ public abstract class AbstractFieldMapper implements FieldMapper { public abstract static class Builder extends Mapper.Builder { - protected final FieldType fieldType; + protected final MappedFieldType fieldType; private final IndexOptions defaultOptions; protected Boolean docValues; - protected float boost = Defaults.BOOST; protected boolean omitNormsSet = false; protected String indexName; - protected NamedAnalyzer indexAnalyzer; - protected NamedAnalyzer searchAnalyzer; protected Boolean includeInAll; protected boolean indexOptionsSet = false; - protected SimilarityProvider similarity; - protected Loading normsLoading; @Nullable protected Settings fieldDataSettings; protected final MultiFields.Builder multiFieldsBuilder; protected CopyTo copyTo; - protected Builder(String name, FieldType fieldType) { + protected Builder(String name, MappedFieldType fieldType) { super(name); - this.fieldType = fieldType; + this.fieldType = fieldType.clone(); this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable multiFieldsBuilder = new MultiFields.Builder(); } @@ -191,7 +177,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T boost(float boost) { - this.boost = boost; + this.fieldType.setBoost(boost); return 
builder; } @@ -213,12 +199,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T indexAnalyzer(NamedAnalyzer indexAnalyzer) { - this.indexAnalyzer = indexAnalyzer; + this.fieldType.setIndexAnalyzer(indexAnalyzer); return builder; } public T searchAnalyzer(NamedAnalyzer searchAnalyzer) { - this.searchAnalyzer = searchAnalyzer; + this.fieldType.setSearchAnalyzer(searchAnalyzer); return builder; } @@ -228,12 +214,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T similarity(SimilarityProvider similarity) { - this.similarity = similarity; + this.fieldType.setSimilarity(similarity); return builder; } - public T normsLoading(Loading normsLoading) { - this.normsLoading = normsLoading; + public T normsLoading(MappedFieldType.Loading normsLoading) { + this.fieldType.setNormsLoading(normsLoading); return builder; } @@ -257,8 +243,8 @@ public abstract class AbstractFieldMapper implements FieldMapper { return builder; } - protected Names buildNames(BuilderContext context) { - return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); + protected MappedFieldType.Names buildNames(BuilderContext context) { + return new MappedFieldType.Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); } protected String buildIndexName(BuilderContext context) { @@ -279,136 +265,82 @@ public abstract class AbstractFieldMapper implements FieldMapper { protected String buildFullName(BuilderContext context) { return context.path().fullPathAsText(name); } + + protected void setupFieldType(BuilderContext context) { + fieldType.setNames(buildNames(context)); + } } - protected final Names names; - protected float boost; - protected FieldType fieldType; - protected final Boolean docValues; - protected final NamedAnalyzer indexAnalyzer; - protected NamedAnalyzer searchAnalyzer; - protected final SimilarityProvider similarity; - protected Loading normsLoading; + 
protected MappedFieldType fieldType; + protected final boolean hasDefaultDocValues; protected Settings customFieldDataSettings; - protected FieldDataType fieldDataType; protected final MultiFields multiFields; protected CopyTo copyTo; protected final boolean indexCreatedBefore2x; - protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) { - this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, similarity, - normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null); + protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { + this(fieldType, docValues, fieldDataSettings, indexSettings, MultiFields.empty(), null); } - protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { assert indexSettings != null; - this.names = names; - this.boost = boost; - this.fieldType = fieldType; - this.fieldType.freeze(); this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0); - - boolean indexedNotAnalyzed = this.fieldType.tokenized() == false && this.fieldType.indexOptions() != IndexOptions.NONE; - if (indexAnalyzer == null && indexedNotAnalyzed) { - this.indexAnalyzer = this.searchAnalyzer = Lucene.KEYWORD_ANALYZER; - } else { - this.indexAnalyzer = indexAnalyzer; - this.searchAnalyzer = 
searchAnalyzer; - } - - this.similarity = similarity; - this.normsLoading = normsLoading; - this.customFieldDataSettings = fieldDataSettings; + FieldDataType fieldDataType; if (fieldDataSettings == null) { - this.fieldDataType = defaultFieldDataType(); + fieldDataType = defaultFieldDataType(); } else { // create a new field data type, with the default settings as well as the "new ones" - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), - Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings) + fieldDataType = new FieldDataType(defaultFieldDataType().getType(), + Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings) ); } - - if (docValues != null) { - // explicitly set - this.docValues = docValues; - } else if (fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) { - // convoluted way to enable doc values, should be removed in the future - this.docValues = true; - } else { - this.docValues = null; // use the default + + // TODO: hasDocValues should just be set directly on the field type by callers of this ctor, but + // then we need to eliminate defaultDocValues() (only needed by geo, which needs to be fixed with passing + // doc values setting down to lat/lon) and get rid of specifying doc values in fielddata (which + // complicates whether we can just compare to the default value to know whether to write the setting) + if (docValues == null && fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) { + docValues = true; } + hasDefaultDocValues = docValues == null; + + this.fieldType = fieldType.clone(); + if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { + this.fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + this.fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + } + 
this.fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues); + this.fieldType.setFieldDataType(fieldDataType); + this.fieldType.freeze(); + this.multiFields = multiFields; this.copyTo = copyTo; } protected boolean defaultDocValues() { if (indexCreatedBefore2x) { - return Defaults.PRE_2X_DOC_VALUES; + return false; } else { return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; } } - @Override - public final boolean hasDocValues() { - return docValues == null ? defaultDocValues() : docValues; - } - @Override public String name() { // TODO: cleanup names so Mapper knows about paths, so that it is always clear whether we are using short or full name - return names.shortName(); + return fieldType.names().shortName(); } - @Override - public Names names() { - return this.names; - } - - public abstract FieldType defaultFieldType(); + public abstract MappedFieldType defaultFieldType(); public abstract FieldDataType defaultFieldDataType(); @Override - public final FieldDataType fieldDataType() { - return fieldDataType; - } - - @Override - public FieldType fieldType() { + public MappedFieldType fieldType() { return fieldType; } - @Override - public float boost() { - return this.boost; - } - - @Override - public Analyzer indexAnalyzer() { - return this.indexAnalyzer; - } - - @Override - public Analyzer searchAnalyzer() { - return this.searchAnalyzer; - } - - @Override - public Analyzer searchQuoteAnalyzer() { - return this.searchAnalyzer; - } - - @Override - public SimilarityProvider similarity() { - return similarity; - } - @Override public CopyTo copyTo() { return copyTo; @@ -421,12 +353,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { parseCreateField(context, fields); for (Field field : fields) { if (!customBoost()) { - field.setBoost(boost); + field.setBoost(fieldType.boost()); } context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + 
names.fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e); } multiFields.parse(this, context); return null; @@ -452,72 +384,59 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public Object valueForSearch(Object value) { - return value; + public final Object value(Object value) { + return fieldType().value(value); } + @Override + public final Object valueForSearch(Object value) { + return fieldType().valueForSearch(value); + } + + // TODO: this is not final so ParentFieldMapper can have custom behavior, per type... @Override public BytesRef indexedValueForSearch(Object value) { - return BytesRefs.toBytesRef(value); + return fieldType().indexedValueForSearch(value); } @Override - public Query queryStringTermQuery(Term term) { - return null; + public final Query queryStringTermQuery(Term term) { + return fieldType().queryStringTermQuery(term); } @Override - public boolean useTermQueryWithQueryString() { - return false; + public final boolean useTermQueryWithQueryString() { + return fieldType().useTermQueryWithQueryString(); } @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - return new TermQuery(createTerm(value)); + public final Query termQuery(Object value, @Nullable QueryParseContext context) { + return fieldType().termQuery(value, context); } @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { - bytesRefs[i] = indexedValueForSearch(values.get(i)); - } - return new TermsQuery(names.indexName(), bytesRefs); + public final Query termsQuery(List values, @Nullable QueryParseContext context) { + return fieldType().termsQuery(values, context); } @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { 
- return new TermRangeQuery(names.indexName(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), - includeLower, includeUpper); + public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return fieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context); } @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions); + public final Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + return fieldType().fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions); } @Override - public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - PrefixQuery query = new PrefixQuery(createTerm(value)); - if (method != null) { - query.setRewriteMethod(method); - } - return query; + public final Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + return fieldType().prefixQuery(value, method, context); } @Override - public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); - if (method != null) { - query.setRewriteMethod(method); - } - return query; - } - - protected Term createTerm(Object value) { - return new Term(names.indexName(), indexedValueForSearch(value)); + public final Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable 
QueryParseContext context) { + return fieldType().regexpQuery(value, flags, maxDeterminizedStates, method, context); } @Override @@ -532,7 +451,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (mergeWith instanceof AbstractFieldMapper) { mergedType = ((AbstractFieldMapper) mergeWith).contentType(); } - mergeResult.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); // different types, return return; } @@ -540,86 +459,86 @@ public abstract class AbstractFieldMapper implements FieldMapper { boolean indexed = fieldType.indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE; if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different index values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index values"); } if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store values"); } - if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) { + if (!this.fieldType().hasDocValues() && fieldMergeWith.fieldType().hasDocValues()) { // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set // when the doc_values field data format is configured - mergeResult.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values"); + mergeResult.addConflict("mapper [" + 
fieldType.names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values"); } if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] cannot enable norms (`norms.enabled`)"); } if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tokenize values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tokenize values"); } if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector values"); } if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_offsets values"); } if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_positions values"); } if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_payloads 
values"); } // null and "default"-named index analyzers both mean the default is used - if (this.indexAnalyzer == null || "default".equals(this.indexAnalyzer.name())) { - if (fieldMergeWith.indexAnalyzer != null && !"default".equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); + if (this.fieldType.indexAnalyzer() == null || "default".equals(this.fieldType.indexAnalyzer().name())) { + if (fieldMergeWith.fieldType.indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); } - } else if (fieldMergeWith.indexAnalyzer == null || "default".equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); - } else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); + } else if (fieldMergeWith.fieldType.indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); + } else if (this.fieldType.indexAnalyzer().name().equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); } - if (!this.names().equals(fieldMergeWith.names())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different index_name"); + if (!this.fieldType().names().equals(fieldMergeWith.fieldType().names())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index_name"); } - if (this.similarity == null) { - if (fieldMergeWith.similarity() != null) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has 
different similarity"); + if (this.fieldType.similarity() == null) { + if (fieldMergeWith.fieldType.similarity() != null) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); } - } else if (fieldMergeWith.similarity() == null) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity"); - } else if (!this.similarity().equals(fieldMergeWith.similarity())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity"); + } else if (fieldMergeWith.fieldType().similarity() == null) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); + } else if (!this.fieldType().similarity().equals(fieldMergeWith.fieldType().similarity())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); } multiFields.merge(mergeWith, mergeResult); if (!mergeResult.simulate()) { // apply changeable values - this.fieldType = new FieldType(this.fieldType); + this.fieldType = this.fieldType.clone(); this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms()); - this.fieldType.freeze(); - this.boost = fieldMergeWith.boost; - this.normsLoading = fieldMergeWith.normsLoading; - this.copyTo = fieldMergeWith.copyTo; - if (fieldMergeWith.searchAnalyzer != null) { - this.searchAnalyzer = fieldMergeWith.searchAnalyzer; + this.fieldType.setBoost(fieldMergeWith.fieldType.boost()); + this.fieldType.setNormsLoading(fieldMergeWith.fieldType.normsLoading()); + if (fieldMergeWith.fieldType.searchAnalyzer() != null) { + this.fieldType.setSearchAnalyzer(fieldMergeWith.fieldType.searchAnalyzer()); } if (fieldMergeWith.customFieldDataSettings != null) { if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) { this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings; - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), - 
Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) - ); + this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(), + Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) + )); } } + this.fieldType.freeze(); + this.copyTo = fieldMergeWith.copyTo; } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names.shortName()); + builder.startObject(fieldType.names().shortName()); boolean includeDefaults = params.paramAsBoolean("include_defaults", false); doXContentBody(builder, includeDefaults, params); return builder.endObject(); @@ -628,12 +547,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { builder.field("type", contentType()); - if (indexCreatedBefore2x && (includeDefaults || !names.shortName().equals(names.originalIndexName()))) { - builder.field("index_name", names.originalIndexName()); + if (indexCreatedBefore2x && (includeDefaults || !fieldType.names().shortName().equals(fieldType.names().originalIndexName()))) { + builder.field("index_name", fieldType.names().originalIndexName()); } - if (includeDefaults || boost != 1.0f) { - builder.field("boost", boost); + if (includeDefaults || fieldType.boost() != 1.0f) { + builder.field("boost", fieldType.boost()); } FieldType defaultFieldType = defaultFieldType(); @@ -650,13 +569,13 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) { builder.field("term_vector", termVectorOptionsToString(fieldType)); } - if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) { + if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || 
fieldType.normsLoading() != null) { builder.startObject("norms"); if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) { builder.field("enabled", !fieldType.omitNorms()); } - if (normsLoading != null) { - builder.field(Loading.KEY, normsLoading); + if (fieldType.normsLoading() != null) { + builder.field(MappedFieldType.Loading.KEY, fieldType.normsLoading()); } builder.endObject(); } @@ -666,8 +585,8 @@ public abstract class AbstractFieldMapper implements FieldMapper { doXContentAnalyzers(builder, includeDefaults); - if (similarity() != null) { - builder.field("similarity", similarity().name()); + if (fieldType().similarity() != null) { + builder.field("similarity", fieldType().similarity().name()); } else if (includeDefaults) { builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY); } @@ -677,7 +596,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap()); builder.field("fielddata", orderedFielddataSettings); } else if (includeDefaults) { - orderedFielddataSettings.putAll(fieldDataType.getSettings().getAsMap()); + orderedFielddataSettings.putAll(fieldType.fieldDataType().getSettings().getAsMap()); builder.field("fielddata", orderedFielddataSettings); } multiFields.toXContent(builder, params); @@ -688,21 +607,21 @@ public abstract class AbstractFieldMapper implements FieldMapper { } protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException { - if (indexAnalyzer == null) { + if (fieldType.indexAnalyzer() == null) { if (includeDefaults) { builder.field("analyzer", "default"); } - } else if (includeDefaults || indexAnalyzer.name().startsWith("_") == false && indexAnalyzer.name().equals("default") == false) { - builder.field("analyzer", indexAnalyzer.name()); - if (searchAnalyzer.name().equals(indexAnalyzer.name()) == false) { - builder.field("search_analyzer", searchAnalyzer.name()); + } else if 
(includeDefaults || fieldType.indexAnalyzer().name().startsWith("_") == false && fieldType.indexAnalyzer().name().equals("default") == false) { + builder.field("analyzer", fieldType.indexAnalyzer().name()); + if (fieldType.searchAnalyzer().name().equals(fieldType.indexAnalyzer().name()) == false) { + builder.field("search_analyzer", fieldType.searchAnalyzer().name()); } } } protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException { - if (includeDefaults || docValues != null) { - builder.field(TypeParsers.DOC_VALUES, hasDocValues()); + if (includeDefaults || hasDefaultDocValues == false) { + builder.field(DOC_VALUES, fieldType().hasDocValues()); } } @@ -753,7 +672,6 @@ public abstract class AbstractFieldMapper implements FieldMapper { } } - protected abstract String contentType(); @Override @@ -762,13 +680,13 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public boolean isNumeric() { - return false; + public final boolean isNumeric() { + return fieldType().isNumeric(); } @Override - public boolean isSortable() { - return true; + public final boolean isSortable() { + return fieldType().isSortable(); } @Override @@ -776,11 +694,6 @@ public abstract class AbstractFieldMapper implements FieldMapper { return true; } - @Override - public Loading normsLoading(Loading defaultLoading) { - return normsLoading == null ? 
defaultLoading : normsLoading; - } - public static class MultiFields { public static MultiFields empty() { @@ -854,7 +767,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); - context.path().add(mainField.names().shortName()); + context.path().add(mainField.fieldType().names().shortName()); for (ObjectCursor cursor : mappers.values()) { cursor.value.parse(context); } @@ -871,7 +784,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { for (ObjectCursor cursor : mergeWithMultiField.multiFields.mappers.values()) { FieldMapper mergeWithMapper = cursor.value; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.names().shortName()); + Mapper mergeIntoMapper = mappers.get(mergeWithMapper.fieldType().names().shortName()); if (mergeIntoMapper == null) { // no mapping, simply add it if not simulating if (!mergeResult.simulate()) { @@ -882,7 +795,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (newMappersBuilder == null) { newMappersBuilder = ImmutableOpenMap.builder(mappers); } - newMappersBuilder.put(mergeWithMapper.names().shortName(), mergeWithMapper); + newMappersBuilder.put(mergeWithMapper.fieldType().names().shortName(), mergeWithMapper); if (mergeWithMapper instanceof AbstractFieldMapper) { if (newFieldMappers == null) { newFieldMappers = new ArrayList<>(2); @@ -992,9 +905,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - return new FieldStats.Text( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax() - ); + public final FieldStats stats(Terms terms, int maxDoc) throws IOException { + return fieldType().stats(terms, maxDoc); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java 
b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 9972ca45cbd..12b053b4b11 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.ObjectArrayList; - import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; @@ -40,6 +39,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -63,7 +63,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new BinaryFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); @@ -74,13 +74,15 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class Builder extends AbstractFieldMapper.Builder { public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @Override public BinaryFieldMapper build(BuilderContext context) { - return new BinaryFieldMapper(buildNames(context), fieldType, docValues, + setupFieldType(context); + ((BinaryFieldType)fieldType).tryUncompressing = context.indexCreatedVersion().before(Version.V_2_0_0); + return new BinaryFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings(), 
multiFieldsBuilder.build(this, context), copyTo); } } @@ -102,13 +104,67 @@ public class BinaryFieldMapper extends AbstractFieldMapper { } } - protected BinaryFieldMapper(Names names, FieldType fieldType, Boolean docValues, + public static class BinaryFieldType extends MappedFieldType { + protected boolean tryUncompressing = false; + + public BinaryFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected BinaryFieldType(BinaryFieldType ref) { + super(ref); + this.tryUncompressing = ref.tryUncompressing; + } + + @Override + public MappedFieldType clone() { + return new BinaryFieldType(this); + } + + @Override + public BytesReference value(Object value) { + if (value == null) { + return null; + } + + BytesReference bytes; + if (value instanceof BytesRef) { + bytes = new BytesArray((BytesRef) value); + } else if (value instanceof BytesReference) { + bytes = (BytesReference) value; + } else if (value instanceof byte[]) { + bytes = new BytesArray((byte[]) value); + } else { + try { + bytes = new BytesArray(Base64.decode(value.toString())); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to convert bytes", e); + } + } + try { + if (tryUncompressing) { // backcompat behavior + return CompressorFactory.uncompressIfNeeded(bytes); + } else { + return bytes; + } + } catch (IOException e) { + throw new ElasticsearchParseException("failed to decompress source", e); + } + } + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + } + + protected BinaryFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1.0f, fieldType, docValues, null, null, null, null, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); } @Override - public FieldType defaultFieldType() { + public MappedFieldType 
defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -117,45 +173,9 @@ public class BinaryFieldMapper extends AbstractFieldMapper { return new FieldDataType("binary"); } - @Override - public Object valueForSearch(Object value) { - return value(value); - } - - @Override - public BytesReference value(Object value) { - if (value == null) { - return null; - } - - BytesReference bytes; - if (value instanceof BytesRef) { - bytes = new BytesArray((BytesRef) value); - } else if (value instanceof BytesReference) { - bytes = (BytesReference) value; - } else if (value instanceof byte[]) { - bytes = new BytesArray((byte[]) value); - } else { - try { - bytes = new BytesArray(Base64.decode(value.toString())); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to convert bytes", e); - } - } - try { - if (indexCreatedBefore2x) { - return CompressorFactory.uncompressIfNeeded(bytes); - } else { - return bytes; - } - } catch (IOException e) { - throw new ElasticsearchParseException("failed to decompress source", e); - } - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (!fieldType().stored() && !hasDocValues()) { + if (!fieldType().stored() && !fieldType().hasDocValues()) { return; } byte[] value = context.parseExternalValue(byte[].class); @@ -170,14 +190,14 @@ public class BinaryFieldMapper extends AbstractFieldMapper { return; } if (fieldType().stored()) { - fields.add(new Field(names.indexName(), value, fieldType)); + fields.add(new Field(fieldType().names().indexName(), value, fieldType())); } - if (hasDocValues()) { - CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(names().indexName()); + if (fieldType().hasDocValues()) { + CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field == null) { - field = new CustomBinaryDocValuesField(names().indexName(), value); - 
context.doc().addWithKey(names().indexName(), field); + field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } else { field.add(value); } @@ -192,17 +212,11 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class CustomBinaryDocValuesField extends NumberFieldMapper.CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final ObjectArrayList bytesList; private int totalSize = 0; - public CustomBinaryDocValuesField(String name, byte[] bytes) { + public CustomBinaryDocValuesField(String name, byte[] bytes) { super(name); bytesList = new ObjectArrayList<>(); add(bytes); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index a9850b466b4..cfbcbc45ef1 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.ConstantScoreQuery; @@ -34,6 +33,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -58,12 +58,14 @@ public class 
BooleanFieldMapper extends AbstractFieldMapper { public static final String CONTENT_TYPE = "boolean"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new BooleanFieldType(); static { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.freeze(); } @@ -80,7 +82,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper { private Boolean nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; } @@ -99,8 +101,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper { @Override public BooleanFieldMapper build(BuilderContext context) { - return new BooleanFieldMapper(buildNames(context), boost, fieldType, docValues, nullValue, - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + return new BooleanFieldMapper(fieldType, docValues, nullValue, + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } @@ -125,17 +128,86 @@ public class BooleanFieldMapper extends AbstractFieldMapper { } } + public static class BooleanFieldType extends MappedFieldType { + + public BooleanFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected BooleanFieldType(BooleanFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new BooleanFieldType(this); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + if (value == null) { + return Values.FALSE; + } + if (value instanceof Boolean) { + return 
((Boolean) value) ? Values.TRUE : Values.FALSE; + } + String sValue; + if (value instanceof BytesRef) { + sValue = ((BytesRef) value).utf8ToString(); + } else { + sValue = value.toString(); + } + if (sValue.length() == 0) { + return Values.FALSE; + } + if (sValue.length() == 1 && sValue.charAt(0) == 'F') { + return Values.FALSE; + } + if (Booleans.parseBoolean(sValue, false)) { + return Values.TRUE; + } + return Values.FALSE; + } + + @Override + public Boolean value(Object value) { + if (value == null) { + return Boolean.FALSE; + } + String sValue = value.toString(); + if (sValue.length() == 0) { + return Boolean.FALSE; + } + if (sValue.length() == 1 && sValue.charAt(0) == 'F') { + return Boolean.FALSE; + } + if (Booleans.parseBoolean(sValue, false)) { + return Boolean.TRUE; + } + return Boolean.FALSE; + } + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + } + private Boolean nullValue; - protected BooleanFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, Boolean nullValue, - SimilarityProvider similarity, Loading normsLoading, + protected BooleanFieldMapper(MappedFieldType fieldType, Boolean docValues, Boolean nullValue, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -145,60 +217,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper { return new FieldDataType(CONTENT_TYPE); } - @Override - public boolean useTermQueryWithQueryString() { - 
return true; - } - - @Override - public Boolean value(Object value) { - if (value == null) { - return Boolean.FALSE; - } - String sValue = value.toString(); - if (sValue.length() == 0) { - return Boolean.FALSE; - } - if (sValue.length() == 1 && sValue.charAt(0) == 'F') { - return Boolean.FALSE; - } - if (Booleans.parseBoolean(sValue, false)) { - return Boolean.TRUE; - } - return Boolean.FALSE; - } - - @Override - public Object valueForSearch(Object value) { - return value(value); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - if (value == null) { - return Values.FALSE; - } - if (value instanceof Boolean) { - return ((Boolean) value) ? Values.TRUE : Values.FALSE; - } - String sValue; - if (value instanceof BytesRef) { - sValue = ((BytesRef) value).utf8ToString(); - } else { - sValue = value.toString(); - } - if (sValue.length() == 0) { - return Values.FALSE; - } - if (sValue.length() == 1 && sValue.charAt(0) == 'F') { - return Values.FALSE; - } - if (Booleans.parseBoolean(sValue, false)) { - return Values.TRUE; - } - return Values.FALSE; - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -209,7 +227,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !hasDocValues()) { + if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) { return; } @@ -228,9 +246,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper { if (value == null) { return; } - fields.add(new Field(names.indexName(), value ? "T" : "F", fieldType)); - if (hasDocValues()) { - fields.add(new SortedNumericDocValuesField(names.indexName(), value ? 1 : 0)); + fields.add(new Field(fieldType.names().indexName(), value ? 
"T" : "F", fieldType)); + if (fieldType().hasDocValues()) { + fields.add(new SortedNumericDocValuesField(fieldType.names().indexName(), value ? 1 : 0)); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index b1ad42a4d5c..1a65d58025e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -41,13 +40,13 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -66,7 +65,7 @@ public class ByteFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "byte"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ByteFieldType(); 
static { FIELD_TYPE.freeze(); @@ -80,7 +79,7 @@ public class ByteFieldMapper extends NumberFieldMapper { protected Byte nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_8_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT); builder = this; } @@ -91,14 +90,23 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public ByteFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - ByteFieldMapper fieldMapper = new ByteFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), - coerce(context), similarity, normsLoading, - fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), + coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? 
"_byte/max" : ("_byte/" + precisionStep); + return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,24 +130,81 @@ public class ByteFieldMapper extends NumberFieldMapper { } } + public static class ByteFieldType extends NumberFieldType { + public ByteFieldType() {} + + protected ByteFieldType(ByteFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new ByteFieldType(this); + } + + @Override + public Byte value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).byteValue(); + } + if (value instanceof BytesRef) { + return ((BytesRef) value).bytes[((BytesRef) value).offset]; + } + return Byte.parseByte(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : (int)parseValue(lowerTerm), + upperTerm == null ? 
null : (int)parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + byte iValue = Byte.parseByte(value); + byte iSim = fuzziness.asByte(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Byte nullValue; private String nullValueAsString; - protected ByteFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Byte nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, + protected ByteFieldMapper(MappedFieldType fieldType, Boolean docValues, + Byte nullValue, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, - ignoreMalformed, coerce, new NamedAnalyzer("_byte/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)), - new NamedAnalyzer("_byte/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -148,33 +213,7 @@ public class ByteFieldMapper extends NumberFieldMapper { return new FieldDataType("byte"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Byte value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).byteValue(); - } - if (value instanceof BytesRef) { - return ((BytesRef) value).bytes[((BytesRef) value).offset]; - } - return Byte.parseByte(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private byte parseValue(Object value) { + private static byte parseValue(Object value) { if (value instanceof Number) { return ((Number) value).byteValue(); } @@ -184,28 +223,6 @@ public class ByteFieldMapper extends NumberFieldMapper { return Byte.parseByte(value.toString()); } - private int parseValueAsInt(Object value) { - return parseValue(value); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - byte iValue = Byte.parseByte(value); - byte iSim = fuzziness.asByte(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValueAsInt(lowerTerm), - upperTerm == null ? 
null : parseValueAsInt(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -222,7 +239,7 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { byte value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -244,7 +261,7 @@ public class ByteFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).byteValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Byte.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Byte.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -255,7 +272,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -284,7 +301,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } else { value = (byte) parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } @@ -293,7 +310,7 @@ public class ByteFieldMapper extends NumberFieldMapper { field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -319,8 +336,8 @@ public class ByteFieldMapper extends NumberFieldMapper { 
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_8_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -332,22 +349,13 @@ public class ByteFieldMapper extends NumberFieldMapper { } } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomByteNumericField extends CustomNumericField { private final byte number; private final NumberFieldMapper mapper; - public CustomByteNumericField(NumberFieldMapper mapper, byte number, FieldType fieldType) { + public CustomByteNumericField(NumberFieldMapper mapper, byte number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index e8d0cf54990..eeb28e24121 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import 
org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; @@ -39,11 +38,12 @@ import org.elasticsearch.common.xcontent.XContentParser.NumberType; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider; @@ -72,7 +72,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { public static final String CONTENT_TYPE = "completion"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new CompletionFieldType(); static { FIELD_TYPE.setOmitNorms(true); @@ -114,7 +114,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @@ -148,7 +148,8 @@ public class CompletionFieldMapper extends AbstractFieldMapper { @Override public CompletionFieldMapper build(Mapper.BuilderContext context) { - return new CompletionFieldMapper(buildNames(context), indexAnalyzer, searchAnalyzer, null, similarity, payloads, + 
setupFieldType(context); + return new CompletionFieldMapper(fieldType, null, payloads, preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, this.contextMapping); } @@ -219,6 +220,35 @@ public class CompletionFieldMapper extends AbstractFieldMapper { } } + public static class CompletionFieldType extends MappedFieldType { + + public CompletionFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected CompletionFieldType(CompletionFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new CompletionFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean isSortable() { + return false; + } + } + private static final BytesRef EMPTY = new BytesRef(); private PostingsFormat postingsFormat; @@ -236,9 +266,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper { */ // Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility // with older postings formats such as Elasticsearch090 - public CompletionFieldMapper(Names names, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormat wrappedPostingsFormat, SimilarityProvider similarity, boolean payloads, + public CompletionFieldMapper(MappedFieldType fieldType, PostingsFormat wrappedPostingsFormat, boolean payloads, boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap contextMappings) { - super(names, 1.0f, Defaults.FIELD_TYPE, false, indexAnalyzer, searchAnalyzer, similarity, null, null, indexSettings, multiFields, copyTo); + super(fieldType, false, null, indexSettings, multiFields, copyTo); analyzingSuggestLookupProvider = new 
AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads); if (wrappedPostingsFormat == null) { // delayed until postingsFormat() is called @@ -424,7 +454,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { + "] at position " + i + " is a reserved character"); } } - return new SuggestField(names.indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider); + return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider); } public static int correctSubStringLen(String input, int len) { @@ -445,7 +475,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { private final CompletionTokenStream.ToFiniteStrings toFiniteStrings; private final ContextMapping.Context ctx; - public SuggestField(String name, ContextMapping.Context ctx, String value, FieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) { + public SuggestField(String name, ContextMapping.Context ctx, String value, MappedFieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) { super(name, value, type); this.payload = payload; this.toFiniteStrings = toFiniteStrings; @@ -461,12 +491,12 @@ public class CompletionFieldMapper extends AbstractFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names().shortName()) + builder.startObject(fieldType().names().shortName()) .field(Fields.TYPE, CONTENT_TYPE); - builder.field(Fields.ANALYZER, indexAnalyzer.name()); - if (indexAnalyzer.name().equals(searchAnalyzer.name()) == false) { - builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), searchAnalyzer.name()); + builder.field(Fields.ANALYZER, fieldType.indexAnalyzer().name()); + if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) { + 
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name()); } builder.field(Fields.PAYLOADS, this.payloads); builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators); @@ -494,18 +524,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper { return CONTENT_TYPE; } - @Override - public boolean isSortable() { - return false; - } - @Override public boolean supportsNullValue() { return false; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -514,14 +539,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper { return null; } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - public boolean isStoringPayloads() { return payloads; } @@ -531,16 +548,16 @@ public class CompletionFieldMapper extends AbstractFieldMapper { super.merge(mergeWith, mergeResult); CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith; if (payloads != fieldMergeWith.payloads) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different payload values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values"); } if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values"); } if (preserveSeparators != fieldMergeWith.preserveSeparators) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values"); } if(!ContextMapping.mappingsAreEqual(getContextMapping(), 
fieldMergeWith.getContextMapping())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values"); } if (!mergeResult.simulate()) { this.maxInputLength = fieldMergeWith.maxInputLength; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index e3842fe474a..b33182d8b17 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; @@ -44,8 +43,10 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -53,7 +54,6 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.internal.SearchContext; import 
org.joda.time.DateTimeZone; @@ -75,37 +75,35 @@ public class DateFieldMapper extends NumberFieldMapper { public static class Defaults extends NumberFieldMapper.Defaults { public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime", Locale.ROOT); - - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; + public static final DateFieldType FIELD_TYPE = new DateFieldType(); static { FIELD_TYPE.freeze(); } public static final String NULL_VALUE = null; - - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; } public static class Builder extends NumberFieldMapper.Builder { - protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected String nullValue = Defaults.NULL_VALUE; - protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; - private Locale locale; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; // do *NOT* rely on the default locale locale = Locale.ROOT; } + DateFieldType fieldType() { + return (DateFieldType)fieldType; + } + public Builder timeUnit(TimeUnit timeUnit) { - this.timeUnit = timeUnit; + fieldType().setTimeUnit(timeUnit); return this; } @@ -115,28 +113,42 @@ public class DateFieldMapper extends NumberFieldMapper { } public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - this.dateTimeFormatter = dateTimeFormatter; + fieldType().setDateTimeFormatter(dateTimeFormatter); return this; } @Override public DateFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - if (!locale.equals(dateTimeFormatter.locale())) { - dateTimeFormatter = new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), 
locale); - } - DateFieldMapper fieldMapper = new DateFieldMapper(buildNames(context), dateTimeFormatter, - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, timeUnit, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + DateFieldMapper fieldMapper = new DateFieldMapper(fieldType, + docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + protected void setupFieldType(BuilderContext context) { + FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + if (!locale.equals(dateTimeFormatter.locale())) { + fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); + } + super.setupFieldType(context); + } + public Builder locale(Locale locale) { this.locale = locale; return this; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter, precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -169,37 +181,222 @@ public class DateFieldMapper extends NumberFieldMapper { } } - protected FormatDateTimeFormatter dateTimeFormatter; + public static class DateFieldType extends NumberFieldType { - private final DateMathParser dateMathParser; + final class LateParsingQuery extends Query { + + final Object lowerTerm; + final Object upperTerm; + final boolean includeLower; + final boolean includeUpper; + final DateTimeZone timeZone; + final DateMathParser forcedDateParser; + + public LateParsingQuery(Object lowerTerm, Object upperTerm, 
boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.timeZone = timeZone; + this.forcedDateParser = forcedDateParser; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + return query.rewrite(reader); + } + + @Override + public String toString(String s) { + final StringBuilder sb = new StringBuilder(); + return sb.append(names().indexName()).append(':') + .append(includeLower ? '[' : '{') + .append((lowerTerm == null) ? "*" : lowerTerm.toString()) + .append(" TO ") + .append((upperTerm == null) ? "*" : upperTerm.toString()) + .append(includeUpper ? ']' : '}') + .append(ToStringUtils.boost(getBoost())) + .toString(); + } + } + + protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; + protected TimeUnit timeUnit = Defaults.TIME_UNIT; + protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + + public DateFieldType() {} + + protected DateFieldType(DateFieldType ref) { + super(ref); + this.dateTimeFormatter = ref.dateTimeFormatter; + this.timeUnit = ref.timeUnit; + this.dateMathParser = ref.dateMathParser; + } + + public DateFieldType clone() { + return new DateFieldType(this); + } + + public FormatDateTimeFormatter dateTimeFormatter() { + return dateTimeFormatter; + } + + public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + checkIfFrozen(); + this.dateTimeFormatter = dateTimeFormatter; + this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + } + + public TimeUnit timeUnit() { + return timeUnit; + } + + public void setTimeUnit(TimeUnit timeUnit) { + checkIfFrozen(); + this.timeUnit = timeUnit; + this.dateMathParser = new 
DateMathParser(dateTimeFormatter, timeUnit); + } + + protected DateMathParser dateMathParser() { + return dateMathParser; + } + + private long parseValue(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); + } + return dateTimeFormatter().parser().parseMillis(value.toString()); + } + + protected long parseStringValue(String value) { + try { + return dateTimeFormatter().parser().parseMillis(value); + } catch (RuntimeException e) { + try { + return timeUnit().toMillis(Long.parseLong(value)); + } catch (NumberFormatException e1) { + throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter().format() + "], and timestamp number with locale [" + dateTimeFormatter().locale() + "]", e); + } + } + } + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return parseStringValue(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Object valueForSearch(Object value) { + if (value instanceof String) { + // assume its the string that was indexed, just return it... 
(for example, with get) + return value; + } + Long val = value(value); + if (val == null) { + return null; + } + return dateTimeFormatter().printer().print(val); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = dateMathParser().parse(value, now()); + long iSim; + try { + iSim = fuzziness.asTimeValue().millis(); + } catch (Exception e) { + // not a time format + iSim = fuzziness.asLong(); + } + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinLong(terms); + long maxValue = NumericUtils.getMaxLong(terms); + return new FieldStats.Date( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() + ); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) { + // If the current search context is null we're parsing percolator query or a index alias filter. 
+ if (SearchContext.current() == null) { + return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } else { + return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } + } + + private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), + upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), + includeLower, includeUpper); + } + + public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + DateMathParser dateParser = dateMathParser(); + if (forcedDateParser != null) { + dateParser = forcedDateParser; + } + String strValue; + if (value instanceof BytesRef) { + strValue = ((BytesRef) value).utf8ToString(); + } else { + strValue = value.toString(); + } + return dateParser.parse(strValue, now(), inclusive, zone); + } + } private String nullValue; - protected final TimeUnit timeUnit; - - protected DateFieldMapper(Names names, FormatDateTimeFormatter dateTimeFormatter, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - String nullValue, TimeUnit timeUnit, Explicit ignoreMalformed,Explicit coerce, - SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, 
NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, precisionStep), - NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - this.dateTimeFormatter = dateTimeFormatter; + protected DateFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit ignoreMalformed,Explicit coerce, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; - this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return dateTimeFormatter; - } - - public DateMathParser dateMathParser() { - return dateMathParser; } @Override - public FieldType defaultFieldType() { + public DateFieldType fieldType() { + return (DateFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -208,63 +405,6 @@ public class DateFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return parseStringValue(value.toString()); - } - - /** Dates should return as a string. */ - @Override - public Object valueForSearch(Object value) { - if (value instanceof String) { - // assume its the string that was indexed, just return it... 
(for example, with get) - return value; - } - Long val = value(value); - if (val == null) { - return null; - } - return dateTimeFormatter.printer().print(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return dateTimeFormatter.parser().parseMillis(((BytesRef) value).utf8ToString()); - } - return dateTimeFormatter.parser().parseMillis(value.toString()); - } - - private String convertToString(Object value) { - if (value instanceof BytesRef) { - return ((BytesRef) value).utf8ToString(); - } - return value.toString(); - } - private static Callable now() { return new Callable() { @Override @@ -277,62 +417,6 @@ public class DateFieldMapper extends NumberFieldMapper { }; } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = dateMathParser.parse(value, now()); - long iSim; - try { - iSim = fuzziness.asTimeValue().millis(); - } catch (Exception e) { - // not a time format - iSim = fuzziness.asLong(); - } - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - public long parseToMilliseconds(Object value) { - return parseToMilliseconds(value, false, null, dateMathParser); - } - - public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - return parseToMilliseconds(convertToString(value), inclusive, zone, forcedDateParser); - } - - public long parseToMilliseconds(String value, boolean 
inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - DateMathParser dateParser = dateMathParser; - if (forcedDateParser != null) { - dateParser = forcedDateParser; - } - return dateParser.parse(value, now(), inclusive, zone); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); - } - - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) { - // If the current search context is null we're parsing percolator query or a index alias filter. - if (SearchContext.current() == null) { - return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - } else { - return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - } - } - - private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), - upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? 
dateMathParser : forcedDateParser), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -351,7 +435,7 @@ public class DateFieldMapper extends NumberFieldMapper { protected void innerParseCreateField(ParseContext context, List fields) throws IOException { String dateAsString = null; Long value = null; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue instanceof Number) { @@ -398,20 +482,20 @@ public class DateFieldMapper extends NumberFieldMapper { if (dateAsString != null) { assert value == null; if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), dateAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), dateAsString, boost); } - value = parseStringValue(dateAsString); + value = fieldType().parseStringValue(dateAsString); } else if (value != null) { - value = timeUnit.toMillis(value); + value = ((DateFieldType)fieldType).timeUnit().toMillis(value); } if (value != null) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); + CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -430,7 +514,9 @@ public class DateFieldMapper extends NumberFieldMapper { } if (!mergeResult.simulate()) { this.nullValue = ((DateFieldMapper) mergeWith).nullValue; - this.dateTimeFormatter = ((DateFieldMapper) mergeWith).dateTimeFormatter; + this.fieldType = this.fieldType.clone(); + fieldType().setDateTimeFormatter(((DateFieldMapper) mergeWith).fieldType().dateTimeFormatter()); + this.fieldType.freeze(); } } @@ -438,10 +524,10 @@ public class 
DateFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } - builder.field("format", dateTimeFormatter.format()); + builder.field("format", fieldType().dateTimeFormatter().format()); if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); } @@ -451,77 +537,18 @@ public class DateFieldMapper extends NumberFieldMapper { builder.field("include_in_all", false); } - if (includeDefaults || timeUnit != Defaults.TIME_UNIT) { - builder.field("numeric_resolution", timeUnit.name().toLowerCase(Locale.ROOT)); + if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { + builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); } // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... 
- if (dateTimeFormatter.locale() != null && dateTimeFormatter.locale() != Locale.ROOT) { - builder.field("locale", dateTimeFormatter.locale()); + if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { + builder.field("locale", fieldType().dateTimeFormatter().locale()); } else if (includeDefaults) { - if (dateTimeFormatter.locale() == null) { + if (fieldType().dateTimeFormatter().locale() == null) { builder.field("locale", Locale.ROOT); } else { - builder.field("locale", dateTimeFormatter.locale()); + builder.field("locale", fieldType().dateTimeFormatter().locale()); } } } - - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); - return new FieldStats.Date( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter - ); - } - - private long parseStringValue(String value) { - try { - return dateTimeFormatter.parser().parseMillis(value); - } catch (RuntimeException e) { - try { - return timeUnit.toMillis(Long.parseLong(value)); - } catch (NumberFormatException e1) { - throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number with locale [" + dateTimeFormatter.locale() + "]", e); - } - } - } - - public final class LateParsingQuery extends Query { - - final Object lowerTerm; - final Object upperTerm; - final boolean includeLower; - final boolean includeUpper; - final DateTimeZone timeZone; - final DateMathParser forcedDateParser; - - public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { - this.lowerTerm = lowerTerm; - this.upperTerm = upperTerm; - this.includeLower = includeLower; - this.includeUpper = includeUpper; 
- this.timeZone = timeZone; - this.forcedDateParser = forcedDateParser; - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - return query.rewrite(reader); - } - - @Override - public String toString(String s) { - final StringBuilder sb = new StringBuilder(); - return sb.append(names.indexName()).append(':') - .append(includeLower ? '[' : '{') - .append((lowerTerm == null) ? "*" : lowerTerm.toString()) - .append(" TO ") - .append((upperTerm == null) ? "*" : upperTerm.toString()) - .append(includeUpper ? ']' : '}') - .append(ToStringUtils.boost(getBoost())) - .toString(); - } - } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 48282232d28..a8f4a44e6ab 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -20,12 +20,10 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.DoubleArrayList; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -44,21 +42,23 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDoubleAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import 
org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.util.NumericUtils.doubleToSortableLong; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; import static org.elasticsearch.index.mapper.MapperBuilders.doubleField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -71,7 +71,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "double"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new DoubleFieldType(); static { FIELD_TYPE.freeze(); @@ -85,7 +85,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { protected Double nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -96,13 +96,22 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override public DoubleFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - DoubleFieldMapper fieldMapper = new DoubleFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - 
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + DoubleFieldMapper fieldMapper = new DoubleFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -126,24 +135,82 @@ public class DoubleFieldMapper extends NumberFieldMapper { } } + public static class DoubleFieldType extends NumberFieldType { + + public DoubleFieldType() {} + + protected DoubleFieldType(DoubleFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new DoubleFieldType(this); + } + + @Override + public Double value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToDouble((BytesRef) value); + } + return Double.parseDouble(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? 
null : parseDoubleValue(lowerTerm), + upperTerm == null ? null : parseDoubleValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + double iValue = Double.parseDouble(value); + double iSim = fuzziness.asDouble(); + return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); + double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); + return new FieldStats.Double( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } private Double nullValue; private String nullValueAsString; - protected DoubleFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Double nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep), NumericDoubleAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + protected DoubleFieldMapper(MappedFieldType fieldType, Boolean docValues, Double nullValue, Explicit ignoreMalformed, Explicit coerce, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = 
nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -152,53 +219,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { return new FieldDataType("double"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Double value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToDouble((BytesRef) value); - } - return Double.parseDouble(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - double iValue = Double.parseDouble(value); - double iSim = fuzziness.asDouble(); - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseDoubleValue(lowerTerm), - upperTerm == null ? 
null : parseDoubleValue(upperTerm), - includeLower, includeUpper); - } - public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper); + return NumericRangeQuery.newDoubleRange(fieldType.names().indexName(), fieldType.numericPrecisionStep(), lowerTerm, upperTerm, includeLower, includeUpper); } @Override @@ -217,7 +239,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { double value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -239,7 +261,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).doubleValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Double.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Double.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -250,7 +272,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -279,26 +301,26 @@ public class DoubleFieldMapper extends NumberFieldMapper { } else { value = parser.doubleValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + 
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, fieldType); + CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { if (useSortedNumericDocValues) { - addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); + addDocValue(context, fields, doubleToSortableLong(value)); } else { - CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomDoubleNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomDoubleNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } @@ -325,8 +347,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -339,22 +361,13 @@ public class DoubleFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms 
terms, int maxDoc) throws IOException { - double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); - return new FieldStats.Double( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomDoubleNumericField extends CustomNumericField { private final double number; private final NumberFieldMapper mapper; - public CustomDoubleNumericField(NumberFieldMapper mapper, double number, FieldType fieldType) { + public CustomDoubleNumericField(NumberFieldMapper mapper, double number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; @@ -376,12 +389,6 @@ public class DoubleFieldMapper extends NumberFieldMapper { public static class CustomDoubleNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final DoubleArrayList values; public CustomDoubleNumericDocValuesField(String name, double value) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 4a3f6c2e3fc..50c5f578a48 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -20,11 +20,9 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.FloatArrayList; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; @@ -45,21 +43,23 @@ import 
org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericFloatAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.util.NumericUtils.floatToSortableInt; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.index.mapper.MapperBuilders.floatField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -72,7 +72,7 @@ public class FloatFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "float"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new FloatFieldType(); static { FIELD_TYPE.freeze(); @@ -86,7 +86,7 @@ public class FloatFieldMapper extends NumberFieldMapper { protected Float nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -97,13 +97,22 @@ public class 
FloatFieldMapper extends NumberFieldMapper { @Override public FloatFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - FloatFieldMapper fieldMapper = new FloatFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + FloatFieldMapper fieldMapper = new FloatFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -127,23 +136,83 @@ public class FloatFieldMapper extends NumberFieldMapper { } } + public static class FloatFieldType extends NumberFieldType { + + public FloatFieldType() {} + + protected FloatFieldType(FloatFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new FloatFieldType(this); + } + + @Override + public Float value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToFloat((BytesRef) value); + } + return Float.parseFloat(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + int intValue = NumericUtils.floatToSortableInt(parseValue(value)); + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact 
match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + float iValue = Float.parseFloat(value); + final float iSim = fuzziness.asFloat(); + return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); + float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); + return new FieldStats.Float( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Float nullValue; private String nullValueAsString; - protected FloatFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected FloatFieldMapper(MappedFieldType fieldType, Boolean docValues, Float nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + @Nullable Settings fieldDataSettings, Settings indexSettings, 
MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -152,34 +221,7 @@ public class FloatFieldMapper extends NumberFieldMapper { return new FieldDataType("float"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Float value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).floatValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToFloat((BytesRef) value); - } - return Float.parseFloat(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - int intValue = NumericUtils.floatToSortableInt(parseValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private float parseValue(Object value) { + private static float parseValue(Object value) { if (value instanceof Number) { return ((Number) value).floatValue(); } @@ -189,24 +231,6 @@ public class FloatFieldMapper extends NumberFieldMapper { return Float.parseFloat(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - float iValue = Float.parseFloat(value); - final float iSim = fuzziness.asFloat(); - return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return 
NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -223,7 +247,7 @@ public class FloatFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { float value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -245,7 +269,7 @@ public class FloatFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).floatValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Float.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Float.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -256,7 +280,7 @@ public class FloatFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -285,26 +309,26 @@ public class FloatFieldMapper extends NumberFieldMapper { } else { value = parser.floatValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomFloatNumericField field = new CustomFloatNumericField(this, value, fieldType); + 
CustomFloatNumericField field = new CustomFloatNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { if (useSortedNumericDocValues) { - addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); + addDocValue(context, fields, floatToSortableInt(value)); } else { - CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomFloatNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomFloatNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } @@ -332,8 +356,8 @@ public class FloatFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -346,22 +370,13 @@ public class FloatFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); - return new FieldStats.Float( - maxDoc, terms.getDocCount(), 
terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomFloatNumericField extends CustomNumericField { private final float number; private final NumberFieldMapper mapper; - public CustomFloatNumericField(NumberFieldMapper mapper, float number, FieldType fieldType) { + public CustomFloatNumericField(NumberFieldMapper mapper, float number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; @@ -383,12 +398,6 @@ public class FloatFieldMapper extends NumberFieldMapper { public static class CustomFloatNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final FloatArrayList values; public CustomFloatNumericDocValuesField(String name, float value) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 5f05e72b4d3..893722d5a0a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import 
org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -67,7 +67,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "integer"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IntegerFieldType(); static { FIELD_TYPE.freeze(); @@ -81,7 +81,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected Integer nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -92,13 +92,23 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public IntegerFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - IntegerFieldMapper fieldMapper = new IntegerFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, - nullValue, ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, + setupFieldType(context); + IntegerFieldMapper fieldMapper = new IntegerFieldMapper(fieldType, docValues, + nullValue, ignoreMalformed(context), coerce(context), fieldDataSettings, 
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,23 +132,83 @@ public class IntegerFieldMapper extends NumberFieldMapper { } } + public static class IntegerFieldType extends NumberFieldType { + + public IntegerFieldType() {} + + protected IntegerFieldType(IntegerFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new IntegerFieldType(this); + } + + @Override + public Integer value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).intValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToInt((BytesRef) value); + } + return Integer.parseInt(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? 
null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + int iValue = Integer.parseInt(value); + int iSim = fuzziness.asInt(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Integer nullValue; private String nullValueAsString; - protected IntegerFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected IntegerFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep), NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -147,33 +217,9 @@ public class IntegerFieldMapper extends NumberFieldMapper { return new FieldDataType("int"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - @Override - public Integer value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).intValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToInt((BytesRef) value); - } - return Integer.parseInt(value.toString()); - } - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private int parseValue(Object value) { + private static int parseValue(Object value) { if (value instanceof Number) { return ((Number) value).intValue(); } @@ -183,24 +229,6 @@ public class IntegerFieldMapper extends NumberFieldMapper { return Integer.parseInt(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - int iValue = Integer.parseInt(value); - int iSim = fuzziness.asInt(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -217,7 +245,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { int value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -239,7 +267,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).intValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Integer.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Integer.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -250,7 +278,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -279,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } else { value = parser.intValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } @@ -288,11 +316,11 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected void addIntegerFields(ParseContext context, List fields, int value, float boost) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomIntegerNumericField field = new 
CustomIntegerNumericField(this, value, fieldType); + CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -322,8 +350,8 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -336,22 +364,13 @@ public class IntegerFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomIntegerNumericField extends CustomNumericField { private final int number; private final NumberFieldMapper mapper; - public CustomIntegerNumericField(NumberFieldMapper mapper, int number, FieldType fieldType) { + public CustomIntegerNumericField(NumberFieldMapper mapper, int number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index cd9b9ca5df1..ccf20f976f1 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -67,7 +67,7 @@ public class LongFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "long"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new LongFieldType(); static { FIELD_TYPE.freeze(); @@ -81,7 +81,7 @@ public class LongFieldMapper extends NumberFieldMapper { protected Long nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new 
FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -92,13 +92,22 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public LongFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - LongFieldMapper fieldMapper = new LongFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, - fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + LongFieldMapper fieldMapper = new LongFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,23 +131,83 @@ public class LongFieldMapper extends NumberFieldMapper { } } + public static class LongFieldType extends NumberFieldType { + + public LongFieldType() {} + + protected LongFieldType(LongFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new LongFieldType(this); + } + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return Long.parseLong(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new 
BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseLongValue(lowerTerm), + upperTerm == null ? null : parseLongValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = Long.parseLong(value); + final long iSim = fuzziness.asLong(); + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinLong(terms); + long maxValue = NumericUtils.getMaxLong(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Long nullValue; private String nullValueAsString; - protected LongFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected LongFieldMapper(MappedFieldType fieldType, Boolean docValues, Long nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericLongAnalyzer.buildNamedAnalyzer(precisionStep), NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, 
multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -147,50 +216,6 @@ public class LongFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return Long.parseLong(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = Long.parseLong(value); - final long iSim = fuzziness.asLong(); - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseLongValue(lowerTerm), - upperTerm == null ? 
null : parseLongValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -207,7 +232,7 @@ public class LongFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { long value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -229,7 +254,7 @@ public class LongFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).longValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Long.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Long.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -240,7 +265,7 @@ public class LongFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -269,16 +294,16 @@ public class LongFieldMapper extends NumberFieldMapper { } else { value = parser.longValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); + CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - 
if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -304,8 +329,8 @@ public class LongFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -317,22 +342,13 @@ public class LongFieldMapper extends NumberFieldMapper { } } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomLongNumericField extends CustomNumericField { private final long number; private final NumberFieldMapper mapper; - public CustomLongNumericField(NumberFieldMapper mapper, long number, FieldType fieldType) { + public CustomLongNumericField(NumberFieldMapper mapper, long number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java index 7c9c920a3c6..9401081a79f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java @@ -20,13 +20,16 @@ package org.elasticsearch.index.mapper.core; import 
org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericDateAnalyzer; +import org.elasticsearch.index.analysis.NumericLongAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -36,7 +39,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.murmur3Field; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -50,20 +52,30 @@ public class Murmur3FieldMapper extends LongFieldMapper { public static class Builder extends NumberFieldMapper.Builder { public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Integer.MAX_VALUE); + super(name, Defaults.FIELD_TYPE, Integer.MAX_VALUE); builder = this; builder.precisionStep(Integer.MAX_VALUE); } @Override public Murmur3FieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, null, - ignoreMalformed(context), coerce(context), similarity, normsLoading, + setupFieldType(context); + Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(fieldType, docValues, null, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), 
multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -92,13 +104,12 @@ public class Murmur3FieldMapper extends LongFieldMapper { } } - protected Murmur3FieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected Murmur3FieldMapper(MappedFieldType fieldType, Boolean docValues, Long nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce, - similarity, normsLoading, fieldDataSettings, - indexSettings, multiFields, copyTo); + super(fieldType, docValues, nullValue, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index ecb2e742e79..9d2dddb3db0 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -31,9 +31,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; -import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.ByteArrayDataOutput; import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -45,14 +43,13 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.io.Reader; @@ -70,16 +67,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A public static final int PRECISION_STEP_32_BIT = 8; // 4tpv public static final int PRECISION_STEP_64_BIT = 16; // 4tpv - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); - - static { - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setStoreTermVectors(false); - FIELD_TYPE.freeze(); - } - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); public static final Explicit COERCE = new Explicit<>(true, false); } @@ -90,9 +77,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A private Boolean coerce; - public Builder(String name, FieldType fieldType, int defaultPrecisionStep) { + public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { super(name, fieldType); - fieldType.setNumericPrecisionStep(defaultPrecisionStep); + this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); } public T precisionStep(int 
precisionStep) { @@ -129,10 +116,60 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } return Defaults.COERCE; } - + + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f); + int precisionStep = fieldType.numericPrecisionStep(); + if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { + fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); + } + fieldType.setIndexAnalyzer(makeNumberAnalyzer(fieldType.numericPrecisionStep())); + fieldType.setSearchAnalyzer(makeNumberAnalyzer(Integer.MAX_VALUE)); + } + + protected abstract NamedAnalyzer makeNumberAnalyzer(int precisionStep); + + protected abstract int maxPrecisionStep(); } - protected int precisionStep; + public static abstract class NumberFieldType extends MappedFieldType { + + public NumberFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + setTokenized(false); + setOmitNorms(true); + setIndexOptions(IndexOptions.DOCS); + setStoreTermVectors(false); + } + + protected NumberFieldType(NumberFieldType ref) { + super(ref); + } + + public abstract NumberFieldType clone(); + + @Override + public abstract Object value(Object value); + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + + @Override + public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public boolean isNumeric() { + return true; + } + } protected Boolean includeInAll; @@ -151,7 +188,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A private ThreadLocal tokenStream = new ThreadLocal() { @Override protected NumericTokenStream initialValue() { - return new NumericTokenStream(precisionStep); + return new 
NumericTokenStream(fieldType.numericPrecisionStep()); } }; @@ -183,23 +220,14 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } }; - protected NumberFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Explicit ignoreMalformed, Explicit coerce, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, + protected NumberFieldMapper(MappedFieldType fieldType, Boolean docValues, + Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { // LUCENE 4 UPGRADE: Since we can't do anything before the super call, we have to push the boost check down to subclasses - super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { - this.precisionStep = Integer.MAX_VALUE; - } else { - this.precisionStep = precisionStep; - } + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); this.ignoreMalformed = ignoreMalformed; this.coerce = coerce; - Version v = Version.indexCreated(indexSettings); - this.useSortedNumericDocValues = v.onOrAfter(Version.V_1_4_0_Beta1); + this.useSortedNumericDocValues = Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1); } @Override @@ -221,12 +249,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A includeInAll = null; } - protected abstract int maxPrecisionStep(); - - public int precisionStep() { - return this.precisionStep; - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { RuntimeException e = null; @@ -247,41 +269,22 @@ public abstract class NumberFieldMapper extends 
AbstractFieldMapper implements A protected final void addDocValue(ParseContext context, List fields, long value) { if (useSortedNumericDocValues) { - fields.add(new SortedNumericDocValuesField(names().indexName(), value)); + fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value)); } else { - CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomLongNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomLongNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } - /** - * Use the field query created here when matching on numbers. - */ - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - - @Override - public final Query termQuery(Object value, @Nullable QueryParseContext context) { - return new TermQuery(new Term(names.indexName(), indexedValueForSearch(value))); - } - - @Override - public abstract Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context); - - @Override - public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); - /** * Converts an object value into a double */ - public double parseDoubleValue(Object value) { + public static double parseDoubleValue(Object value) { if (value instanceof Number) { return ((Number) value).doubleValue(); } @@ -296,7 +299,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A /** * Converts an object value into a long */ - public long parseLongValue(Object value) { + public static 
long parseLongValue(Object value) { if (value instanceof Number) { return ((Number) value).longValue(); } @@ -308,16 +311,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A return Long.parseLong(value.toString()); } - /** - * Override the default behavior (to return the string, and return the actual Number instance). - * - * @param value - */ - @Override - public Object valueForSearch(Object value) { - return value(value); - } - @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { super.merge(mergeWith, mergeResult); @@ -326,7 +319,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } if (!mergeResult.simulate()) { NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith; - this.precisionStep = nfmMergeWith.precisionStep; + this.fieldType = this.fieldType.clone(); + this.fieldType.setNumericPrecisionStep(nfmMergeWith.fieldType.numericPrecisionStep()); + this.fieldType.freeze(); this.includeInAll = nfmMergeWith.includeInAll; if (nfmMergeWith.ignoreMalformed.explicit()) { this.ignoreMalformed = nfmMergeWith.ignoreMalformed; @@ -342,13 +337,13 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } protected NumericTokenStream popCachedStream() { - if (precisionStep == 4) { + if (fieldType.numericPrecisionStep() == 4) { return tokenStream4.get(); - } else if (precisionStep == 8) { + } else if (fieldType.numericPrecisionStep() == 8) { return tokenStream8.get(); - } else if (precisionStep == 16) { + } else if (fieldType.numericPrecisionStep() == 16) { return tokenStream16.get(); - } else if (precisionStep == Integer.MAX_VALUE) { + } else if (fieldType.numericPrecisionStep() == Integer.MAX_VALUE) { return tokenStreamMax.get(); } return tokenStream.get(); @@ -359,8 +354,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A protected final NumberFieldMapper mapper; - public 
CustomNumericField(NumberFieldMapper mapper, Number value, FieldType fieldType) { - super(mapper.names().indexName(), fieldType); + public CustomNumericField(NumberFieldMapper mapper, Number value, MappedFieldType fieldType) { + super(mapper.fieldType().names().indexName(), fieldType); this.mapper = mapper; if (value != null) { this.fieldsData = value; @@ -431,13 +426,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } - public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } + public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField { private final LongArrayList values; @@ -481,9 +471,4 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A builder.field("coerce", coerce.value()); } } - - @Override - public boolean isNumeric() { - return true; - } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index f195862ac74..382f30eab40 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -43,13 +42,13 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import 
org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -69,7 +68,7 @@ public class ShortFieldMapper extends NumberFieldMapper { public static final int DEFAULT_PRECISION_STEP = 8; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ShortFieldType(); static { FIELD_TYPE.freeze(); @@ -83,7 +82,7 @@ public class ShortFieldMapper extends NumberFieldMapper { protected Short nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), DEFAULT_PRECISION_STEP); + super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP); builder = this; } @@ -94,13 +93,24 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override public ShortFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - ShortFieldMapper fieldMapper = new ShortFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, + setupFieldType(context); + ShortFieldMapper fieldMapper = new ShortFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); 
return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? "_short/max" : ("_short/" + precisionStep); + return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -124,23 +134,84 @@ public class ShortFieldMapper extends NumberFieldMapper { } } + public static class ShortFieldType extends NumberFieldType { + + public ShortFieldType() {} + + protected ShortFieldType(ShortFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new ShortFieldType(this); + } + + @Override + public Short value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).shortValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToShort((BytesRef) value); + } + return Short.parseShort(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : (int)parseValue(lowerTerm), + upperTerm == null ? 
null : (int)parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + short iValue = Short.parseShort(value); + short iSim = fuzziness.asShort(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Short nullValue; private String nullValueAsString; - protected ShortFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected ShortFieldMapper(MappedFieldType fieldType, Boolean docValues, Short nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, new NamedAnalyzer("_short/" + precisionStep, - new NumericIntegerAnalyzer(precisionStep)), new NamedAnalyzer("_short/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -149,33 +220,7 @@ public class ShortFieldMapper extends NumberFieldMapper { return new FieldDataType("short"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Short value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).shortValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToShort((BytesRef) value); - } - return Short.parseShort(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private short parseValue(Object value) { + private static short parseValue(Object value) { if (value instanceof Number) { return ((Number) value).shortValue(); } @@ -185,28 +230,6 @@ public class ShortFieldMapper extends NumberFieldMapper { return Short.parseShort(value.toString()); } - private int parseValueAsInt(Object value) { - return parseValue(value); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - short iValue = Short.parseShort(value); - short iSim = fuzziness.asShort(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValueAsInt(lowerTerm), - upperTerm == null ? 
null : parseValueAsInt(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -223,7 +246,7 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { short value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -245,7 +268,7 @@ public class ShortFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).shortValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Short.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Short.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -256,7 +279,7 @@ public class ShortFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -285,16 +308,16 @@ public class ShortFieldMapper extends NumberFieldMapper { } else { value = parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomShortNumericField field = new CustomShortNumericField(this, value, fieldType); + CustomShortNumericField field = new CustomShortNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); 
fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -320,8 +343,8 @@ public class ShortFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != DEFAULT_PRECISION_STEP) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != DEFAULT_PRECISION_STEP) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -334,22 +357,13 @@ public class ShortFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomShortNumericField extends CustomNumericField { private final short number; private final NumberFieldMapper mapper; - public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) { + public CustomShortNumericField(NumberFieldMapper mapper, short number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 5dfffc83809..0fd77fe5875 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -19,9 +19,7 @@ package org.elasticsearch.index.mapper.core; -import 
org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; @@ -34,19 +32,20 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.index.mapper.MapperBuilders.stringField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; @@ -59,7 +58,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa public static final String CONTENT_TYPE = "string"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new StringFieldType(); static { FIELD_TYPE.freeze(); @@ -77,12 +76,10 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP; - protected NamedAnalyzer searchQuotedAnalyzer; - 
protected int ignoreAbove = Defaults.IGNORE_ABOVE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @@ -94,9 +91,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) { super.searchAnalyzer(searchAnalyzer); - if (searchQuotedAnalyzer == null) { - searchQuotedAnalyzer = searchAnalyzer; - } return this; } @@ -106,7 +100,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) { - this.searchQuotedAnalyzer = analyzer; + this.fieldType.setSearchQuoteAnalyzer(analyzer); return builder; } @@ -118,20 +112,20 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override public StringFieldMapper build(BuilderContext context) { if (positionOffsetGap > 0) { - indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionOffsetGap); - searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionOffsetGap); - searchQuotedAnalyzer = new NamedAnalyzer(searchQuotedAnalyzer, positionOffsetGap); + fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionOffsetGap)); + fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionOffsetGap)); + fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionOffsetGap)); } // if the field is not analyzed, then by default, we should omit norms and have docs only // index options, as probably what the user really wants // if they are set explicitly, we will use those values // we also change the values on the default field type so that toXContent emits what // differs from the defaults - FieldType defaultFieldType = new FieldType(Defaults.FIELD_TYPE); + MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone(); if (fieldType.indexOptions() != IndexOptions.NONE && 
!fieldType.tokenized()) { defaultFieldType.setOmitNorms(true); defaultFieldType.setIndexOptions(IndexOptions.DOCS); - if (!omitNormsSet && boost == Defaults.BOOST) { + if (!omitNormsSet && fieldType.boost() == Defaults.BOOST) { fieldType.setOmitNorms(true); } if (!indexOptionsSet) { @@ -139,9 +133,9 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } } defaultFieldType.freeze(); - StringFieldMapper fieldMapper = new StringFieldMapper(buildNames(context), - boost, fieldType, defaultFieldType, docValues, nullValue, indexAnalyzer, searchAnalyzer, searchQuotedAnalyzer, - positionOffsetGap, ignoreAbove, similarity, normsLoading, + setupFieldType(context); + StringFieldMapper fieldMapper = new StringFieldMapper( + fieldType, defaultFieldType, docValues, nullValue, positionOffsetGap, ignoreAbove, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; @@ -174,14 +168,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1)); // we need to update to actual analyzers if they are not set in this case... // so we can inject the position offset gap... 
- if (builder.indexAnalyzer == null) { - builder.indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer(); + if (builder.fieldType.indexAnalyzer() == null) { + builder.fieldType.setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer()); } - if (builder.searchAnalyzer == null) { - builder.searchAnalyzer = parserContext.analysisService().defaultSearchAnalyzer(); + if (builder.fieldType.searchAnalyzer() == null) { + builder.fieldType.setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer()); } - if (builder.searchQuotedAnalyzer == null) { - builder.searchQuotedAnalyzer = parserContext.analysisService().defaultSearchQuoteAnalyzer(); + if (builder.fieldType.searchQuoteAnalyzer() == null) { + builder.fieldType.setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer()); } iterator.remove(); } else if (propName.equals("ignore_above")) { @@ -195,32 +189,50 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } } + public static class StringFieldType extends MappedFieldType { + + public StringFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected StringFieldType(StringFieldType ref) { + super(ref); + } + + public StringFieldType clone() { + return new StringFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } + private String nullValue; private Boolean includeInAll; private int positionOffsetGap; - private NamedAnalyzer searchQuotedAnalyzer; private int ignoreAbove; - private final FieldType defaultFieldType; + private final MappedFieldType defaultFieldType; - protected StringFieldMapper(Names names, float boost, FieldType fieldType, FieldType defaultFieldType, Boolean docValues, - String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - NamedAnalyzer searchQuotedAnalyzer, int positionOffsetGap, int ignoreAbove, - 
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + protected StringFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Boolean docValues, + String nullValue, int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - if (fieldType.tokenized() && fieldType.indexOptions() != IndexOptions.NONE && hasDocValues()) { - throw new MapperParsingException("Field [" + names.fullName() + "] cannot be analyzed and have doc values"); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); + if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { + throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values"); } this.defaultFieldType = defaultFieldType; this.nullValue = nullValue; this.positionOffsetGap = positionOffsetGap; - this.searchQuotedAnalyzer = searchQuotedAnalyzer != null ? 
searchQuotedAnalyzer : this.searchAnalyzer; this.ignoreAbove = ignoreAbove; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -248,14 +260,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa includeInAll = null; } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - @Override protected boolean customBoost() { return true; @@ -269,11 +273,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa return ignoreAbove; } - @Override - public Analyzer searchQuoteAnalyzer() { - return this.searchQuotedAnalyzer; - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -284,7 +283,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, boost); + ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, fieldType.boost()); if (valueAndBoost.value() == null) { return; } @@ -292,19 +291,19 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), valueAndBoost.value(), valueAndBoost.boost()); + context.allEntries().addText(fieldType.names().fullName(), valueAndBoost.value(), valueAndBoost.boost()); } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(names.indexName(), valueAndBoost.value(), fieldType); + Field field = new Field(fieldType.names().indexName(), valueAndBoost.value(), fieldType); field.setBoost(valueAndBoost.boost()); fields.add(field); } - if (hasDocValues()) { - fields.add(new SortedSetDocValuesField(names.indexName(), new 
BytesRef(valueAndBoost.value()))); + if (fieldType().hasDocValues()) { + fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(valueAndBoost.value()))); } if (fields.isEmpty()) { - context.ignoredValue(names.indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value()); } } @@ -381,13 +380,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) { builder.field("position_offset_gap", positionOffsetGap); } - if (searchQuotedAnalyzer != null && !searchQuotedAnalyzer.name().equals(searchAnalyzer.name())) { - builder.field("search_quote_analyzer", searchQuotedAnalyzer.name()); + NamedAnalyzer searchQuoteAnalyzer = fieldType.searchQuoteAnalyzer(); + if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType.searchAnalyzer().name())) { + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); } else if (includeDefaults) { - if (searchQuotedAnalyzer == null) { + if (searchQuoteAnalyzer == null) { builder.field("search_quote_analyzer", "default"); } else { - builder.field("search_quote_analyzer", searchQuotedAnalyzer.name()); + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); } } if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index c13268b1988..eb53c172df4 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -22,18 +22,18 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.document.Field; 
-import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; import org.elasticsearch.index.similarity.SimilarityProvider; @@ -43,6 +43,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; import static org.elasticsearch.index.mapper.MapperBuilders.tokenCountField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -55,6 +56,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { public static final String CONTENT_TYPE = "token_count"; public static class Defaults extends IntegerFieldMapper.Defaults { + } public static class Builder extends NumberFieldMapper.Builder { @@ -62,7 +64,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { private NamedAnalyzer analyzer; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -82,13 +84,23 @@ public class 
TokenCountFieldMapper extends IntegerFieldMapper { @Override public TokenCountFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, context.indexSettings(), + setupFieldType(context); + TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), analyzer, multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,34 +134,33 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { private NamedAnalyzer analyzer; - protected TokenCountFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Integer nullValue, - Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer, + protected TokenCountFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue, + Explicit ignoreMalformed, Explicit coerce, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, nullValue, 
ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.analyzer = analyzer; } @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, boost); + ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType.boost()); if (valueAndBoost.value() == null && nullValue() == null) { return; } - if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored() || hasDocValues()) { + if (fieldType.indexOptions() != NONE || fieldType.stored() || fieldType().hasDocValues()) { int count; if (valueAndBoost.value() == null) { count = nullValue(); } else { - count = countPositions(analyzer.analyzer().tokenStream(names().shortName(), valueAndBoost.value())); + count = countPositions(analyzer.analyzer().tokenStream(fieldType().names().shortName(), valueAndBoost.value())); } addIntegerFields(context, fields, count, valueAndBoost.boost()); } if (fields.isEmpty()) { - context.ignoredValue(names.indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value()); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 983b83ce054..d7434a574dd 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import 
org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.object.ObjectMapper; diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index db0c9899c15..4485aed953b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -24,10 +24,9 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.base.Objects; import com.google.common.collect.Iterators; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -41,10 +40,9 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -56,7 +54,6 @@ import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; import 
org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.ArrayList; @@ -97,7 +94,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static class Defaults { public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; - public static final boolean STORE = false; public static final boolean ENABLE_LATLON = false; public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; @@ -107,7 +103,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static final boolean VALIDATE_LAT = true; public static final boolean VALIDATE_LON = true; - public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -131,16 +127,15 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal private int geoHashPrecision = Defaults.GEO_HASH_PRECISION; - boolean validateLat = Defaults.VALIDATE_LAT; - boolean validateLon = Defaults.VALIDATE_LON; - boolean normalizeLat = Defaults.NORMALIZE_LAT; - boolean normalizeLon = Defaults.NORMALIZE_LON; - public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; } + GeoPointFieldType fieldType() { + return (GeoPointFieldType)fieldType; + } + @Override public Builder multiFieldPathType(ContentPath.Type pathType) { this.pathType = pathType; @@ -185,6 +180,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal DoubleFieldMapper latMapper = null; DoubleFieldMapper lonMapper = null; + GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; 
context.path().add(name); if (enableLatLon) { @@ -196,10 +192,13 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); } StringFieldMapper geohashMapper = null; - if (enableGeoHash) { + if (enableGeoHash || enableGeohashPrefix) { + // TODO: possible also implicitly enable geohash if geohash precision is set geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).omitNorms(true).indexOptions(IndexOptions.DOCS).build(context); + geoPointFieldType.setGeohashEnabled(geohashMapper.fieldType(), geoHashPrecision, enableGeohashPrefix); } context.path().remove(); @@ -208,11 +207,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal // this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to // store them as a single token. 
fieldType.setTokenized(false); + fieldType.setHasDocValues(false); + setupFieldType(context); - return new GeoPointFieldMapper(buildNames(context), fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, - geoHashPrecision, latMapper, lonMapper, geohashMapper, validateLon, validateLat, normalizeLon, normalizeLat - , multiFieldsBuilder.build(this, context)); + return new GeoPointFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings(), origPathType, + latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context)); } } @@ -251,24 +250,24 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } iterator.remove(); } else if (fieldName.equals("validate")) { - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode)); + builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("validate_lon")) { - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("validate_lat")) { - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("normalize")) { - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode)); + builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode)); 
iterator.remove(); } else if (fieldName.equals("normalize_lat")) { - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("normalize_lon")) { - builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { iterator.remove(); @@ -278,6 +277,128 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } } + public static class GeoPointFieldType extends MappedFieldType { + + private MappedFieldType geohashFieldType; + private int geohashPrecision; + private boolean geohashPrefixEnabled; + + private MappedFieldType latFieldType; + private MappedFieldType lonFieldType; + private boolean validateLon = true; + private boolean validateLat = true; + private boolean normalizeLon = true; + private boolean normalizeLat = true; + + public GeoPointFieldType() { + super(StringFieldMapper.Defaults.FIELD_TYPE); + } + + protected GeoPointFieldType(GeoPointFieldType ref) { + super(ref); + this.geohashFieldType = ref.geohashFieldType; // copying ref is ok, this can never be modified + this.geohashPrecision = ref.geohashPrecision; + this.geohashPrefixEnabled = ref.geohashPrefixEnabled; + this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified + this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified + this.validateLon = ref.validateLon; + this.validateLat = ref.validateLat; + this.normalizeLon = ref.normalizeLon; + this.normalizeLat = ref.normalizeLat; + } + + @Override + public MappedFieldType clone() { + return new GeoPointFieldType(this); + } + + public boolean isGeohashEnabled() { + return geohashFieldType != null; + } + + public MappedFieldType 
geohashFieldType() { + return geohashFieldType; + } + + public int geohashPrecision() { + return geohashPrecision; + } + + public boolean isGeohashPrefixEnabled() { + return geohashPrefixEnabled; + } + + public void setGeohashEnabled(MappedFieldType geohashFieldType, int geohashPrecision, boolean geohashPrefixEnabled) { + checkIfFrozen(); + this.geohashFieldType = geohashFieldType; + this.geohashPrecision = geohashPrecision; + this.geohashPrefixEnabled = geohashPrefixEnabled; + } + + public boolean isLatLonEnabled() { + return latFieldType != null; + } + + public MappedFieldType latFieldType() { + return latFieldType; + } + + public MappedFieldType lonFieldType() { + return lonFieldType; + } + + public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) { + checkIfFrozen(); + this.latFieldType = latFieldType; + this.lonFieldType = lonFieldType; + } + + public boolean validateLon() { + return validateLon; + } + + public void setValidateLon(boolean validateLon) { + checkIfFrozen(); + this.validateLon = validateLon; + } + + public boolean validateLat() { + return validateLat; + } + + public void setValidateLat(boolean validateLat) { + checkIfFrozen(); + this.validateLat = validateLat; + } + + public boolean normalizeLon() { + return normalizeLon; + } + + public void setNormalizeLon(boolean normalizeLon) { + checkIfFrozen(); + this.normalizeLon = normalizeLon; + } + + public boolean normalizeLat() { + return normalizeLat; + } + + public void setNormalizeLat(boolean normalizeLat) { + checkIfFrozen(); + this.normalizeLat = normalizeLat; + } + + @Override + public GeoPoint value(Object value) { + if (value instanceof GeoPoint) { + return (GeoPoint) value; + } else { + return GeoPoint.parseFromLatLon(value.toString()); + } + } + } + /** * A byte-aligned fixed-length encoding for latitudes and longitudes. 
*/ @@ -405,52 +526,19 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal private final ContentPath.Type pathType; - private final boolean enableLatLon; - - private final boolean enableGeoHash; - - private final boolean enableGeohashPrefix; - - private final Integer precisionStep; - - private final int geoHashPrecision; - private final DoubleFieldMapper latMapper; private final DoubleFieldMapper lonMapper; private final StringFieldMapper geohashMapper; - private boolean validateLon; - private boolean validateLat; - - private final boolean normalizeLon; - private final boolean normalizeLat; - - public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues, - NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings, - ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, - boolean validateLon, boolean validateLat, - boolean normalizeLon, boolean normalizeLat, MultiFields multiFields) { - super(names, 1f, fieldType, docValues, null, indexAnalyzer, similarity, null, fieldDataSettings, indexSettings, multiFields, null); + public GeoPointFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, + ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) { + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, null); this.pathType = pathType; - this.enableLatLon = enableLatLon; - this.enableGeoHash = enableGeoHash || enableGeohashPrefix; // implicitly enable geohashes if geohash_prefix is set - this.enableGeohashPrefix = enableGeohashPrefix; - this.precisionStep = 
precisionStep; - this.geoHashPrecision = geoHashPrecision; - this.latMapper = latMapper; this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; - - this.validateLat = validateLat; - this.validateLon = validateLon; - - this.normalizeLat = normalizeLat; - this.normalizeLon = normalizeLon; } @Override @@ -459,7 +547,12 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } @Override - public FieldType defaultFieldType() { + public GeoPointFieldType fieldType() { + return (GeoPointFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -473,39 +566,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal return false; } - public DoubleFieldMapper latMapper() { - return latMapper; - } - - public DoubleFieldMapper lonMapper() { - return lonMapper; - } - - public StringFieldMapper geoHashStringMapper() { - return this.geohashMapper; - } - - int geoHashPrecision() { - return geoHashPrecision; - } - - public boolean isEnableLatLon() { - return enableLatLon; - } - - public boolean isEnableGeohashPrefix() { - return enableGeohashPrefix; - } - - @Override - public GeoPoint value(Object value) { - if (value instanceof GeoPoint) { - return (GeoPoint) value; - } else { - return GeoPoint.parseFromLatLon(value.toString()); - } - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); @@ -515,7 +575,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public Mapper parse(ParseContext context) throws IOException { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); - context.path().add(names().shortName()); + context.path().add(fieldType().names().shortName()); GeoPoint sparse = context.parseExternalValue(GeoPoint.class); @@ 
-565,9 +625,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal return null; } - private void parseGeohashField(ParseContext context, String geohash) throws IOException { - int len = Math.min(geoHashPrecision, geohash.length()); - int min = enableGeohashPrefix ? 1 : geohash.length(); + private void addGeohashField(ParseContext context, String geohash) throws IOException { + int len = Math.min(fieldType().geohashPrecision(), geohash.length()); + int min = fieldType().isGeohashPrefixEnabled() ? 1 : geohash.length(); for (int i = len; i >= min; i--) { // side effect of this call is adding the field @@ -584,40 +644,40 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + if (fieldType().normalizeLat() || fieldType().normalizeLon()) { + GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon()); } - if (validateLat) { + if (fieldType().validateLat()) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } } - if (validateLon) { + if (fieldType().validateLon()) { if (point.lon() > 180.0 || point.lon() < -180) { throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(names.indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType); + Field field = new Field(fieldType.names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType); context.doc().add(field); } - if (enableGeoHash) { + if (fieldType().isGeohashEnabled()) { if (geohash == null) { geohash = 
GeoHashUtils.encode(point.lat(), point.lon()); } - parseGeohashField(context, geohash); + addGeohashField(context, geohash); } - if (enableLatLon) { + if (fieldType().isLatLonEnabled()) { latMapper.parse(context.createExternalValueContext(point.lat())); lonMapper.parse(context.createExternalValueContext(point.lon())); } - if (hasDocValues()) { - CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(names().indexName()); + if (fieldType().hasDocValues()) { + CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field == null) { - field = new CustomGeoPointDocValuesField(names().indexName(), point.lat(), point.lon()); - context.doc().addWithKey(names().indexName(), field); + field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon()); + context.doc().addWithKey(fieldType().names().indexName(), field); } else { field.add(point.lat(), point.lon()); } @@ -647,42 +707,43 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith; - if (this.enableLatLon != fieldMergeWith.enableLatLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different lat_lon"); + if (this.fieldType().isLatLonEnabled() != fieldMergeWith.fieldType().isLatLonEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different lat_lon"); } - if (this.enableGeoHash != fieldMergeWith.enableGeoHash) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash"); + if (this.fieldType().isGeohashEnabled() != fieldMergeWith.fieldType().isGeohashEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash"); } - if (this.geoHashPrecision != fieldMergeWith.geoHashPrecision) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has 
different geohash_precision"); + if (this.fieldType().geohashPrecision() != fieldMergeWith.fieldType().geohashPrecision()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_precision"); } - if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix"); + if (this.fieldType().isGeohashPrefixEnabled() != fieldMergeWith.fieldType().isGeohashPrefixEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_prefix"); } - if (this.normalizeLat != fieldMergeWith.normalizeLat) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lat"); + if (this.fieldType().normalizeLat() != fieldMergeWith.fieldType().normalizeLat()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lat"); } - if (this.normalizeLon != fieldMergeWith.normalizeLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lon"); + if (this.fieldType().normalizeLon() != fieldMergeWith.fieldType().normalizeLon()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lon"); } - if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different precision_step"); + if (fieldType().isLatLonEnabled() && + this.fieldType().latFieldType().numericPrecisionStep() != fieldMergeWith.fieldType().latFieldType().numericPrecisionStep()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision_step"); } - if (this.validateLat != fieldMergeWith.validateLat) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lat"); + if (this.fieldType().validateLat() != fieldMergeWith.fieldType().validateLat()) { + 
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lat"); } - if (this.validateLon != fieldMergeWith.validateLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lon"); + if (this.fieldType().validateLon() != fieldMergeWith.fieldType().validateLon()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lon"); } } @Override public Iterator iterator() { List extras = new ArrayList<>(); - if (enableGeoHash) { + if (fieldType().isGeohashEnabled()) { extras.add(geohashMapper); } - if (enableLatLon) { + if (fieldType().isLatLonEnabled()) { extras.add(latMapper); extras.add(lonMapper); } @@ -695,46 +756,46 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal if (includeDefaults || pathType != Defaults.PATH_TYPE) { builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); } - if (includeDefaults || enableLatLon != Defaults.ENABLE_LATLON) { - builder.field("lat_lon", enableLatLon); + if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) { + builder.field("lat_lon", fieldType().isLatLonEnabled()); } - if (includeDefaults || enableGeoHash != Defaults.ENABLE_GEOHASH) { - builder.field("geohash", enableGeoHash); + if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) { + builder.field("geohash", fieldType().isGeohashEnabled()); } - if (includeDefaults || enableGeohashPrefix != Defaults.ENABLE_GEOHASH_PREFIX) { - builder.field("geohash_prefix", enableGeohashPrefix); + if (includeDefaults || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { + builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled()); } - if (includeDefaults || geoHashPrecision != Defaults.GEO_HASH_PRECISION) { - builder.field("geohash_precision", geoHashPrecision); + if (fieldType().isGeohashEnabled() && (includeDefaults || fieldType().geohashPrecision() != 
Defaults.GEO_HASH_PRECISION)) { + builder.field("geohash_precision", fieldType().geohashPrecision()); } - if (includeDefaults || precisionStep != null) { - builder.field("precision_step", precisionStep); + if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) { + builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep()); } - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON) { - if (validateLat && validateLon) { + if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) { + if (fieldType().validateLat() && fieldType().validateLon()) { builder.field("validate", true); - } else if (!validateLat && !validateLon) { + } else if (!fieldType().validateLat() && !fieldType().validateLon()) { builder.field("validate", false); } else { - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT) { - builder.field("validate_lat", validateLat); + if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) { + builder.field("validate_lat", fieldType().validateLat()); } - if (includeDefaults || validateLon != Defaults.VALIDATE_LON) { - builder.field("validate_lon", validateLon); + if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) { + builder.field("validate_lon", fieldType().validateLon()); } } } - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON) { - if (normalizeLat && normalizeLon) { + if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) { + if (fieldType().normalizeLat() && fieldType().normalizeLon()) { builder.field("normalize", true); - } else if (!normalizeLat && !normalizeLon) { + } else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) { 
builder.field("normalize", false); } else { - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT) { - builder.field("normalize_lat", normalizeLat); + if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) { + builder.field("normalize_lat", fieldType().normalizeLat()); } - if (includeDefaults || normalizeLon != Defaults.NORMALIZE_LON) { - builder.field("normalize_lon", normalizeLat); + if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) { + builder.field("normalize_lon", fieldType().normalizeLon()); } } } @@ -742,15 +803,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final ObjectHashSet points; - public CustomGeoPointDocValuesField(String name, double lat, double lon) { + public CustomGeoPointDocValuesField(String name, double lat, double lon) { super(name); points = new ObjectHashSet<>(2); points.add(new GeoPoint(lat, lon)); diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 42a4fb287ea..9468088c982 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.geo; import com.spatial4j.core.shape.Shape; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; @@ -40,10 +39,11 @@ import org.elasticsearch.common.unit.DistanceUnit; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; @@ -94,7 +94,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { public static final double DISTANCE_ERROR_PCT = 0.025d; public static final Orientation ORIENTATION = Orientation.RIGHT; - public static final FieldType FIELD_TYPE = new FieldType(); + public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -119,7 +119,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { private SpatialPrefixTree prefixTree; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); } public Builder tree(String tree) { @@ -155,7 +155,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { @Override public GeoShapeFieldMapper build(BuilderContext context) { - final FieldMapper.Names names = buildNames(context); if (Names.TREE_GEOHASH.equals(tree)) { prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true)); } else if (Names.TREE_QUADTREE.equals(tree)) { @@ -169,9 +168,19 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } else { throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]"); } + setupFieldType(context); - return new GeoShapeFieldMapper(names, prefixTree, strategyName, distanceErrorPct, 
orientation, fieldType, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + RecursivePrefixTreeStrategy recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, fieldType.names().indexName()); + recursiveStrategy.setDistErrPct(distanceErrorPct); + recursiveStrategy.setPruneLeafyBranches(false); + TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, fieldType.names().indexName()); + termStrategy.setDistErrPct(distanceErrorPct); + + GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; + geoShapeFieldType.setStrategies(strategyName, recursiveStrategy, termStrategy); + geoShapeFieldType.setOrientation(orientation); + + return new GeoShapeFieldMapper(fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } private final int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { @@ -223,25 +232,83 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } } - private final PrefixTreeStrategy defaultStrategy; - private final RecursivePrefixTreeStrategy recursiveStrategy; - private final TermQueryPrefixTreeStrategy termStrategy; - private Orientation shapeOrientation; + public static class GeoShapeFieldType extends MappedFieldType { - public GeoShapeFieldMapper(FieldMapper.Names names, SpatialPrefixTree tree, String defaultStrategyName, double distanceErrorPct, - Orientation shapeOrientation, FieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1, fieldType, false, null, null, null, null, null, indexSettings, multiFields, copyTo); - this.recursiveStrategy = new RecursivePrefixTreeStrategy(tree, names.indexName()); - this.recursiveStrategy.setDistErrPct(distanceErrorPct); - this.recursiveStrategy.setPruneLeafyBranches(false); - this.termStrategy = new TermQueryPrefixTreeStrategy(tree, names.indexName()); - this.termStrategy.setDistErrPct(distanceErrorPct); - 
this.defaultStrategy = resolveStrategy(defaultStrategyName); - this.shapeOrientation = shapeOrientation; + private PrefixTreeStrategy defaultStrategy; + private RecursivePrefixTreeStrategy recursiveStrategy; + private TermQueryPrefixTreeStrategy termStrategy; + private Orientation orientation; + + public GeoShapeFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected GeoShapeFieldType(GeoShapeFieldType ref) { + super(ref); + // TODO: this shallow copy is probably not good...need to extract the parameters and recreate the tree and strategies? + this.defaultStrategy = ref.defaultStrategy; + this.recursiveStrategy = ref.recursiveStrategy; + this.termStrategy = ref.termStrategy; + this.orientation = ref.orientation; + } + + @Override + public MappedFieldType clone() { + return new GeoShapeFieldType(this); + } + + public PrefixTreeStrategy defaultStrategy() { + return this.defaultStrategy; + } + + public PrefixTreeStrategy resolveStrategy(String strategyName) { + if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { + return recursiveStrategy; + } + if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { + return termStrategy; + } + throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); + } + + public void setStrategies(String defaultStrategy, RecursivePrefixTreeStrategy recursiveStrategy, TermQueryPrefixTreeStrategy termStrategy) { + checkIfFrozen(); + this.recursiveStrategy = recursiveStrategy; + this.termStrategy = termStrategy; + this.defaultStrategy = resolveStrategy(defaultStrategy); + } + + public void setDistErrPct(double distErrPct) { + checkIfFrozen(); + this.recursiveStrategy.setDistErrPct(distErrPct); + this.termStrategy.setDistErrPct(distErrPct); + } + + public Orientation orientation() { return this.orientation; } + + public void setOrientation(Orientation orientation) { + checkIfFrozen(); + this.orientation = orientation; + } + + @Override + public String 
value(Object value) { + throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values"); + } + + } + + public GeoShapeFieldMapper(MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, false, null, indexSettings, multiFields, copyTo); } @Override - public FieldType defaultFieldType() { + public GeoShapeFieldType fieldType() { + return (GeoShapeFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -261,18 +328,18 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } shape = shapeBuilder.build(); } - Field[] fields = defaultStrategy.createIndexableFields(shape); + Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape); if (fields == null || fields.length == 0) { return null; } for (Field field : fields) { if (!customBoost()) { - field.setBoost(boost); + field.setBoost(fieldType.boost()); } context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e); } return null; } @@ -281,29 +348,29 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different field type"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different field type"); return; } final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith; - final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.defaultStrategy; + final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.fieldType().defaultStrategy(); // prevent user from changing strategies - if 
(!(this.defaultStrategy.getClass().equals(mergeWithStrategy.getClass()))) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different strategy"); + if (!(this.fieldType().defaultStrategy().getClass().equals(mergeWithStrategy.getClass()))) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different strategy"); } - final SpatialPrefixTree grid = this.defaultStrategy.getGrid(); + final SpatialPrefixTree grid = this.fieldType().defaultStrategy().getGrid(); final SpatialPrefixTree mergeGrid = mergeWithStrategy.getGrid(); // prevent user from changing trees (changes encoding) if (!grid.getClass().equals(mergeGrid.getClass())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree"); } // TODO we should allow this, but at the moment levels is used to build bookkeeping variables // in lucene's SpatialPrefixTree implementations, need a patch to correct that first if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels or precision"); } // bail if there were merge conflicts @@ -312,11 +379,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } // change distance error percent - this.defaultStrategy.setDistErrPct(mergeWithStrategy.getDistErrPct()); - + this.fieldType = this.fieldType.clone(); + this.fieldType().setDistErrPct(mergeWithStrategy.getDistErrPct()); // change orientation - this is allowed because existing dateline spanning shapes // have already been unwound and segmented - this.shapeOrientation = fieldMergeWith.shapeOrientation; + this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation()); + this.fieldType.freeze(); } @Override @@ -328,25 +396,25 @@ public class 
GeoShapeFieldMapper extends AbstractFieldMapper { builder.field("type", contentType()); // TODO: Come up with a better way to get the name, maybe pass it from builder - if (defaultStrategy.getGrid() instanceof GeohashPrefixTree) { + if (fieldType().defaultStrategy().getGrid() instanceof GeohashPrefixTree) { // Don't emit the tree name since GeohashPrefixTree is the default // Only emit the tree levels if it isn't the default value - if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) { - builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels()); + if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) { + builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels()); } } else { builder.field(Names.TREE, Names.TREE_QUADTREE); - if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) { - builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels()); + if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) { + builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels()); } } - if (includeDefaults || defaultStrategy.getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) { - builder.field(Names.DISTANCE_ERROR_PCT, defaultStrategy.getDistErrPct()); + if (includeDefaults || fieldType().defaultStrategy().getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) { + builder.field(Names.DISTANCE_ERROR_PCT, fieldType().defaultStrategy().getDistErrPct()); } - if (includeDefaults || orientation() != Defaults.ORIENTATION) { - builder.field(Names.ORIENTATION, orientation()); + if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) { + builder.field(Names.ORIENTATION, fieldType().orientation()); } } @@ -354,34 +422,4 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { protected String contentType() { 
return CONTENT_TYPE; } - - @Override - public String value(Object value) { - throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values"); - } - - public PrefixTreeStrategy defaultStrategy() { - return this.defaultStrategy; - } - - public PrefixTreeStrategy recursiveStrategy() { - return this.recursiveStrategy; - } - - public PrefixTreeStrategy termStrategy() { - return this.termStrategy; - } - - public Orientation orientation() { return this.shapeOrientation; } - - public PrefixTreeStrategy resolveStrategy(String strategyName) { - if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { - return recursiveStrategy; - } - if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { - return termStrategy; - } - throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); - } - } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index c310d96476c..364dee5852b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; @@ -34,8 +33,8 @@ import org.elasticsearch.common.lucene.all.AllField; import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -45,7 +44,6 @@ import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.similarity.SimilarityLookupService; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -80,11 +78,12 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { public static final String INDEX_NAME = AllFieldMapper.NAME; public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED; - public static final FieldType FIELD_TYPE = new FieldType(); + public static final MappedFieldType FIELD_TYPE = new AllFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); FIELD_TYPE.setTokenized(true); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -94,7 +93,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { private EnabledAttributeMapper enabled = Defaults.ENABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); builder = this; indexName = Defaults.INDEX_NAME; } @@ -113,7 +112,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } fieldType.setTokenized(true); - return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, similarity, normsLoading, fieldDataSettings, context.indexSettings()); + return new AllFieldMapper(fieldType, enabled, fieldDataSettings, context.indexSettings()); } } @@ -156,18 +155,49 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } } + public static class AllFieldType extends MappedFieldType { + + 
public AllFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected AllFieldType(AllFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new AllFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public Query queryStringTermQuery(Term term) { + return new AllTermQuery(term); + } + + @Override + public Query termQuery(Object value, QueryParseContext context) { + return queryStringTermQuery(createTerm(value)); + } + } private EnabledAttributeMapper enabledState; public AllFieldMapper(Settings indexSettings) { - this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, null, null, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED, null, indexSettings); } - protected AllFieldMapper(String name, FieldType fieldType, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - EnabledAttributeMapper enabled, SimilarityProvider similarity, Loading normsLoading, + protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, name, name, name), 1.0f, fieldType, false, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings); + super(fieldType, false, fieldDataSettings, indexSettings); this.enabledState = enabled; } @@ -177,7 +207,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -186,16 +216,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public Query queryStringTermQuery(Term term) { - return new AllTermQuery(term); - } - - @Override - public Query 
termQuery(Object value, QueryParseContext context) { - return queryStringTermQuery(createTerm(value)); - } - @Override public void preParse(ParseContext context) throws IOException { } @@ -219,11 +239,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { // reset the entries context.allEntries().reset(); Analyzer analyzer = findAnalyzer(context); - fields.add(new AllField(names.indexName(), context.allEntries(), analyzer, fieldType)); + fields.add(new AllField(fieldType.names().indexName(), context.allEntries(), analyzer, fieldType)); } private Analyzer findAnalyzer(ParseContext context) { - Analyzer analyzer = indexAnalyzer; + Analyzer analyzer = fieldType.indexAnalyzer(); if (analyzer == null) { analyzer = context.docMapper().mappers().indexAnalyzer(); if (analyzer == null) { @@ -233,14 +253,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } return analyzer; } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } @Override protected String contentType() { @@ -294,8 +306,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { doXContentAnalyzers(builder, includeDefaults); - if (similarity() != null) { - builder.field("similarity", similarity().name()); + if (fieldType().similarity() != null) { + builder.field("similarity", fieldType().similarity().name()); } else if (includeDefaults) { builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY); } @@ -303,14 +315,14 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } } @Override public void merge(Mapper 
mergeWith, MergeResult mergeResult) throws MergeMappingException { if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) { - mergeResult.addConflict("mapper [" + names.fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); } super.merge(mergeWith, mergeResult); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 2d4a6975d94..631fdb87771 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -38,6 +39,7 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import org.elasticsearch.search.highlight.HighlightBuilder; import java.io.IOException; import java.util.ArrayList; @@ -65,13 +67,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa public static final String NAME = FieldNamesFieldMapper.NAME; public static final EnabledAttributeMapper ENABLED_STATE = 
EnabledAttributeMapper.UNSET_ENABLED; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -80,7 +85,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa private EnabledAttributeMapper enabledState = Defaults.ENABLED_STATE; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -98,7 +103,8 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa @Override public FieldNamesFieldMapper build(BuilderContext context) { - return new FieldNamesFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new FieldNamesFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings()); } } @@ -127,17 +133,45 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa } } - private final FieldType defaultFieldType; + public static class FieldNamesFieldType extends MappedFieldType { + + public FieldNamesFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected FieldNamesFieldType(FieldNamesFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new FieldNamesFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + 
public boolean useTermQueryWithQueryString() { + return true; + } + } + + private final MappedFieldType defaultFieldType; private EnabledAttributeMapper enabledState; private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled public FieldNamesFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings); } - public FieldNamesFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public FieldNamesFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.defaultFieldType = Defaults.FIELD_TYPE; this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0); this.enabledState = enabledState; @@ -148,7 +182,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -157,19 +191,6 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - @Override public void preParse(ParseContext context) throws IOException { } @@ -230,7 +251,7 @@ public class 
FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa for (String path : paths) { for (String fieldName : extractFieldNames(path)) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - document.add(new Field(names().indexName(), fieldName, fieldType)); + document.add(new Field(fieldType().names().indexName(), fieldName, fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index b7438540c40..87cd4f7cb31 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.collect.Iterables; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; @@ -43,6 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -73,14 +73,16 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = IdFieldMapper.NAME; - public static final String INDEX_NAME = IdFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final 
MappedFieldType FIELD_TYPE = new IdFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -92,8 +94,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { private String path = Defaults.PATH; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); - indexName = Defaults.INDEX_NAME; + super(Defaults.NAME, Defaults.FIELD_TYPE); + indexName = Defaults.NAME; } public Builder path(String path) { @@ -108,7 +110,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { @Override public IdFieldMapper build(BuilderContext context) { - return new IdFieldMapper(name, indexName, boost, fieldType, docValues, path, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new IdFieldMapper(fieldType, docValues, path, fieldDataSettings, context.indexSettings()); } } @@ -133,21 +136,109 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } } + public static class IdFieldType extends MappedFieldType { + + public IdFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected IdFieldType(IdFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new IdFieldType(this); + } + + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.termQuery(value, context); + } + 
final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); + return new TermsQuery(UidFieldMapper.NAME, uids); + } + + @Override + public Query termsQuery(List values, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.termsQuery(values, context); + } + return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values)); + } + + @Override + public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.prefixQuery(value, method, context); + } + Collection queryTypes = context.queryTypes(); + BooleanQuery query = new BooleanQuery(); + for (String queryType : queryTypes) { + PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); + if (method != null) { + prefixQuery.setRewriteMethod(method); + } + query.add(prefixQuery, BooleanClause.Occur.SHOULD); + } + return query; + } + + @Override + public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); + } + Collection queryTypes = context.queryTypes(); + if (queryTypes.size() == 1) { + RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))), + flags, maxDeterminizedStates); + if (method != null) { + regexpQuery.setRewriteMethod(method); + } + return regexpQuery; + } + BooleanQuery query = new BooleanQuery(); + for (String queryType : queryTypes) { + RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, 
Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); + if (method != null) { + regexpQuery.setRewriteMethod(method); + } + query.add(regexpQuery, BooleanClause.Occur.SHOULD); + } + return query; + } + } + private final String path; public IdFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.INDEX_NAME, Defaults.BOOST, idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings); + this(idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings); } - protected IdFieldMapper(String name, String indexName, float boost, FieldType fieldType, Boolean docValues, String path, + protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + super(fieldType, docValues, fieldDataSettings, indexSettings); this.path = path; } - private static FieldType idFieldType(Settings indexSettings) { - FieldType fieldType = new FieldType(Defaults.FIELD_TYPE); + private static MappedFieldType idFieldType(Settings indexSettings) { + MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0); if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) { fieldType.setTokenized(false); @@ -160,7 +251,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -169,78 +260,6 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return 
value.toString(); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.termQuery(value, context); - } - final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); - return new TermsQuery(UidFieldMapper.NAME, uids); - } - - @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.termsQuery(values, context); - } - return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values)); - } - - @Override - public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.prefixQuery(value, method, context); - } - Collection queryTypes = context.queryTypes(); - BooleanQuery query = new BooleanQuery(); - for (String queryType : queryTypes) { - PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); - if (method != null) { - prefixQuery.setRewriteMethod(method); - } - query.add(prefixQuery, BooleanClause.Occur.SHOULD); - } - return query; - } - - @Override - public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); - } - Collection queryTypes = context.queryTypes(); - if (queryTypes.size() == 1) { - RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, 
Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))), - flags, maxDeterminizedStates); - if (method != null) { - regexpQuery.setRewriteMethod(method); - } - return regexpQuery; - } - BooleanQuery query = new BooleanQuery(); - for (String queryType : queryTypes) { - RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); - if (method != null) { - regexpQuery.setRewriteMethod(method); - } - query.add(regexpQuery, BooleanClause.Occur.SHOULD); - } - return query; - } - @Override public void preParse(ParseContext context) throws IOException { if (context.sourceToParse().id() != null) { @@ -270,10 +289,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } // else we are in the pre/post parse phase if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - fields.add(new Field(names.indexName(), context.id(), fieldType)); + fields.add(new Field(fieldType.names().indexName(), context.id(), fieldType)); } - if (hasDocValues()) { - fields.add(new BinaryDocValuesField(names.indexName(), new BytesRef(context.id()))); + if (fieldType().hasDocValues()) { + fields.add(new BinaryDocValuesField(fieldType.names().indexName(), new BytesRef(context.id()))); } } @@ -310,7 +329,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 34ab0bcb4ae..00fe4013ed8 
100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperParsingException; @@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import org.elasticsearch.search.highlight.HighlightBuilder; import java.io.IOException; import java.util.Iterator; @@ -59,13 +60,16 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = IndexFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IndexFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new 
MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -77,7 +81,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -88,7 +92,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper @Override public IndexFieldMapper build(BuilderContext context) { - return new IndexFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new IndexFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings()); } } @@ -114,16 +119,39 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class IndexFieldType extends MappedFieldType { + + public IndexFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected IndexFieldType(IndexFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new IndexFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } + private EnabledAttributeMapper enabledState; public IndexFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings); } - public IndexFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState, + public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) 
{ - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + super(fieldType, false, fieldDataSettings, indexSettings); this.enabledState = enabledState; } @@ -132,7 +160,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -142,16 +170,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } public String value(Document document) { - Field field = (Field) document.getField(names.indexName()); - return field == null ? null : value(field); - } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); + Field field = (Field) document.getField(fieldType.names().indexName()); + return field == null ? null : (String)fieldType().value(field); } @Override @@ -174,7 +194,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper if (!enabledState.enabled) { return; } - fields.add(new Field(names.indexName(), context.index(), fieldType)); + fields.add(new Field(fieldType.names().indexName(), context.index(), fieldType)); } @Override @@ -202,7 +222,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } } builder.endObject(); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 7aca0b17ea6..ea79136beb1 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; @@ -35,6 +34,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -70,18 +70,21 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = ParentFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ParentFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } - public static class Builder extends Mapper.Builder { + public static class Builder extends AbstractFieldMapper.Builder { protected String indexName; @@ -89,7 +92,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper protected Settings fieldDataSettings; public 
Builder() { - super(Defaults.NAME); + super(Defaults.NAME, Defaults.FIELD_TYPE); this.indexName = name; builder = this; } @@ -109,7 +112,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (type == null) { throw new MapperParsingException("Parent mapping must contain the parent type"); } - return new ParentFieldMapper(name, indexName, type, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings()); } } @@ -130,8 +134,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } else if (fieldName.equals("fielddata")) { // Only take over `loading`, since that is the only option now that is configurable: Map fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata")); - if (fieldDataSettings.containsKey(Loading.KEY)) { - Settings settings = settingsBuilder().put(Loading.KEY, fieldDataSettings.get(Loading.KEY)).build(); + if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) { + Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build(); builder.fieldDataSettings(settings); } iterator.remove(); @@ -141,19 +145,101 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class ParentFieldType extends MappedFieldType { + + public ParentFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected ParentFieldType(ParentFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new ParentFieldType(this); + } + + @Override + public Uid value(Object value) { + if (value == null) { + return null; + } + return Uid.createUid(value.toString()); + } + + @Override + public Object valueForSearch(Object value) { + if (value == null) { + return 
null; + } + String sValue = value.toString(); + if (sValue == null) { + return null; + } + int index = sValue.indexOf(Uid.DELIMITER); + if (index == -1) { + return sValue; + } + return sValue.substring(index + 1); + } + + /** + * We don't need to analyzer the text, and we need to convert it to UID... + */ + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + return termsQuery(Collections.singletonList(value), context); + } + + @Override + public Query termsQuery(List values, @Nullable QueryParseContext context) { + if (context == null) { + return super.termsQuery(values, context); + } + + List types = new ArrayList<>(context.mapperService().types().size()); + for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) { + if (!documentMapper.parentFieldMapper().active()) { + types.add(documentMapper.type()); + } + } + + List bValues = new ArrayList<>(values.size()); + for (Object value : values) { + BytesRef bValue = BytesRefs.toBytesRef(value); + if (Uid.hasDelimiter(bValue)) { + bValues.add(bValue); + } else { + // we use all non child types, cause we don't know if its exact or not... 
+ for (String type : types) { + bValues.add(Uid.createUidAsBytes(type, bValue)); + } + } + } + return new TermsQuery(names().indexName(), bValues); + } + } + private final String type; private final BytesRef typeAsBytes; - protected ParentFieldMapper(String name, String indexName, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false, - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.type = type; this.typeAsBytes = type == null ? null : new BytesRef(type); } public ParentFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, null, null, indexSettings); - this.fieldDataType = new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.LAZY_VALUE)); + this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings); + this.fieldType = this.fieldType.clone(); + this.fieldType.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE))); + this.fieldType.freeze(); } public String type() { @@ -161,13 +247,13 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @Override public FieldDataType defaultFieldDataType() { - return new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.EAGER_VALUE)); + return new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.EAGER_VALUE)); } @Override @@ -189,7 +275,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements 
RootMapper // we are in the parsing of _parent phase String parentId = context.parser().text(); context.sourceToParse().parent(parentId); - fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); } else { // otherwise, we are running it post processing of the xcontent String parsedParentId = context.doc().get(Defaults.NAME); @@ -200,7 +286,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper throw new MapperParsingException("No parent id provided, not within the document, and not externally"); } // we did not add it in the parsing phase, add it now - fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); } @@ -209,87 +295,6 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper // we have parent mapping, yet no value was set, ignore it... 
} - @Override - public Uid value(Object value) { - if (value == null) { - return null; - } - return Uid.createUid(value.toString()); - } - - @Override - public Object valueForSearch(Object value) { - if (value == null) { - return null; - } - String sValue = value.toString(); - if (sValue == null) { - return null; - } - int index = sValue.indexOf(Uid.DELIMITER); - if (index == -1) { - return sValue; - } - return sValue.substring(index + 1); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - if (value instanceof BytesRef) { - BytesRef bytesRef = (BytesRef) value; - if (Uid.hasDelimiter(bytesRef)) { - return bytesRef; - } - return Uid.createUidAsBytes(typeAsBytes, bytesRef); - } - String sValue = value.toString(); - if (sValue.indexOf(Uid.DELIMITER) == -1) { - return Uid.createUidAsBytes(type, sValue); - } - return super.indexedValueForSearch(value); - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - return termsQuery(Collections.singletonList(value), context); - } - - @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - if (context == null) { - return super.termsQuery(values, context); - } - - List types = new ArrayList<>(context.mapperService().types().size()); - for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) { - if (!documentMapper.parentFieldMapper().active()) { - types.add(documentMapper.type()); - } - } - - List bValues = new ArrayList<>(values.size()); - for (Object value : values) { - BytesRef bValue = BytesRefs.toBytesRef(value); - if (Uid.hasDelimiter(bValue)) { - bValues.add(bValue); - } else { - // we use all non child types, cause we don't know if its exact or not... - for (String type : types) { - bValues.add(Uid.createUidAsBytes(type, bValue)); - } - } - } - return new TermsQuery(names.indexName(), bValues); - } - - /** - * We don't need to analyzer the text, and we need to convert it to UID... 
- */ - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - @Override protected String contentType() { return CONTENT_TYPE; @@ -307,12 +312,28 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; } + @Override + public BytesRef indexedValueForSearch(Object value) { + if (value instanceof BytesRef) { + BytesRef bytesRef = (BytesRef) value; + if (Uid.hasDelimiter(bytesRef)) { + return bytesRef; + } + return Uid.createUidAsBytes(typeAsBytes, bytesRef); + } + String sValue = value.toString(); + if (sValue.indexOf(Uid.DELIMITER) == -1) { + return Uid.createUidAsBytes(type, sValue); + } + return super.indexedValueForSearch(value); + } + @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { ParentFieldMapper other = (ParentFieldMapper) mergeWith; @@ -322,14 +343,16 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (!mergeResult.simulate()) { ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; + this.fieldType = this.fieldType.clone(); if (fieldMergeWith.customFieldDataSettings != null) { if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) { this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings; - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), + this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(), builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) - ); + )); } } + this.fieldType.freeze(); } } diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 96b9375d9ad..d03238caf6e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -58,13 +58,16 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = "_routing"; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new RoutingFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -79,7 +82,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe private String path = 
Defaults.PATH; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); } public Builder required(boolean required) { @@ -121,6 +124,29 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe } } + public static class RoutingFieldType extends MappedFieldType { + + public RoutingFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected RoutingFieldType(RoutingFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new RoutingFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } private boolean required; private final String path; @@ -129,15 +155,14 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe this(Defaults.FIELD_TYPE, Defaults.REQUIRED, Defaults.PATH, null, indexSettings); } - protected RoutingFieldMapper(FieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), 1.0f, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.required = required; this.path = path; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -159,16 +184,8 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe } public String value(Document document) { - Field field = (Field) document.getField(names.indexName()); - return field == null ? 
null : value(field); - } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); + Field field = (Field) document.getField(fieldType.names().indexName()); + return field == null ? null : (String)value(field); } @Override @@ -194,10 +211,10 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe String routing = context.sourceToParse().routing(); if (routing != null) { if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { - context.ignoredValue(names.indexName(), routing); + context.ignoredValue(fieldType.names().indexName(), routing); return; } - fields.add(new Field(names.indexName(), routing, fieldType)); + fields.add(new Field(fieldType.names().indexName(), routing, fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java index 1d8ad2a422c..a9435c5a1da 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java @@ -20,16 +20,18 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import 
org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; @@ -53,10 +55,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { public static final String NAME = CONTENT_TYPE; public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED; - public static final FieldType SIZE_FIELD_TYPE = new FieldType(IntegerFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType SIZE_FIELD_TYPE = IntegerFieldMapper.Defaults.FIELD_TYPE.clone(); static { SIZE_FIELD_TYPE.setStored(true); + SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT); + SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); SIZE_FIELD_TYPE.freeze(); } } @@ -66,7 +70,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.SIZE_FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(Defaults.NAME, Defaults.SIZE_FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -77,8 +81,19 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { @Override public SizeFieldMapper build(BuilderContext context) { + setupFieldType(context); return new SizeFieldMapper(enabledState, fieldType, fieldDataSettings, context.indexSettings()); } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -104,12 +119,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { private EnabledAttributeMapper enabledState; public 
SizeFieldMapper(Settings indexSettings) { - this(Defaults.ENABLED_STATE, new FieldType(Defaults.SIZE_FIELD_TYPE), null, indexSettings); + this(Defaults.ENABLED_STATE, Defaults.SIZE_FIELD_TYPE.clone(), null, indexSettings); } - public SizeFieldMapper(EnabledAttributeMapper enabled, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME), Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE, - Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, fieldDataSettings, + public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, Defaults.NULL_VALUE, + Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = enabled; } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 7c8ae58d5fd..0324ef0d0d3 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; @@ -45,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -78,12 +78,15 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper public static final long COMPRESS_THRESHOLD = -1; public static final String FORMAT = null; // default format is to use the one provided - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new SourceFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -138,7 +141,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper @Override public SourceFieldMapper build(BuilderContext context) { - return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings()); + return new SourceFieldMapper(enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings()); } } @@ -195,6 +198,39 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class SourceFieldType extends MappedFieldType { + + public SourceFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected SourceFieldType(SourceFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new SourceFieldType(this); + } + + @Override + public byte[] value(Object value) { + if (value == null) { + return null; + } + BytesReference bValue; + if (value instanceof BytesRef) { + bValue = new BytesArray((BytesRef) value); + } else { + bValue = (BytesReference) value; + } + try { + return 
CompressorFactory.uncompressIfNeeded(bValue).toBytes(); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to decompress source", e); + } + } + } private final boolean enabled; @@ -212,13 +248,12 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper private XContentType formatContentType; public SourceFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings); + this(Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings); } - protected SourceFieldMapper(String name, boolean enabled, String format, Boolean compress, long compressThreshold, + protected SourceFieldMapper(boolean enabled, String format, Boolean compress, long compressThreshold, String[] includes, String[] excludes, Settings indexSettings) { - super(new Names(name, name, name, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false, - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, indexSettings); // Only stored. + super(Defaults.FIELD_TYPE.clone(), false, null, indexSettings); // Only stored. 
this.enabled = enabled; this.compress = compress; this.compressThreshold = compressThreshold; @@ -247,7 +282,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -358,25 +393,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper if (!source.hasArray()) { source = source.toBytesArray(); } - fields.add(new StoredField(names().indexName(), source.array(), source.arrayOffset(), source.length())); - } - - @Override - public byte[] value(Object value) { - if (value == null) { - return null; - } - BytesReference bValue; - if (value instanceof BytesRef) { - bValue = new BytesArray((BytesRef) value); - } else { - bValue = (BytesReference) value; - } - try { - return CompressorFactory.uncompressIfNeeded(bValue).toBytes(); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to decompress source", e); - } + fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length())); } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 8e817ddd903..0040b4a5140 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -30,6 +29,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.index.AlreadyExpiredException; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericLongAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -59,12 +61,14 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { public static class Defaults extends LongFieldMapper.Defaults { public static final String NAME = TTLFieldMapper.CONTENT_TYPE; - public static final FieldType TTL_FIELD_TYPE = new FieldType(LongFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType TTL_FIELD_TYPE = new TTLFieldType(); static { TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); TTL_FIELD_TYPE.setStored(true); TTL_FIELD_TYPE.setTokenized(false); + TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); + TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); TTL_FIELD_TYPE.freeze(); } @@ -78,7 +82,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { private long defaultTTL = Defaults.DEFAULT; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); } public Builder enabled(EnabledAttributeMapper enabled) { @@ -93,8 +97,19 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { @Override public TTLFieldMapper build(BuilderContext context) { + setupFieldType(context); return new TTLFieldMapper(fieldType, enabledState, defaultTTL, ignoreMalformed(context),coerce(context), fieldDataSettings, context.indexSettings()); } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int 
maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -121,18 +136,46 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { } } + public static class TTLFieldType extends LongFieldType { + + public TTLFieldType() { + } + + protected TTLFieldType(TTLFieldType ref) { + super(ref); + } + + @Override + public LongFieldType clone() { + return new TTLFieldType(this); + } + + // Overrides valueForSearch to display live value of remaining ttl + @Override + public Object valueForSearch(Object value) { + long now; + SearchContext searchContext = SearchContext.current(); + if (searchContext != null) { + now = searchContext.nowInMillis(); + } else { + now = System.currentTimeMillis(); + } + long val = value(value); + return val - now; + } + } + private EnabledAttributeMapper enabledState; private long defaultTTL; public TTLFieldMapper(Settings indexSettings) { - this(new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); + this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); } - protected TTLFieldMapper(FieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit ignoreMalformed, + protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.PRECISION_STEP_64_BIT, - Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce, - null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null); + super(fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = 
enabled; this.defaultTTL = defaultTTL; } @@ -145,20 +188,6 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { return this.defaultTTL; } - // Overrides valueForSearch to display live value of remaining ttl - @Override - public Object valueForSearch(Object value) { - long now; - SearchContext searchContext = SearchContext.current(); - if (searchContext != null) { - now = searchContext.nowInMillis(); - } else { - now = System.currentTimeMillis(); - } - long val = value(value); - return val - now; - } - // Other implementation for realtime get display public Object valueForSearch(long expirationTime) { return expirationTime - System.currentTimeMillis(); @@ -207,7 +236,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now); } // the expiration timestamp (timestamp + ttl) is set as field - fields.add(new CustomLongNumericField(this, expire, fieldType)); + fields.add(new CustomLongNumericField(this, expire, (NumberFieldType)fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 9437ee3d056..5f068395a03 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; @@ -32,6 +31,9 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericDateAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -46,7 +48,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.timestamp; @@ -63,21 +64,26 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper public static final String NAME = "_timestamp"; // TODO: this should be removed - public static final FieldType PRE_20_FIELD_TYPE; - public static final FieldType FIELD_TYPE = new FieldType(DateFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType PRE_20_FIELD_TYPE; + public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); + public static final DateFieldType FIELD_TYPE = new TimestampFieldType(); static { FIELD_TYPE.setStored(true); FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER); + FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT)); + FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE)); FIELD_TYPE.freeze(); - PRE_20_FIELD_TYPE = new FieldType(FIELD_TYPE); + PRE_20_FIELD_TYPE = FIELD_TYPE.clone(); PRE_20_FIELD_TYPE.setStored(false); PRE_20_FIELD_TYPE.freeze(); } public static final EnabledAttributeMapper 
ENABLED = EnabledAttributeMapper.UNSET_DISABLED; public static final String PATH = null; - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); public static final String DEFAULT_TIMESTAMP = "now"; } @@ -85,13 +91,16 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; private String path = Defaults.PATH; - private FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; private boolean explicitStore = false; private Boolean ignoreMissing = null; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); + } + + DateFieldType fieldType() { + return (DateFieldType)fieldType; } public Builder enabled(EnabledAttributeMapper enabledState) { @@ -105,8 +114,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - this.dateTimeFormatter = dateTimeFormatter; - return builder; + fieldType().setDateTimeFormatter(dateTimeFormatter); + return this; } public Builder defaultTimestamp(String defaultTimestamp) { @@ -131,9 +140,20 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper assert fieldType.stored(); fieldType.setStored(false); } - return new TimestampFieldMapper(fieldType, docValues, enabledState, path, dateTimeFormatter, defaultTimestamp, + setupFieldType(context); + return new TimestampFieldMapper(fieldType, docValues, enabledState, path, defaultTimestamp, ignoreMissing, - ignoreMalformed(context), coerce(context), normsLoading, fieldDataSettings, context.indexSettings()); + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings()); 
+ } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter(), precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; } } @@ -190,7 +210,29 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } } - private static FieldType defaultFieldType(Settings settings) { + public static class TimestampFieldType extends DateFieldType { + + public TimestampFieldType() {} + + protected TimestampFieldType(TimestampFieldType ref) { + super(ref); + } + + @Override + public DateFieldType clone() { + return new TimestampFieldType(this); + } + + /** + * Override the default behavior to return a timestamp + */ + @Override + public Object valueForSearch(Object value) { + return value(value); + } + } + + private static MappedFieldType defaultFieldType(Settings settings) { return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE; } @@ -198,23 +240,18 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper private final String path; private final String defaultTimestamp; - private final FieldType defaultFieldType; + private final MappedFieldType defaultFieldType; private final Boolean ignoreMissing; public TimestampFieldMapper(Settings indexSettings) { - this(new FieldType(defaultFieldType(indexSettings)), null, Defaults.ENABLED, Defaults.PATH, Defaults.DATE_TIME_FORMATTER, Defaults.DEFAULT_TIMESTAMP, - null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, indexSettings); + this(defaultFieldType(indexSettings).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, + null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); } - protected TimestampFieldMapper(FieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path, - FormatDateTimeFormatter dateTimeFormatter, String 
defaultTimestamp, - Boolean ignoreMissing, - Explicit ignoreMalformed, Explicit coerce, Loading normsLoading, + protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path, + String defaultTimestamp, Boolean ignoreMissing, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), dateTimeFormatter, - Defaults.PRECISION_STEP_64_BIT, Defaults.BOOST, fieldType, docValues, - Defaults.NULL_VALUE, TimeUnit.MILLISECONDS /*always milliseconds*/, - ignoreMalformed, coerce, null, normsLoading, fieldDataSettings, + super(fieldType, docValues, Defaults.NULL_VALUE, ignoreMalformed, coerce, fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = enabledState; this.path = path; @@ -224,7 +261,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -249,19 +286,6 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper return this.ignoreMissing; } - @Override - public FormatDateTimeFormatter dateTimeFormatter() { - return this.dateTimeFormatter; - } - - /** - * Override the default behavior to return a timestamp - */ - @Override - public Object valueForSearch(Object value) { - return value(value); - } - @Override public void preParse(ParseContext context) throws IOException { super.parse(context); @@ -281,14 +305,14 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper protected void innerParseCreateField(ParseContext context, List fields) throws IOException { if (enabledState.enabled) { long timestamp = context.sourceToParse().timestamp(); - if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !hasDocValues()) { - 
context.ignoredValue(names.indexName(), String.valueOf(timestamp)); + if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !fieldType().hasDocValues()) { + context.ignoredValue(fieldType.names().indexName(), String.valueOf(timestamp)); } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, fieldType)); + fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, (NumberFieldType)fieldType)); } - if (hasDocValues()) { - fields.add(new NumericDocValuesField(names.indexName(), timestamp)); + if (fieldType().hasDocValues()) { + fields.add(new NumericDocValuesField(fieldType.names().indexName(), timestamp)); } } } @@ -306,10 +330,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper // if all are defaults, no sense to write it at all if (!includeDefaults && indexed == indexedDefault && customFieldDataSettings == null && - fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH - && dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format()) + fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH + && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp) - && defaultDocValues() == hasDocValues()) { + && defaultDocValues() == fieldType().hasDocValues()) { return builder; } builder.startObject(CONTENT_TYPE); @@ -326,8 +350,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper if (includeDefaults || path != Defaults.PATH) { builder.field("path", path); } - if (includeDefaults || !dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format())) { - builder.field("format", dateTimeFormatter.format()); + if (includeDefaults || 
!fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())) { + builder.field("format", fieldType().dateTimeFormatter().format()); } if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { builder.field("default", defaultTimestamp); @@ -338,7 +362,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index a6b214f97f9..9128534c468 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; @@ -36,6 +35,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -65,13 +65,16 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { 
public static final String NAME = TypeFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new TypeFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -79,13 +82,14 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Builder extends AbstractFieldMapper.Builder { public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @Override public TypeFieldMapper build(BuilderContext context) { - return new TypeFieldMapper(name, indexName, boost, fieldType, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new TypeFieldMapper(fieldType, fieldDataSettings, context.indexSettings()); } } @@ -101,17 +105,53 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public TypeFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null, indexSettings); + public static class TypeFieldType extends MappedFieldType { + + public TypeFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected TypeFieldType(TypeFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new TypeFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean useTermQueryWithQueryString() { + 
return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + if (indexOptions() == IndexOptions.NONE) { + return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value))))); + } + return new ConstantScoreQuery(new TermQuery(createTerm(value))); + } } - public TypeFieldMapper(String name, String indexName, float boost, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public TypeFieldMapper(Settings indexSettings) { + this(Defaults.FIELD_TYPE.clone(), null, indexSettings); + } + + public TypeFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -120,26 +160,6 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() == IndexOptions.NONE) { - return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value))))); - } - return new ConstantScoreQuery(new TermQuery(createTerm(value))); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } @Override public void preParse(ParseContext context) throws IOException { @@ -161,9 +181,9 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { if 
(fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { return; } - fields.add(new Field(names.indexName(), context.type(), fieldType)); - if (hasDocValues()) { - fields.add(new SortedSetDocValuesField(names.indexName(), new BytesRef(context.type()))); + fields.add(new Field(fieldType.names().indexName(), context.type(), fieldType)); + if (fieldType().hasDocValues()) { + fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(context.type()))); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index da9f424d9b2..3d49df6089c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; @@ -32,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -61,17 +61,20 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = UidFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); - public static final 
FieldType NESTED_FIELD_TYPE; + public static final MappedFieldType FIELD_TYPE = new UidFieldType(); + public static final MappedFieldType NESTED_FIELD_TYPE; static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); - NESTED_FIELD_TYPE = new FieldType(FIELD_TYPE); + NESTED_FIELD_TYPE = FIELD_TYPE.clone(); NESTED_FIELD_TYPE.setStored(false); NESTED_FIELD_TYPE.freeze(); } @@ -86,7 +89,8 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { @Override public UidFieldMapper build(BuilderContext context) { - return new UidFieldMapper(name, indexName, docValues, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new UidFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings()); } } @@ -102,13 +106,36 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public UidFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, null, null, indexSettings); + public static class UidFieldType extends MappedFieldType { + + public UidFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected UidFieldType(UidFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new UidFieldType(this); + } + + @Override + public Uid value(Object value) { + if (value == null) { + return null; + } + return Uid.createUid(value.toString()); + } } - protected UidFieldMapper(String name, String indexName, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), 
docValuesEnabled(docValues, indexSettings), - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public UidFieldMapper(Settings indexSettings) { + this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings); + } + + protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, docValuesEnabled(docValues, indexSettings), fieldDataSettings, indexSettings); } static Boolean docValuesEnabled(Boolean docValues, Settings indexSettings) { @@ -119,7 +146,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -171,21 +198,13 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { Field uid = new Field(NAME, Uid.createUid(context.stringBuilder(), context.type(), context.id()), Defaults.FIELD_TYPE); context.uid(uid); fields.add(uid); - if (hasDocValues()) { + if (fieldType().hasDocValues()) { fields.add(new BinaryDocValuesField(NAME, new BytesRef(uid.stringValue()))); } } - @Override - public Uid value(Object value) { - if (value == null) { - return null; - } - return Uid.createUid(value.toString()); - } - public Term term(String uid) { - return createTerm(uid); + return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid)); } @Override @@ -210,7 +229,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 88b0074b634..f4a33b80b5c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -20,13 +20,14 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.index.DocValuesType; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -52,9 +53,13 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe public static class Defaults { public static final String NAME = VersionFieldMapper.NAME; - public static final float BOOST = 1.0f; - public static final FieldType FIELD_TYPE = NumericDocValuesField.TYPE; + public static final MappedFieldType FIELD_TYPE = new VersionFieldType(); + static { + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC); + FIELD_TYPE.freeze(); + } } public static class Builder extends Mapper.Builder { @@ -86,6 +91,31 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe } } + public static class VersionFieldType extends MappedFieldType { + + public VersionFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected VersionFieldType(VersionFieldType ref) { + super(ref); + 
} + + @Override + public MappedFieldType clone() { + return new VersionFieldType(this); + } + + @Override + public Long value(Object value) { + if (value == null || (value instanceof Long)) { + return (Long) value; + } else { + return Long.parseLong(value.toString()); + } + } + } + private final ThreadLocal fieldCache = new ThreadLocal() { @Override protected Field initialValue() { @@ -94,7 +124,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe }; public VersionFieldMapper(Settings indexSettings) { - super(new Names(NAME, NAME, NAME, NAME), Defaults.BOOST, Defaults.FIELD_TYPE, true, null, null, null, null, null, indexSettings); + super(Defaults.FIELD_TYPE, true, null, indexSettings); } @Override @@ -116,15 +146,6 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe return null; } - @Override - public Long value(Object value) { - if (value == null || (value instanceof Long)) { - return (Long) value; - } else { - return Long.parseLong(value.toString()); - } - } - @Override public void postParse(ParseContext context) throws IOException { // In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents @@ -136,7 +157,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } diff --git a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 0fa74b0ab8a..8cf6b93f5ca 100644 --- a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -20,10 +20,8 @@ package org.elasticsearch.index.mapper.ip; import com.google.common.net.InetAddresses; - import org.apache.lucene.analysis.NumericTokenStream; import 
org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.NumericRangeQuery; @@ -43,6 +41,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericAnalyzer; import org.elasticsearch.index.analysis.NumericTokenizer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -51,7 +50,6 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -101,7 +99,7 @@ public class IpFieldMapper extends NumberFieldMapper { public static class Defaults extends NumberFieldMapper.Defaults { public static final String NULL_VALUE = null; - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IpFieldType(); static { FIELD_TYPE.freeze(); @@ -113,7 +111,7 @@ public class IpFieldMapper extends NumberFieldMapper { protected String nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -124,13 +122,23 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public IpFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 
1.0f); - IpFieldMapper fieldMapper = new IpFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + IpFieldMapper fieldMapper = new IpFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? "_ip/max" : ("_ip/" + precisionStep); + return new NamedAnalyzer(name, new NumericIpAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -154,21 +162,90 @@ public class IpFieldMapper extends NumberFieldMapper { } } + public static class IpFieldType extends NumberFieldType { + + public IpFieldType() {} + + protected IpFieldType(IpFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new IpFieldType(this); + } + + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return ipToLong(value.toString()); + } + + /** + * IPs should return as a string. 
+ */ + @Override + public Object valueForSearch(Object value) { + Long val = value(value); + if (val == null) { + return null; + } + return longToIp(val); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = ipToLong(value); + long iSim; + try { + iSim = ipToLong(fuzziness.asString()); + } catch (IllegalArgumentException e) { + iSim = fuzziness.asLong(); + } + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + } + private String nullValue; - protected IpFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected IpFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, - ignoreMalformed, coerce, new NamedAnalyzer("_ip/" + precisionStep, new NumericIpAnalyzer(precisionStep)), - new NamedAnalyzer("_ip/max", new NumericIpAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, 
indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -177,45 +254,7 @@ public class IpFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return ipToLong(value.toString()); - } - - /** - * IPs should return as a string. - */ - @Override - public Object valueForSearch(Object value) { - Long val = value(value); - if (val == null) { - return null; - } - return longToIp(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private long parseValue(Object value) { + private static long parseValue(Object value) { if (value instanceof Number) { return ((Number) value).longValue(); } @@ -225,29 +264,6 @@ public class IpFieldMapper extends NumberFieldMapper { return ipToLong(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = ipToLong(value); - long iSim; - try { - iSim = ipToLong(fuzziness.asString()); - } catch (IllegalArgumentException e) { - iSim = fuzziness.asLong(); - } - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object 
upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -276,16 +292,16 @@ public class IpFieldMapper extends NumberFieldMapper { return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), ipAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), ipAsString, fieldType.boost()); } final long value = ipToLong(ipAsString); if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); - field.setBoost(boost); + field.setBoost(fieldType.boost()); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -310,8 +326,8 @@ public class IpFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); diff --git a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java index ca0aed86f8e..d5e4c95b54c 100644 --- a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java +++ 
b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java @@ -165,7 +165,7 @@ public class CommonTermsQueryParser implements QueryParser { String field; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } else { field = fieldName; } @@ -173,7 +173,7 @@ public class CommonTermsQueryParser implements QueryParser { Analyzer analyzer = null; if (queryAnalyzer == null) { if (mapper != null) { - analyzer = mapper.searchAnalyzer(); + analyzer = mapper.fieldType().searchAnalyzer(); } if (analyzer == null && mapper != null) { analyzer = parseContext.getSearchAnalyzer(mapper); diff --git a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index fe981193805..0cf97b4cbe3 100644 --- a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -98,7 +98,7 @@ public class ExistsQueryParser implements QueryParser { if (fieldNamesMapper!= null && fieldNamesMapper.enabled()) { final String f; if (mapper != null) { - f = mapper.names().indexName(); + f = mapper.fieldType().names().indexName(); } else { f = field; } diff --git a/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java b/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java index 1e8fd7cfa03..1a2e6608dec 100644 --- a/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java @@ -92,7 +92,7 @@ public class FieldMaskingSpanQueryParser implements QueryParser { FieldMapper mapper = parseContext.fieldMapper(field); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } FieldMaskingSpanQuery query = new 
FieldMaskingSpanQuery(inner, field); diff --git a/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java b/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java index 3a63ae68295..10a766ac068 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java @@ -150,9 +150,9 @@ public class GeoShapeQueryParser implements QueryParser { GeoShapeFieldMapper shapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = shapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = shapeFieldMapper.fieldType().defaultStrategy(); if (strategyName != null) { - strategy = shapeFieldMapper.resolveStrategy(strategyName); + strategy = shapeFieldMapper.fieldType().resolveStrategy(strategyName); } Query query; if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) { diff --git a/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 08b38453dc6..363303181ac 100644 --- a/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; @@ -71,11 +72,11 @@ public class GeohashCellQuery { * @return a new GeoBoundinboxfilter */ public static Query create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List geohashes) { - if (fieldMapper.geoHashStringMapper() == null) { + 
MappedFieldType geoHashMapper = fieldMapper.fieldType().geohashFieldType(); + if (geoHashMapper == null) { throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); } - StringFieldMapper geoHashMapper = fieldMapper.geoHashStringMapper(); if (geohashes == null || geohashes.size() == 0) { return geoHashMapper.termQuery(geohash, context); } else { @@ -246,7 +247,7 @@ public class GeohashCellQuery { } GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper); - if (!geoMapper.isEnableGeohashPrefix()) { + if (!geoMapper.fieldType().isGeohashPrefixEnabled()) { throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName + "], geohash_prefix is not enabled"); } diff --git a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index 16596b6bdfb..7c7ae54aff5 100644 --- a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -116,7 +116,7 @@ public class MissingQueryParser implements QueryParser { if (fieldNamesMapper != null && fieldNamesMapper.enabled()) { final String f; if (mapper != null) { - f = mapper.names().indexName(); + f = mapper.fieldType().names().indexName(); } else { f = field; } diff --git a/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index fe1c99e421a..b33da0994c5 100644 --- a/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -167,7 +167,7 @@ public class MoreLikeThisQueryParser implements QueryParser { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { String field = parser.text(); FieldMapper mapper = parseContext.fieldMapper(field); - moreLikeFields.add(mapper == null ? 
field : mapper.names().indexName()); + moreLikeFields.add(mapper == null ? field : mapper.fieldType().names().indexName()); } } else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index e2be1229fa6..f279efa5052 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -287,8 +287,8 @@ public class QueryParseContext { * TODO: remove this by moving defaults into mappers themselves */ public Analyzer getSearchAnalyzer(FieldMapper mapper) { - if (mapper.searchAnalyzer() != null) { - return mapper.searchAnalyzer(); + if (mapper.fieldType().searchAnalyzer() != null) { + return mapper.fieldType().searchAnalyzer(); } return mapperService().searchAnalyzer(); } @@ -297,8 +297,8 @@ public class QueryParseContext { * TODO: remove this by moving defaults into mappers themselves */ public Analyzer getSearchQuoteAnalyzer(FieldMapper mapper) { - if (mapper.searchQuoteAnalyzer() != null) { - return mapper.searchQuoteAnalyzer(); + if (mapper.fieldType().searchQuoteAnalyzer() != null) { + return mapper.fieldType().searchQuoteAnalyzer(); } return mapperService().searchQuoteAnalyzer(); } diff --git a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java index e86ffebde31..fed95b9eb02 100644 --- a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java @@ -128,7 +128,7 @@ public class RangeQueryParser implements QueryParser { "[range] time_zone when using ms since epoch format as it's UTC based can not be applied to [" + fieldName + "]"); } - query = ((DateFieldMapper) 
mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext); + query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext); } else { if (timeZone != null) { throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field [" diff --git a/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java b/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java index d48c5b7163f..ba70aa89bde 100644 --- a/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java +++ b/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java @@ -132,7 +132,7 @@ public class SimpleQueryStringParser implements QueryParser { } else { FieldMapper mapper = parseContext.fieldMapper(fField); if (mapper != null) { - fieldsAndWeights.put(mapper.names().indexName(), fBoost); + fieldsAndWeights.put(mapper.fieldType().names().indexName(), fBoost); } else { fieldsAndWeights.put(fField, fBoost); } diff --git a/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java b/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java index b1b3cbf5fbf..1b4e8c3af0e 100644 --- a/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java @@ -95,7 +95,7 @@ public class SpanTermQueryParser implements QueryParser { BytesRef valueBytes = null; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - fieldName = mapper.names().indexName(); + fieldName = mapper.fieldType().names().indexName(); valueBytes = mapper.indexedValueForSearch(value); } if (valueBytes == null) { diff --git a/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java index f4170292251..1da9b5f748e 100644 --- 
a/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java @@ -160,7 +160,7 @@ public class TermsQueryParser implements QueryParser { FieldMapper fieldMapper = parseContext.fieldMapper(fieldName); if (fieldMapper != null) { - fieldName = fieldMapper.names().indexName(); + fieldName = fieldMapper.fieldType().names().indexName(); } if (lookupId != null) { diff --git a/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java b/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java index 36ca202173e..be664f04f6b 100644 --- a/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java @@ -95,7 +95,7 @@ public class WildcardQueryParser implements QueryParser { BytesRef valueBytes; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - fieldName = mapper.names().indexName(); + fieldName = mapper.fieldType().names().indexName(); valueBytes = mapper.indexedValueForSearch(value); } else { valueBytes = new BytesRef(value); diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 375920a7fb0..b5a8363740d 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -263,7 +263,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser { } long origin = SearchContext.current().nowInMillis(); if (originString != null) { - origin = dateFieldMapper.parseToMilliseconds(originString); + origin = dateFieldMapper.fieldType().parseToMilliseconds(originString, false, null, null); } if (scaleString == null) { diff --git a/src/main/java/org/elasticsearch/index/search/MatchQuery.java 
b/src/main/java/org/elasticsearch/index/search/MatchQuery.java index b2b747a9c92..344c9d3b29d 100644 --- a/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -157,7 +157,7 @@ public class MatchQuery { final String field; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } else { field = fieldName; } diff --git a/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index ea9666f0746..a31466a7dc6 100644 --- a/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -165,7 +165,7 @@ public class MultiMatchQuery extends MatchQuery { FieldMapper mapper = parseContext.fieldMapper(name); if (mapper != null) { Analyzer actualAnalyzer = getAnalyzer(mapper); - name = mapper.names().indexName(); + name = mapper.fieldType().names().indexName(); if (!groups.containsKey(actualAnalyzer)) { groups.put(actualAnalyzer, new ArrayList()); } diff --git a/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java b/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java index cb1befd76c7..bf1ba4578b2 100644 --- a/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java +++ b/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; public class IndexedGeoBoundingBoxQuery { public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { - if (!fieldMapper.isEnableLatLon()) { + if (!fieldMapper.fieldType().isLatLonEnabled()) { throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + 
fieldMapper.name() + "], can't use indexed filter on it"); } //checks to see if bounding box crosses 180 degrees @@ -45,16 +45,16 @@ public class IndexedGeoBoundingBoxQuery { private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { BooleanQuery filter = new BooleanQuery(); filter.setMinimumNumberShouldMatch(1); - filter.add(fieldMapper.lonMapper().rangeFilter(null, bottomRight.lon(), true, true), Occur.SHOULD); - filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), null, true, true), Occur.SHOULD); - filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD); + filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST); return new ConstantScoreQuery(filter); } private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { BooleanQuery filter = new BooleanQuery(); - filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST); - filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST); + filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST); return new ConstantScoreQuery(filter); } } diff --git a/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 7c31ca2c8fd..c3232085130 100644 --- 
a/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -100,7 +100,7 @@ public class SimilarityService extends AbstractIndexComponent { @Override public Similarity get(String name) { FieldMapper mapper = mapperService.smartNameFieldMapper(name); - return (mapper != null && mapper.similarity() != null) ? mapper.similarity().get() : defaultSimilarity; + return (mapper != null && mapper.fieldType().similarity() != null) ? mapper.fieldType().similarity().get() : defaultSimilarity; } } } diff --git a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java index 9ca66a65ec7..ec160dba1ec 100644 --- a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java +++ b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java @@ -236,7 +236,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) { analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString()); } else { - analyzer = mapperService.smartNameFieldMapper(field).indexAnalyzer(); + analyzer = mapperService.smartNameFieldMapper(field).fieldType().indexAnalyzer(); } if (analyzer == null) { analyzer = mapperService.analysisService().defaultIndexAnalyzer(); diff --git a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 7547814d791..4c4233ff2c0 100644 --- a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import 
org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.index.shard.IndexShard; @@ -99,7 +100,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL this.closed = true; } - public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) { + public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) { return new IndexFieldCache(logger, cache, indicesFieldDataCacheListener, indexService, index, fieldNames, fieldDataType); } @@ -139,12 +140,12 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL private final ESLogger logger; private final IndexService indexService; final Index index; - final FieldMapper.Names fieldNames; + final MappedFieldType.Names fieldNames; final FieldDataType fieldDataType; private final Cache cache; private final IndicesFieldDataCacheListener indicesFieldDataCacheListener; - IndexFieldCache(ESLogger logger,final Cache cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) { + IndexFieldCache(ESLogger logger,final Cache cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) { this.logger = logger; this.indexService = indexService; this.index = index; diff --git a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java 
b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java index c9b5f653101..eff99a26e1a 100644 --- a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java +++ b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.indices.breaker.CircuitBreakerService; /** @@ -43,13 +44,14 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen } @Override - public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) { + public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) { } @Override - public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]"; circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes); } } + diff --git a/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java b/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java index 165193d35f0..3233cdcd756 100644 --- a/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java +++ b/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java @@ -60,8 +60,8 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex { if (tokenStream != 
null) { memoryIndex.addField(field.name(), tokenStream, field.boost()); } - } catch (IOException e) { - throw new ElasticsearchException("Failed to create token stream", e); + } catch (Exception e) { + throw new ElasticsearchException("Failed to create token stream for [" + field.name() + "]", e); } } context.initialize(new DocEngineSearcher(memoryIndex), parsedDocument); diff --git a/src/main/java/org/elasticsearch/search/SearchService.java b/src/main/java/org/elasticsearch/search/SearchService.java index 15a691f3360..025ac1f6ec9 100644 --- a/src/main/java/org/elasticsearch/search/SearchService.java +++ b/src/main/java/org/elasticsearch/search/SearchService.java @@ -59,7 +59,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.TemplateQueryParser; import org.elasticsearch.index.search.stats.StatsGroupsParseElement; @@ -840,8 +840,12 @@ public class SearchService extends AbstractLifecycleComponent { final ObjectSet warmUp = new ObjectHashSet<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final String indexName = fieldMapper.names().indexName(); - if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms() && fieldMapper.normsLoading(defaultLoading) == Loading.EAGER) { + final String indexName = fieldMapper.fieldType().names().indexName(); + Loading normsLoading = fieldMapper.fieldType().normsLoading(); + if (normsLoading == null) { + normsLoading = defaultLoading; + } + if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && 
!fieldMapper.fieldType().omitNorms() && normsLoading == Loading.EAGER) { warmUp.add(indexName); } } @@ -896,7 +900,7 @@ public class SearchService extends AbstractLifecycleComponent { final Map warmUp = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType fieldDataType = fieldMapper.fieldDataType(); + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); if (fieldDataType == null) { continue; } @@ -904,7 +908,7 @@ public class SearchService extends AbstractLifecycleComponent { continue; } - final String indexName = fieldMapper.names().indexName(); + final String indexName = fieldMapper.fieldType().names().indexName(); if (warmUp.containsKey(indexName)) { continue; } @@ -924,10 +928,10 @@ public class SearchService extends AbstractLifecycleComponent { final long start = System.nanoTime(); indexFieldDataService.getForField(fieldMapper).load(ctx); if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); + indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); } } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().fullName()); + indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.fieldType().names().fullName()); } finally { latch.countDown(); } @@ -950,14 +954,14 @@ public class SearchService extends AbstractLifecycleComponent { final Map warmUpGlobalOrdinals = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType 
fieldDataType = fieldMapper.fieldDataType(); + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); if (fieldDataType == null) { continue; } if (fieldDataType.getLoading() != Loading.EAGER_GLOBAL_ORDINALS) { continue; } - final String indexName = fieldMapper.names().indexName(); + final String indexName = fieldMapper.fieldType().names().indexName(); if (warmUpGlobalOrdinals.containsKey(indexName)) { continue; } @@ -976,10 +980,10 @@ public class SearchService extends AbstractLifecycleComponent { IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper); ifd.loadGlobal(context.reader()); if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); + indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); } } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().fullName()); + indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.fieldType().names().fullName()); } finally { latch.countDown(); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java index c3a427c1b48..f8c42c36845 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java +++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java @@ -89,7 +89,7 @@ public class ChildrenParser implements Aggregator.Parser { parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter()); childFilter = new 
QueryWrapperFilter(childDocMapper.typeFilter()); ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper); - config.fieldContext(new FieldContext(parentFieldMapper.names().indexName(), parentChildIndexFieldData, parentFieldMapper)); + config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper)); } else { config.unmapped(true); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index 4591e8392e2..f817c7d79fa 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -105,7 +105,7 @@ public class AggregationContext { if (config.fieldContext != null && config.fieldContext.mapper() instanceof DateFieldMapper) { final DateFieldMapper mapper = (DateFieldMapper) config.fieldContext.mapper(); try { - missing = mapper.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis(); + missing = mapper.fieldType().dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis(); } catch (IllegalArgumentException e) { throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java index d9b01bb8f69..7efef92364c 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java @@ -72,7 +72,7 @@ public class ValueFormat { } public static DateTime mapper(DateFieldMapper mapper) { - return new 
DateTime(mapper.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper)); + return new DateTime(mapper.fieldType().dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper)); } public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) { diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java index f3a47e271a1..0a06b27afd6 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java @@ -105,7 +105,7 @@ public interface ValueFormatter extends Streamable { private DateTimeZone timeZone = DateTimeZone.UTC; public static DateTime mapper(DateFieldMapper mapper) { - return new DateTime(mapper.dateTimeFormatter()); + return new DateTime(mapper.fieldType().dateTimeFormatter()); } static final byte ID = 2; diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java index 4314e95b710..ccc57c34288 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java @@ -110,7 +110,7 @@ public interface ValueParser { } public static DateMath mapper(DateFieldMapper mapper) { - return new DateMath(new DateMathParser(mapper.dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)); + return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)); } } diff --git a/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 66f4bf991af..9afa3f5fe35 
100644 --- a/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -152,7 +152,7 @@ public class FetchPhase implements SearchPhase { if (fieldNames == null) { fieldNames = new HashSet<>(); } - fieldNames.add(mapper.names().indexName()); + fieldNames.add(mapper.fieldType().names().indexName()); } else { if (extractFieldNames == null) { extractFieldNames = newArrayList(); diff --git a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index c997624ff60..55ee8f9c21e 100644 --- a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -147,10 +147,10 @@ public class FastVectorHighlighter implements Highlighter { // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible // Only send matched fields if they were requested to save time. 
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.names().indexName(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } @@ -163,7 +163,7 @@ public class FastVectorHighlighter implements Highlighter { // Essentially we just request that a fragment is built from 0 to noMatchSize using the normal fragmentsBuilder FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); fieldFragList.add(0, noMatchSize, Collections.emptyList()); - fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.names().indexName(), + fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java 
b/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java index acbba6749cf..3358aec0ed7 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java @@ -47,9 +47,9 @@ public final class HighlightUtils { boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; if (!forceSource && mapper.fieldType().stored()) { - CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.names().indexName()), false); + CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.fieldType().names().indexName()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); - textsToHighlight = fieldVisitor.fields().get(mapper.names().indexName()); + textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().names().indexName()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = ImmutableList.of(); @@ -57,7 +57,7 @@ public final class HighlightUtils { } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); - textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); } assert textsToHighlight != null; return textsToHighlight; diff --git a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 460b2df05cd..86a50547cbc 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -69,7 +69,7 @@ public class PlainHighlighter implements Highlighter { 
org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.names().indexName() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().names().indexName() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -105,7 +105,7 @@ public class PlainHighlighter implements Highlighter { for (Object textToHighlight : textsToHighlight) { String text = textToHighlight.toString(); - TokenStream tokenStream = analyzer.tokenStream(mapper.names().indexName(), text); + TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text); if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { // can't perform highlighting if the stream has no terms (binary token stream) or no offsets continue; @@ -161,7 +161,7 @@ public class PlainHighlighter implements Highlighter { String fieldContents = textsToHighlight.get(0).toString(); int end; try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer.tokenStream(mapper.names().indexName(), fieldContents)); + end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer.tokenStream(mapper.fieldType().names().indexName(), fieldContents)); } catch (Exception e) { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } diff --git a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index dcbb810d4dd..35f6560899e 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ 
b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -91,7 +91,7 @@ public class PostingsHighlighter implements Highlighter { } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); for (Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java index 3d338277033..ee824ee13c3 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java @@ -48,8 +48,8 @@ public final class FragmentBuilderHelper { */ public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) { assert fragInfo != null : "FragInfo must not be null"; - assert mapper.names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); - if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.indexAnalyzer()))) { + assert mapper.fieldType().names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); + if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.fieldType().indexAnalyzer()))) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. 
To prevent a SAIIOBException we need to resort * the fragments based on their offsets rather than using soley the positions as it is done in diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java index b7670cb08ec..71393fdf190 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java @@ -60,10 +60,10 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java index c52312f5ac0..7a12b449c97 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java @@ -56,13 +56,13 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { SourceLookup sourceLookup = 
searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); if (values.isEmpty()) { return EMPTY_FIELDS; } Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index 3cc9b9e1fd7..e634dff6a2f 100644 --- a/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -85,7 +85,7 @@ public class FieldLookup { } valueLoaded = true; value = null; - List values = fields.get(mapper.names().indexName()); + List values = fields.get(mapper.fieldType().names().indexName()); return values != null ? 
value = values.get(0) : null; } @@ -95,6 +95,6 @@ public class FieldLookup { } valuesLoaded = true; values.clear(); - return values = fields().get(mapper.names().indexName()); + return values = fields().get(mapper.fieldType().names().indexName()); } } diff --git a/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 52e0872742a..9936372c9b8 100644 --- a/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -144,12 +144,12 @@ public class LeafFieldsLookup implements Map { cachedFieldData.put(name, data); } if (data.fields() == null) { - String fieldName = data.mapper().names().indexName(); + String fieldName = data.mapper().fieldType().names().indexName(); fieldVisitor.reset(fieldName); try { reader.document(docId, fieldVisitor); fieldVisitor.postProcess(data.mapper()); - data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.mapper().names().indexName()))); + data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.mapper().fieldType().names().indexName()))); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [" + name + "]", e); } diff --git a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index 3d97b788825..6d1cc200f8a 100644 --- a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -268,7 +268,7 @@ public class SortParseElement implements SearchParseElement { IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldMapper) .comparatorSource(missing, sortMode, nested); - sortFields.add(new SortField(fieldMapper.names().indexName(), fieldComparatorSource, reverse)); + sortFields.add(new 
SortField(fieldMapper.fieldType().names().indexName(), fieldComparatorSource, reverse)); } } diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java index b95d7200d08..879b51a6fe2 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java @@ -253,7 +253,7 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider return new LookupFactory() { @Override public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.names().indexName()); + AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.fieldType().names().indexName()); if (analyzingSuggestHolder == null) { return null; } @@ -263,19 +263,19 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider final Automaton queryPrefix = mapper.requiresContext() ? 
ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null; if (suggestionContext.isFuzzy()) { - suggester = new XFuzzySuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), - suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(), - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); + suggester = new XFuzzySuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), + suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(), + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, + analyzingSuggestHolder.holeCharacter); } else { - suggester = new XAnalyzingSuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, 
analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); + suggester = new XAnalyzingSuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, + analyzingSuggestHolder.holeCharacter); } return suggester; } @@ -304,7 +304,7 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider @Override AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper) { - return lookupMap.get(mapper.names().indexName()); + return lookupMap.get(mapper.fieldType().names().indexName()); } @Override diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index 453dbdc3eb5..91824b75af5 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -191,10 +191,10 @@ public final class PhraseSuggestParser implements SuggestContextParser { throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]"); } else if (suggestion.getAnalyzer() == null) { // no analyzer name passed in, so try the field's analyzer, or the default analyzer - if (fieldMapper.searchAnalyzer() == null) { + if (fieldMapper.fieldType().searchAnalyzer() == null) { suggestion.setAnalyzer(mapperService.searchAnalyzer()); } else { - suggestion.setAnalyzer(fieldMapper.searchAnalyzer()); + suggestion.setAnalyzer(fieldMapper.fieldType().searchAnalyzer()); } } diff --git 
a/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 2592c3cba2e..cf9f09204d9 100644 --- a/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -163,8 +163,8 @@ public class PreBuiltAnalyzerTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); - assertThat(fieldMapper.searchAnalyzer(), instanceOf(NamedAnalyzer.class)); - NamedAnalyzer fieldMapperNamedAnalyzer = (NamedAnalyzer) fieldMapper.searchAnalyzer(); + assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); + NamedAnalyzer fieldMapperNamedAnalyzer = (NamedAnalyzer) fieldMapper.fieldType().searchAnalyzer(); assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer())); } diff --git a/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java index 99bc38b5c84..ceeb448586b 100644 --- a/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java +++ b/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.IndexReader; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import org.junit.Test; diff --git 
a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java index 75c8e18fc91..ab336cf7dab 100644 --- a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java @@ -176,17 +176,20 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { static class FakeFieldMapper extends AbstractFieldMapper { static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); public FakeFieldMapper(String fullName, String indexName) { - super(new Names(fullName, indexName, indexName, fullName), 1.0f, AbstractFieldMapper.Defaults.FIELD_TYPE, null, null, null, null, null, null, dummySettings, null, null); + super(makeFieldType(fullName, indexName), null, null, dummySettings, null, null); + } + static MappedFieldType makeFieldType(String fullName, String indexName) { + MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); + fieldType.setNames(new MappedFieldType.Names(fullName, indexName, indexName, fullName)); + return fieldType; } @Override - public FieldType defaultFieldType() { return null; } + public MappedFieldType defaultFieldType() { return null; } @Override public FieldDataType defaultFieldDataType() { return null; } @Override protected String contentType() { return null; } @Override protected void parseCreateField(ParseContext context, List list) throws IOException {} - @Override - public String value(Object value) { return null; } } } diff --git a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 36c218f3d7c..b0ff89f414d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -306,9 +306,9 @@ 
public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { assertThat(field, nullValue()); } if (similarity == null || similarity.equals("TF/IDF")) { - assertThat(builtDocMapper.allFieldMapper().similarity(), nullValue()); + assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue()); } else { - assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().similarity().name())); + assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name())); } assertThat(builtMapping.contains("fielddata"), is(fieldData)); if (allDefault) { diff --git a/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java index c09741ba17e..d28609e5bee 100644 --- a/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java @@ -108,7 +108,7 @@ public class Murmur3FieldMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = parser.parse(mapping); Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertFalse(mapper.hasDocValues()); + assertFalse(mapper.fieldType().hasDocValues()); } public void testIndexSettingBackcompat() throws Exception { diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index a8bf1476465..45c6322f359 100755 --- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -23,7 +23,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.spatial4j.core.shape.Point; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -32,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -61,15 +61,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; * .shape GeoShape type */ public class ExternalMapper extends AbstractFieldMapper { - /** - * Returns the actual value of the field. - * - * @param value - */ - @Override - public Object value(Object value) { - return null; - } public static class Names { public static final String FIELD_BIN = "bin"; @@ -89,7 +80,7 @@ public class ExternalMapper extends AbstractFieldMapper { private String mapperName; public Builder(String name, String generatedValue, String mapperName) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; this.stringBuilder = stringField(name).store(false); this.generatedValue = generatedValue; @@ -115,8 +106,9 @@ public class ExternalMapper extends AbstractFieldMapper { context.path().remove(); context.path().pathType(origPathType); + setupFieldType(context); - return new ExternalMapper(buildNames(context), generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, + return new ExternalMapper(fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } @@ -159,11 +151,11 @@ public class ExternalMapper extends AbstractFieldMapper { private final 
GeoShapeFieldMapper shapeMapper; private final FieldMapper stringMapper; - public ExternalMapper(FieldMapper.Names names, + public ExternalMapper(MappedFieldType fieldType, String generatedValue, String mapperName, BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper, GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1.0f, Defaults.FIELD_TYPE, false, null, null, null, null, null, indexSettings, + super(fieldType, false, null, indexSettings, multiFields, copyTo); this.generatedValue = generatedValue; this.mapperName = mapperName; @@ -175,7 +167,7 @@ public class ExternalMapper extends AbstractFieldMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -236,7 +228,7 @@ public class ExternalMapper extends AbstractFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names().shortName()); + builder.startObject(fieldType().names().shortName()); builder.field("type", mapperName); multiFields.toXContent(builder, params); builder.endObject(); diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index b959bb41ab6..d31e2a1b01b 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -55,12 +55,12 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = 
geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(GeoShapeFieldMapper.Defaults.DISTANCE_ERROR_PCT)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS)); - assertThat(geoShapeFieldMapper.orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); } /** @@ -79,7 +79,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).orientation(); + ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); @@ -96,7 +96,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - orientation = ((GeoShapeFieldMapper)fieldMapper).orientation(); + orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); @@ -118,7 +118,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = 
geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.1)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -141,7 +141,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -169,7 +169,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -193,7 +193,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and // "tree_levels" setting distErrPct to 0 to guarantee desired precision @@ -219,7 +219,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, 
instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -243,7 +243,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -266,7 +266,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -292,7 +292,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -314,7 +314,7 @@ public class GeoShapeFieldMapperTests extends 
ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -351,13 +351,13 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getDistErrPct(), equalTo(0.01)); assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CCW)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); // correct mapping stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -373,12 +373,12 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - strategy = geoShapeFieldMapper.defaultStrategy(); + strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getDistErrPct(), equalTo(0.001)); assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CW)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW)); } } diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 50cc9968466..12d5211f087 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -96,25 +96,25 @@ public class GeohashMappingGeoPointTests extends ElasticsearchSingleNodeTest { @Test public void testGeoHashPrecisionAsInteger() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", 10).endObject().endObject() + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", 10).endObject().endObject() .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); assertThat(mapper, instanceOf(GeoPointFieldMapper.class)); GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.geoHashPrecision(), is(10)); + assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10)); } @Test public void testGeoHashPrecisionAsLength() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", "5m").endObject().endObject() + 
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject() .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); assertThat(mapper, instanceOf(GeoPointFieldMapper.class)); GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.geoHashPrecision(), is(10)); + assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10)); } @Test diff --git a/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index 8da6e71c135..a8ce3c145a7 100644 --- a/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -69,7 +69,7 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldNamesFieldMapper fieldNamesMapper = docMapper.rootMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.hasDocValues()); + assertFalse(fieldNamesMapper.fieldType().hasDocValues()); assertEquals(IndexOptions.DOCS, fieldNamesMapper.fieldType().indexOptions()); assertFalse(fieldNamesMapper.fieldType().tokenized()); assertFalse(fieldNamesMapper.fieldType().stored()); diff --git a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 620847559ee..26ffbefdd1c 100644 --- a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ 
b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -130,11 +130,11 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper existing = parser.parse(mapping1); DocumentMapper changed = parser.parse(mapping2); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); MergeResult mergeResult = existing.merge(changed.mapping(), false); assertThat(mergeResult.hasConflicts(), equalTo(false)); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("keyword")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); } @Test @@ -150,11 +150,11 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper existing = parser.parse(mapping1); DocumentMapper changed = parser.parse(mapping2); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); MergeResult mergeResult = existing.merge(changed.mapping(), false); assertThat(mergeResult.hasConflicts(), equalTo(false)); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("standard")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14)); } diff --git a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 
e7df72c3dcd..4c845b4a91f 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -27,14 +27,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.core.*; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.junit.Test; @@ -46,8 +50,12 @@ import java.util.TreeMap; import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.mapper.MapperBuilders.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.index.mapper.MapperBuilders.doc; +import static org.elasticsearch.index.mapper.MapperBuilders.rootObject; +import static org.elasticsearch.index.mapper.MapperBuilders.stringField; +import 
static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; /** * @@ -119,7 +127,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true)); assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER)); + assertThat(docMapper.mappers().getMapper("name.test1").fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); assertThat(docMapper.mappers().getMapper("name.test2"), notNullValue()); assertThat(docMapper.mappers().getMapper("name.test2"), instanceOf(TokenCountFieldMapper.class)); diff --git a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java index 3e5c29daade..60609d82b1a 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java @@ -113,6 +113,7 @@ public class MultiFieldsIntegrationTests extends ElasticsearchIntegrationTest { assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); + logger.info("Keys: " + aField.keySet()); assertThat(aField.size(), equalTo(2)); assertThat(aField.get("type").toString(), equalTo("geo_point")); assertThat(aField.get("fields"), notNullValue()); diff --git a/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java 
b/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java index 5ab240857b3..c429a012f86 100644 --- a/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java @@ -52,7 +52,7 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); doc = docMapper.parse("person", "1", json).rootDoc(); @@ -71,8 +71,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper builtDocMapper = parser.parse(builtMapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = builtDocMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } @@ -86,8 +86,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { BytesReference json = new 
BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } @@ -98,8 +98,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } diff --git a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 5b429d093c2..0583e289994 100644 --- a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ 
b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -54,6 +54,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Map; +import static org.elasticsearch.index.mapper.core.StringFieldMapper.Builder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -371,9 +372,9 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1)); - assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues()); + assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") @@ -414,11 +415,11 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest { fail(); } catch (Exception e) { /* OK */ } - assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(true).tokenized(false).build(ctx).hasDocValues()); - assertFalse(new StringFieldMapper.Builder("anything").index(true).tokenized(true).build(ctx).hasDocValues()); - assertFalse(new 
StringFieldMapper.Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues()); + assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") diff --git a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index e415ea76c07..3e40a84c931 100644 --- a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -113,8 +113,8 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0) ? 
true : false)); assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH)); - assertThat(docMapper.timestampFieldMapper().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(false)); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(false)); assertAcked(client().admin().indices().prepareDelete("test").execute().get()); } } @@ -135,8 +135,8 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false)); assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions()); assertThat(docMapper.timestampFieldMapper().path(), equalTo("timestamp")); - assertThat(docMapper.timestampFieldMapper().dateTimeFormatter().format(), equalTo("year")); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(true)); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo("year")); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(true)); } @Test @@ -507,16 +507,16 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); + 
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() .endObject().endObject().string(); MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false); assertThat(mergeResult.buildConflicts().length, equalTo(0)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("array")); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); } @Test @@ -574,7 +574,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false) .startObject("fielddata").field("format", "array").endObject() @@ -597,9 +597,9 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertTrue("found unexpected conflict [" + conflict + "]", 
expectedConflicts.remove(conflict)); } assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertTrue(docMapper.timestampFieldMapper().enabled()); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); } @Test @@ -739,9 +739,9 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { void assertDocValuesSerialization(String mapping) throws Exception { DocumentMapperParser parser = createIndex("test_doc_values").mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - boolean docValues= docMapper.timestampFieldMapper().hasDocValues(); + boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues(); docMapper = parser.parse(docMapper.mappingSource().string()); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(docValues)); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues)); assertAcked(client().admin().indices().prepareDelete("test_doc_values")); } diff --git a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 1090f6df85a..588891d1cf4 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -149,10 +149,10 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { DocumentMapper documentMapper = 
indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); + assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); + assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); } diff --git a/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index c2c98553bbf..3f4119b4739 100644 --- a/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -56,9 +56,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(DefaultSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DefaultSimilarityProvider.class)); - DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); 
assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } @@ -78,9 +78,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(BM25SimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(BM25SimilarityProvider.class)); - BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").similarity().get(); + BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); assertThat(similarity.getB(), equalTo(1.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); @@ -103,9 +103,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(DFRSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DFRSimilarityProvider.class)); - DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getBasicModel(), instanceOf(BasicModelG.class)); assertThat(similarity.getAfterEffect(), instanceOf(AfterEffectL.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -129,9 +129,9 @@ 
public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(IBSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(IBSimilarityProvider.class)); - IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getDistribution(), instanceOf(DistributionSPL.class)); assertThat(similarity.getLambda(), instanceOf(LambdaTTF.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -152,9 +152,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(LMDirichletSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMDirichletSimilarityProvider.class)); - LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getMu(), equalTo(3000f)); } @@ -172,9 +172,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", 
indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); - LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } } diff --git a/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java b/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java index 637c877caf3..adf1fbf2a04 100644 --- a/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java +++ b/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.search.SearchService; diff --git a/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java b/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java index 733f0a26cd2..b61ad6e61df 100644 --- a/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java +++ b/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java @@ -26,11 +26,11 @@ 
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.policy.MergePolicyModule; -import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -62,7 +62,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.LAZY))); + .addMapping("child", childMapping(MappedFieldType.Loading.LAZY))); ensureGreen(); client().prepareIndex("test", "parent", "1").setSource("{}").get(); @@ -93,7 +93,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.EAGER))); + .addMapping("child", childMapping(MappedFieldType.Loading.EAGER))); ensureGreen(); client().prepareIndex("test", "parent", "1").setSource("{}").get(); @@ -108,7 +108,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.EAGER_GLOBAL_ORDINALS))); + .addMapping("child", childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))); ensureGreen(); // Need to do 2 separate refreshes, otherwise we have 1 segment and then we can't measure if 
global ordinals @@ -139,7 +139,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertThat(fielddataSizeDefault, greaterThan(0l)); PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child") - .setSource(childMapping(FieldMapper.Loading.EAGER_GLOBAL_ORDINALS)) + .setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS)) .get(); assertAcked(putMappingResponse); assertBusy(new Runnable() { @@ -156,7 +156,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { MapperService mapperService = indexService.mapperService(); DocumentMapper documentMapper = mapperService.documentMapper("child"); if (documentMapper != null) { - verified = documentMapper.parentFieldMapper().fieldDataType().getLoading() == FieldMapper.Loading.EAGER_GLOBAL_ORDINALS; + verified = documentMapper.parentFieldMapper().fieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS; } } assertTrue(verified); @@ -171,10 +171,10 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(fielddataSizeDefault)); } - private XContentBuilder childMapping(FieldMapper.Loading loading) throws IOException { + private XContentBuilder childMapping(MappedFieldType.Loading loading) throws IOException { return jsonBuilder().startObject().startObject("child").startObject("_parent") .field("type", "parent") - .startObject("fielddata").field(FieldMapper.Loading.KEY, loading).endObject() + .startObject("fielddata").field(MappedFieldType.Loading.KEY, loading).endObject() .endObject().endObject().endObject(); } diff --git a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java index 0d4329d5bb7..83bcdb44397 100644 --- 
a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java +++ b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.cache.filter.FilterCacheModule; import org.elasticsearch.index.cache.filter.FilterCacheModule.FilterCacheSettings; import org.elasticsearch.index.cache.filter.index.IndexFilterCache; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; diff --git a/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java b/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java index 13e4d0d6554..eb6a6629aac 100644 --- a/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java +++ b/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java @@ -461,7 +461,7 @@ public class GeoShapeIntegrationTests extends ElasticsearchIntegrationTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)fieldMapper; - ShapeBuilder.Orientation orientation = gsfm.orientation(); + ShapeBuilder.Orientation orientation = gsfm.fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); @@ -473,7 +473,7 @@ public class GeoShapeIntegrationTests extends ElasticsearchIntegrationTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); gsfm = (GeoShapeFieldMapper)fieldMapper; - orientation = gsfm.orientation(); + orientation = gsfm.fieldType().orientation(); assertThat(orientation, 
equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); diff --git a/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java b/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java index d8cd3dd3fc7..a2867abcd81 100644 --- a/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java +++ b/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java @@ -59,6 +59,8 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; +import static org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester.HOLE_CHARACTER; + /** * This is an older implementation of the AnalyzingCompletionLookupProvider class * We use this to test for backwards compatibility in our tests, namely @@ -232,7 +234,7 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide return new LookupFactory() { @Override public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.names().indexName()); + AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.fieldType().names().indexName()); if (analyzingSuggestHolder == null) { return null; } @@ -242,18 +244,18 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide XAnalyzingSuggester suggester; if (suggestionContext.isFuzzy()) { - suggester = new XFuzzySuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), - suggestionContext.getFuzzyPrefixLength(), 
suggestionContext.getFuzzyMinLength(), false, - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); + suggester = new XFuzzySuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), + suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), false, + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER); } else { - suggester = new XAnalyzingSuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); + suggester = new XAnalyzingSuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + analyzingSuggestHolder.preservePositionIncrements, + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER); } return suggester; } @@ -285,7 +287,7 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide @Override AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper) { - return lookupMap.get(mapper.names().indexName()); + 
return lookupMap.get(mapper.fieldType().names().indexName()); } @Override diff --git a/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java b/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java index 757723ef4d0..d77d6308b62 100644 --- a/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java +++ b/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion; import com.google.common.collect.Lists; - import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldsConsumer; @@ -55,7 +54,8 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.codec.postingsformat.Elasticsearch090PostingsFormat; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.search.suggest.SuggestUtils; @@ -76,8 +76,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class CompletionPostingsFormatTest extends ElasticsearchTestCase { - + Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + static final MappedFieldType FIELD_TYPE = CompletionFieldMapper.Defaults.FIELD_TYPE.clone(); + static final NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); + static { + FIELD_TYPE.setNames(new Names("foo")); + FIELD_TYPE.setIndexAnalyzer(analyzer); + 
FIELD_TYPE.setSearchAnalyzer(analyzer); + FIELD_TYPE.freeze(); + } @Test public void testCompletionPostingsFormat() throws IOException { @@ -92,8 +100,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = PostingsFormat.forName(Lucene.LATEST_POSTINGS_FORMAT); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - Lookup lookup = load.getLookup(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null)); + Lookup lookup = load.getLookup(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null)); List result = lookup.lookup("ge", false, 10); assertThat(result.get(0).key.toString(), equalTo("Generator - Foo Fighters")); assertThat(result.get(0).payload.utf8ToString(), equalTo("id:10")); @@ -111,8 +118,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); + AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = 
load.getAnalyzingSuggestHolder(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); assertThat(analyzingSuggestHolder.sepLabel, is(AnalyzingCompletionLookupProviderV1.SEP_LABEL)); assertThat(analyzingSuggestHolder.payloadSep, is(AnalyzingCompletionLookupProviderV1.PAYLOAD_SEP)); assertThat(analyzingSuggestHolder.endByte, is(AnalyzingCompletionLookupProviderV1.END_BYTE)); @@ -129,8 +135,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); + AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); assertThat(analyzingSuggestHolder.sepLabel, is(XAnalyzingSuggester.SEP_LABEL)); assertThat(analyzingSuggestHolder.payloadSep, is(XAnalyzingSuggester.PAYLOAD_SEP)); assertThat(analyzingSuggestHolder.endByte, is(XAnalyzingSuggester.END_BYTE)); @@ -237,8 +242,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { reference.build(iter); PostingsFormat provider = PostingsFormat.forName(Lucene.LATEST_POSTINGS_FORMAT); - NamedAnalyzer namedAnalzyer = new NamedAnalyzer("foo", new StandardAnalyzer()); - final 
CompletionFieldMapper mapper = new CompletionFieldMapper(new Names("foo"), namedAnalzyer, namedAnalzyer, provider, null, usePayloads, + final CompletionFieldMapper mapper = new CompletionFieldMapper(FIELD_TYPE, provider, usePayloads, preserveSeparators, preservePositionIncrements, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING); Lookup buildAnalyzingLookup = buildAnalyzingLookup(mapper, titles, titles, weights); Field field = buildAnalyzingLookup.getClass().getDeclaredField("maxAnalyzedPathsForOneInput"); @@ -250,7 +254,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { for (int i = 0; i < titles.length; i++) { int res = between(1, 10); final StringBuilder builder = new StringBuilder(); - SuggestUtils.analyze(namedAnalzyer.tokenStream("foo", titles[i]), new SuggestUtils.TokenConsumer() { + SuggestUtils.analyze(analyzer.tokenStream("foo", titles[i]), new SuggestUtils.TokenConsumer() { @Override public void nextToken() throws IOException { if (builder.length() == 0) { @@ -285,7 +289,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { return mapper.postingsFormat(in); } }; - IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.indexAnalyzer()); + IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.fieldType().indexAnalyzer()); indexWriterConfig.setCodec(filterCodec); IndexWriter writer = new IndexWriter(dir, indexWriterConfig); @@ -305,7 +309,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { assertThat(reader.leaves().size(), equalTo(1)); assertThat(reader.leaves().get(0).reader().numDocs(), equalTo(weights.length)); LeafReaderContext atomicReaderContext = reader.leaves().get(0); - Terms luceneTerms = atomicReaderContext.reader().terms(mapper.names().fullName()); + Terms luceneTerms = atomicReaderContext.reader().terms(mapper.fieldType().names().fullName()); Lookup lookup = 
((Completion090PostingsFormat.CompletionTerms) luceneTerms).getLookup(mapper, new CompletionSuggestionContext(null)); reader.close(); writer.close(); @@ -340,8 +344,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = provider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - assertNull(load.getLookup(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null))); + assertNull(load.getLookup(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null))); dir.close(); } diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index 81f0e9f39bf..621d4fc4d01 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -98,7 +98,8 @@ import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.internal.SizeFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.merge.policy.*; @@ -558,7 +559,7 @@ public 
abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase private static Settings.Builder setRandomNormsLoading(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(FieldMapper.Loading.EAGER, FieldMapper.Loading.LAZY))); + builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); } return builder; } From 35a58d874ef56be50a0ad1d7bfb13edb4204d0a3 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Tue, 12 May 2015 10:37:22 +0100 Subject: [PATCH 072/123] Scripting: Unify script and template requests across codebase This change unifies the way scripts and templates are specified for all instances in the codebase. It builds on the Script class added previously and adds request building and parsing support as well as the ability to transfer script objects between nodes. 
It also adds a Template class which aims to provide the same functionality for template APIs Closes #11091 --- .../scripted-metric-aggregation.asciidoc | 22 +- docs/java-api/update.asciidoc | 4 +- docs/reference/aggregations.asciidoc | 2 - .../bucket/range-aggregation.asciidoc | 29 +- .../significantterms-aggregation.asciidoc | 7 - .../bucket/terms-aggregation.asciidoc | 22 +- .../metrics/avg-aggregation.asciidoc | 32 +- .../metrics/cardinality-aggregation.asciidoc | 23 +- .../extendedstats-aggregation.asciidoc | 32 +- .../metrics/max-aggregation.asciidoc | 30 +- .../metrics/min-aggregation.asciidoc | 30 +- .../metrics/percentile-aggregation.asciidoc | 30 +- .../percentile-rank-aggregation.asciidoc | 32 +- .../scripted-metric-aggregation.asciidoc | 45 +- .../metrics/stats-aggregation.asciidoc | 32 +- .../metrics/sum-aggregation.asciidoc | 27 +- .../metrics/valuecount-aggregation.asciidoc | 24 +- docs/reference/docs/bulk.asciidoc | 2 +- docs/reference/docs/update.asciidoc | 46 +- docs/reference/mapping/transform.asciidoc | 12 +- docs/reference/modules/scripting.asciidoc | 20 +- .../query-dsl/function-score-query.asciidoc | 14 +- .../reference/query-dsl/script-query.asciidoc | 8 +- .../query-dsl/template-query.asciidoc | 4 +- .../search/request/script-fields.asciidoc | 8 +- docs/reference/search/request/sort.asciidoc | 8 +- .../reference/search/search-template.asciidoc | 18 +- .../action/bulk/BulkRequest.java | 9 +- .../action/search/SearchRequest.java | 90 +- .../action/search/SearchRequestBuilder.java | 68 +- .../action/update/UpdateHelper.java | 10 +- .../action/update/UpdateRequest.java | 201 ++- .../action/update/UpdateRequestBuilder.java | 35 +- .../search/function/ScriptScoreFunction.java | 15 +- .../common/xcontent/XContentType.java | 18 + .../index/mapper/DocumentMapper.java | 80 +- .../index/mapper/DocumentMapperParser.java | 23 +- .../index/query/QueryBuilders.java | 21 + .../index/query/ScriptQueryBuilder.java | 57 +- 
.../index/query/ScriptQueryParser.java | 66 +- .../index/query/TemplateQueryBuilder.java | 60 +- .../index/query/TemplateQueryParser.java | 94 +- .../functionscore/ScoreFunctionBuilders.java | 21 + .../script/ScriptScoreFunctionBuilder.java | 54 +- .../script/ScriptScoreFunctionParser.java | 30 +- .../query/support/BaseInnerHitBuilder.java | 50 +- .../rest/action/update/RestUpdateAction.java | 25 +- .../script/AbstractScriptParser.java | 196 +++ .../java/org/elasticsearch/script/Script.java | 268 +++- .../script/ScriptParameterParser.java | 19 +- .../elasticsearch/script/ScriptService.java | 83 +- .../org/elasticsearch/script/Template.java | 198 +++ .../elasticsearch/search/SearchService.java | 35 +- .../ValuesSourceAggregationBuilder.java | 80 +- .../significant/SignificantTermsBuilder.java | 2 +- .../significant/heuristics/ChiSquare.java | 3 +- .../bucket/significant/heuristics/GND.java | 3 +- .../significant/heuristics/JLHScore.java | 3 +- .../heuristics/MutualInformation.java | 3 +- .../heuristics/PercentageScore.java | 3 +- .../heuristics/ScriptHeuristic.java | 160 ++- .../SignificanceHeuristicBuilder.java | 8 +- .../scripted/InternalScriptedMetric.java | 47 +- .../scripted/ScriptedMetricAggregator.java | 101 +- .../scripted/ScriptedMetricBuilder.java | 144 ++- .../scripted/ScriptedMetricParser.java | 89 +- .../metrics/tophits/TopHitsBuilder.java | 50 +- .../support/ValuesSourceParser.java | 37 +- .../search/builder/SearchSourceBuilder.java | 56 +- .../script/ScriptFieldsParseElement.java | 40 +- .../internal/ShardSearchLocalRequest.java | 39 +- .../search/internal/ShardSearchRequest.java | 9 +- .../internal/ShardSearchTransportRequest.java | 17 +- .../search/sort/ScriptSortBuilder.java | 88 +- .../search/sort/ScriptSortParser.java | 42 +- .../search/sort/SortBuilders.java | 16 + .../suggest/phrase/PhraseSuggestParser.java | 19 +- .../action/IndicesRequestTests.java | 28 +- .../action/bulk/BulkRequestTests.java | 16 +- .../action/update/UpdateRequestTests.java 
| 108 +- .../expression/ScriptComparisonBenchmark.java | 4 +- .../scripts/score/BasicScriptBenchmark.java | 16 +- ...TimeDataHistogramAggregationBenchmark.java | 11 +- .../cluster/NoMasterNodeTests.java | 25 +- .../function/ScriptScoreFunctionTests.java | 3 +- .../org/elasticsearch/document/BulkTests.java | 273 ++++ .../index/query/TemplateQueryBuilderTest.java | 21 +- .../index/query/TemplateQueryParserTest.java | 8 +- .../index/query/TemplateQueryTest.java | 406 +++++- .../nested/SimpleNestedTests.java | 144 +++ .../percolator/PercolatorTests.java | 18 +- .../routing/AliasRoutingTests.java | 4 +- .../script/CustomScriptContextTests.java | 23 +- .../script/GroovyScriptTests.java | 31 +- .../script/IndexLookupTests.java | 579 ++++++++- .../script/IndexedScriptTests.java | 62 +- .../script/NativeScriptTests.java | 7 +- .../script/OnDiskScriptTests.java | 33 + .../script/ScriptFieldTests.java | 32 + .../script/ScriptParameterParserTest.java | 46 +- .../script/ScriptServiceTests.java | 35 +- .../expression/ExpressionScriptTests.java | 11 +- .../search/aggregations/EquivalenceTests.java | 9 +- .../bucket/DateHistogramTests.java | 411 +++++- .../aggregations/bucket/DateRangeTests.java | 453 ++++++- .../aggregations/bucket/DoubleTermsTests.java | 538 ++++++-- .../aggregations/bucket/HistogramTests.java | 296 ++++- .../aggregations/bucket/IPv4RangeTests.java | 452 ++++++- .../aggregations/bucket/LongTermsTests.java | 276 +++- .../aggregations/bucket/MinDocCountTests.java | 3 +- .../aggregations/bucket/RangeTests.java | 491 ++++++- ...ignificantTermsSignificanceScoreTests.java | 151 ++- .../aggregations/bucket/StringTermsTests.java | 1141 ++++++++++------- .../aggregations/bucket/TopHitsTests.java | 132 +- .../metrics/ScriptedMetricTests.java | 741 ++++++++++- .../child/SimpleChildQuerySearchTests.java | 13 +- .../search/fields/SearchFieldsTests.java | 256 +++- .../functionscore/ExplainableScriptTests.java | 11 +- .../functionscore/FunctionScoreTests.java | 297 ++++- 
.../RandomScoreFunctionTests.java | 95 +- .../search/geo/GeoDistanceTests.java | 137 +- .../search/innerhits/InnerHitsTests.java | 246 +++- .../search/query/SearchQueryTests.java | 64 +- .../search/rescore/QueryRescorerTests.java | 54 +- .../scriptfilter/ScriptQuerySearchTests.java | 80 +- .../search/sort/SimpleSortTests.java | 487 ++++++- .../search/stats/SearchStatsTests.java | 15 +- .../search/timeout/SearchTimeoutTests.java | 5 +- .../update/UpdateByNativeScriptTests.java | 30 +- .../org/elasticsearch/update/UpdateTests.java | 826 +++++++++++- 130 files changed, 10901 insertions(+), 2201 deletions(-) create mode 100644 src/main/java/org/elasticsearch/script/AbstractScriptParser.java create mode 100644 src/main/java/org/elasticsearch/script/Template.java diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index c2776b84797..e9c79ed59d8 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -30,10 +30,10 @@ MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }"); + "{ _agg.heights.add(-1 * doc['height'].value) }")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: @@ -43,12 +43,12 @@ You can also specify a `combine` script which will be executed on each shard: MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .initScript(new Script("_agg['heights'] = []")) + 
.mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }") - .combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum"); + "{ _agg.heights.add(-1 * doc['height'].value) }")) + .combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum")); -------------------------------------------------- You can also specify a `reduce` script which will be executed on the node which gets the request: @@ -58,13 +58,13 @@ You can also specify a `reduce` script which will be executed on the node which MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .initScript(new Script("_agg['heights'] = []")) + .mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }") - .combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum") - .reduceScript("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum"); + "{ _agg.heights.add(-1 * doc['height'].value) }")) + .combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum")) + .reduceScript(new Script("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum")); -------------------------------------------------- diff --git a/docs/java-api/update.asciidoc b/docs/java-api/update.asciidoc index 2de835755c6..ea25ec0c2d2 100644 --- a/docs/java-api/update.asciidoc +++ b/docs/java-api/update.asciidoc @@ -22,7 +22,7 @@ Or you can use `prepareUpdate()` method: [source,java] -------------------------------------------------- client.prepareUpdate("ttl", "doc", "1") - .setScript("ctx._source.gender = 
\"male\"" <1> , ScriptService.ScriptType.INLINE) + .setScript(new Script("ctx._source.gender = \"male\"" <1> , ScriptService.ScriptType.INLINE, null, null)) .get(); client.prepareUpdate("ttl", "doc", "1") @@ -46,7 +46,7 @@ The update API allows to update a document based on a script provided: [source,java] -------------------------------------------------- UpdateRequest updateRequest = new UpdateRequest("ttl", "doc", "1") - .script("ctx._source.gender = \"male\""); + .script(new Script("ctx._source.gender = \"male\"")); client.update(updateRequest).get(); -------------------------------------------------- diff --git a/docs/reference/aggregations.asciidoc b/docs/reference/aggregations.asciidoc index 2464873b452..b1b34ee66df 100644 --- a/docs/reference/aggregations.asciidoc +++ b/docs/reference/aggregations.asciidoc @@ -73,8 +73,6 @@ Some aggregations work on values extracted from the aggregated documents. Typica a specific document field which is set using the `field` key for the aggregations. It is also possible to define a <> which will generate the values (per document). -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. - When both `field` and `script` settings are configured for the aggregation, the script will be treated as a `value script`. While normal scripts are evaluated on a document level (i.e. the script has access to all the data associated with the document), value scripts are evaluated on the *value* level. 
In this mode, the values are extracted diff --git a/docs/reference/aggregations/bucket/range-aggregation.asciidoc b/docs/reference/aggregations/bucket/range-aggregation.asciidoc index f7bfcab0644..d428d44523f 100644 --- a/docs/reference/aggregations/bucket/range-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/range-aggregation.asciidoc @@ -128,8 +128,6 @@ It is also possible to customize the key for each range: ==== Script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. - [source,js] -------------------------------------------------- { @@ -148,6 +146,33 @@ TIP: The `script` parameter expects an inline script. Use `script_id` for indexe } -------------------------------------------------- +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "price_ranges" : { + "range" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + }, + "ranges" : [ + { "to" : 50 }, + { "from" : 50, "to" : 100 }, + { "from" : 100 } + ] + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. + ==== Value Script Lets say the product prices are in USD but we would like to get the price ranges in EURO. 
We can use value script to convert the prices prior the aggregation (assuming conversion rate of 0.8) diff --git a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc index 1e329db1df4..80c747e61a5 100644 --- a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc @@ -358,13 +358,6 @@ Customized scores can be implemented via a script: -------------------------------------------------- Scripts can be inline (as in above example), indexed or stored on disk. For details on the options, see <>. -Parameters need to be set as follows: - -[horizontal] -`script`:: Inline script, name of script file or name of indexed script. Mandatory. -`script_type`:: One of "inline" (default), "indexed" or "file". -`lang`:: Script language (default "groovy") -`params`:: Script parameters (default empty). Available parameters in the script are diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index cf401126c8a..70bdb00d184 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -441,7 +441,27 @@ Generating the terms using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. 
To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "genders" : { + "terms" : { + "script" : { + "file": "my_script", + "params": { + "field": "gender" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ==== Value Script diff --git a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc index 8e0d2b4b5e7..f81cd3eee33 100644 --- a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc @@ -47,7 +47,29 @@ Computing the average grade based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "avg_grade" : { + "avg" : { + "script" : { + "file": "my_script", + "params": { + "field": "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
===== Value Script @@ -63,9 +85,11 @@ It turned out that the exam was way above the level of the students and a grade "avg_corrected_grade" : { "avg" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc index 8e34e16f7a8..0b484288b1c 100644 --- a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc @@ -153,7 +153,28 @@ however since hashes need to be computed on the fly. } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "author_count" : { + "cardinality" : { + "script" : { + "file": "my_script", + "params": { + "first_name_field": "author.first_name", + "last_name_field": "author.last_name" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Missing value diff --git a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc index 0f65b7670cf..30a5acf6809 100644 --- a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc @@ -91,7 +91,29 @@ Computing the grades stats based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_stats" : { + "extended_stats" : { + "script" : { + "file": "my_script", + "params": { + "field": "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
===== Value Script @@ -107,9 +129,11 @@ It turned out that the exam was way above the level of the students and a grade "grades_stats" : { "extended_stats" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 856adc4b03d..2a641fda5dc 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -44,7 +44,27 @@ Computing the max price value across all document, this time using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "max_price" : { + "max" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Value Script @@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would "max_price_in_euros" : { "max" : { "field" : "price", - "script" : "_value * conversion_rate", - "params" : { - "conversion_rate" : 1.2 + "script" : { + "inline": "_value * conversion_rate", + "params" : { + "conversion_rate" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index c7424d5570b..7698a41202c 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -44,7 +44,27 @@ Computing the min price value across all document, this time using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "min_price" : { + "min" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Value Script @@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would "min_price_in_euros" : { "min" : { "field" : "price", - "script" : "_value * conversion_rate", - "params" : { - "conversion_rate" : 1.2 + "script" : { + "inline": "_value * conversion_rate", + "params" : { + "conversion_rate" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index d5262beb6ef..ecad363886d 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -100,9 +100,11 @@ a script to convert them on-the-fly: "aggs" : { "load_time_outlier" : { "percentiles" : { - "script" : "doc['load_time'].value / timeUnit", <1> - "params" : { - "timeUnit" : 1000 <2> + "script" : { + "inline": "doc['load_time'].value / timeUnit", <1> + "params" : { + "timeUnit" : 1000 <2> + } } } } @@ -113,7 +115,27 @@ a script to convert them on-the-fly: script to generate values which percentiles are calculated on <2> Scripting supports parameterized input just like any other script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "load_time_outlier" : { + "percentiles" : { + "script" : { + "file": "my_script", + "params" : { + "timeUnit" : 1000 + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
[[search-aggregations-metrics-percentile-aggregation-approximation]] ==== Percentiles are (usually) approximate diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index a494a0a5d00..5da59061e0b 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -72,9 +72,11 @@ a script to convert them on-the-fly: "load_time_outlier" : { "percentile_ranks" : { "values" : [3, 5], - "script" : "doc['load_time'].value / timeUnit", <1> - "params" : { - "timeUnit" : 1000 <2> + "script" : { + "inline": "doc['load_time'].value / timeUnit", <1> + "params" : { + "timeUnit" : 1000 <2> + } } } } @@ -85,7 +87,28 @@ a script to convert them on-the-fly: script to generate values which percentile ranks are calculated on <2> Scripting supports parameterized input just like any other script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "load_time_outlier" : { + "percentile_ranks" : { + "values" : [3, 5], + "script" : { + "file": "my_script", + "params" : { + "timeUnit" : 1000 + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ==== Missing value @@ -108,3 +131,4 @@ had a value. -------------------------------------------------- <1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`. 
+ diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index a775d545409..6db8c82a9e8 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -45,6 +45,42 @@ The response for the above aggregation: } -------------------------------------------------- +The above example can also be specified using file scripts as follows: + +[source,js] +-------------------------------------------------- +{ + "query" : { + "match_all" : {} + }, + "aggs": { + "profit": { + "scripted_metric": { + "init_script" : { + "file": "my_init_script" + }, + "map_script" : { + "file": "my_map_script" + }, + "combine_script" : { + "file": "my_combine_script" + }, + "params": { + "field": "amount" <1> + }, + "reduce_script" : { + "file": "my_reduce_script" + } + } + } + } +} +-------------------------------------------------- + +<1> script parameters for init, map and combine scripts must be specified in a global `params` object so that it can be shared between the scripts + +For more details on specifying scripts see <>. + ==== Scope of scripts The scripted metric aggregation uses scripts at 4 stages of its execution: @@ -225,13 +261,4 @@ params:: Optional. An object whose contents will be passed as variable -------------------------------------------------- reduce_params:: Optional. An object whose contents will be passed as variables to the `reduce_script`. This can be useful to allow the user to control the behavior of the reduce phase. If this is not specified the variable will be undefined in the reduce_script execution. -lang:: Optional. The script language used for the scripts. If this is not specified the default scripting language is used. -init_script_file:: Optional. Can be used in place of the `init_script` parameter to provide the script using in a file. 
-init_script_id:: Optional. Can be used in place of the `init_script` parameter to provide the script using an indexed script. -map_script_file:: Optional. Can be used in place of the `map_script` parameter to provide the script using in a file. -map_script_id:: Optional. Can be used in place of the `map_script` parameter to provide the script using an indexed script. -combine_script_file:: Optional. Can be used in place of the `combine_script` parameter to provide the script using in a file. -combine_script_id:: Optional. Can be used in place of the `combine_script` parameter to provide the script using an indexed script. -reduce_script_file:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using in a file. -reduce_script_id:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using an indexed script. diff --git a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc index 429be4b8c4d..852c1c3f7a9 100644 --- a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc @@ -53,7 +53,29 @@ Computing the grades stats based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. 
To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_stats" : { + "stats" : { + "script" : { + "file": "my_script", + "params" : { + "field" : "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ===== Value Script @@ -69,9 +91,11 @@ It turned out that the exam was way above the level of the students and a grade "grades_stats" : { "stats" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc index 2d16129d15f..98286e9396f 100644 --- a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc @@ -55,7 +55,29 @@ Computing the intraday return based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "intraday_return" : { + "sum" : { + "script" : { + "file": "my_script", + "params" : { + "field" : "change" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
===== Value Script @@ -71,7 +93,8 @@ Computing the sum of squares over all stock tick changes: "daytime_return" : { "sum" : { "field" : "change", - "script" : "_value * _value" } + "script" : "_value * _value" + } } } } diff --git a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc index ed5e23ee339..fa2bfdbbb9d 100644 --- a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc @@ -48,4 +48,26 @@ Counting the values generated by a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_count" : { + "value_count" : { + "script" : { + "file": "my_script", + "params" : { + "field" : "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 2760a125ff8..aaf12079747 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -187,7 +187,7 @@ the options. 
Curl example with update actions: { "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"} } { "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } -{ "script" : "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}, "upsert" : {"counter" : 1}} +{ "script" : { "inline": "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"}, "doc_as_upsert" : true } -------------------------------------------------- diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 4236e06754a..485b31cb037 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -28,9 +28,11 @@ Now, we can execute a script that would increment the counter: [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.counter += count", - "params" : { - "count" : 4 + "script" : { + "inline": "ctx._source.counter += count", + "params" : { + "count" : 4 + } } }' -------------------------------------------------- @@ -41,9 +43,11 @@ will still add it, since its a list): [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.tags += tag", - "params" : { - "tag" : "blue" + "script" : { + "inline": "ctx._source.tags += tag", + "params" : { + "tag" : "blue" + } } }' -------------------------------------------------- @@ -71,9 +75,11 @@ And, we can delete the doc if the tags contain blue, or ignore (noop): [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.tags.contains(tag) ? 
ctx.op = \"delete\" : ctx.op = \"none\"", - "params" : { - "tag" : "blue" + "script" : { + "inline": "ctx._source.tags.contains(tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", + "params" : { + "tag" : "blue" + } } }' -------------------------------------------------- @@ -136,9 +142,11 @@ index the fresh doc: [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.counter += count", - "params" : { - "count" : 4 + "script" : { + "inline": "ctx._source.counter += count", + "params" : { + "count" : 4 + } }, "upsert" : { "counter" : 1 @@ -153,13 +161,15 @@ new `scripted_upsert` parameter with the value `true`. [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{ - "script_id" : "my_web_session_summariser", "scripted_upsert":true, - "params" : { - "pageViewEvent" : { - "url":"foo.com/bar", - "response":404, - "time":"2014-01-01 12:32" + "script" : { + "id": "my_web_session_summariser", + "params" : { + "pageViewEvent" : { + "url":"foo.com/bar", + "response":404, + "time":"2014-01-01 12:32" + } } }, "upsert" : { diff --git a/docs/reference/mapping/transform.asciidoc b/docs/reference/mapping/transform.asciidoc index 5235afcfd96..9377336518a 100644 --- a/docs/reference/mapping/transform.asciidoc +++ b/docs/reference/mapping/transform.asciidoc @@ -10,11 +10,13 @@ field. 
Example: { "example" : { "transform" : { - "script" : "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']", - "params" : { - "variable" : "not used but an example anyway" - }, - "lang": "groovy" + "script" : { + "inline": "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']", + "params" : { + "variable" : "not used but an example anyway" + }, + "lang": "groovy" + } }, "properties": { "title": { "type": "string" }, diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc index 750802c4ec2..0550542b4a2 100644 --- a/docs/reference/modules/scripting.asciidoc +++ b/docs/reference/modules/scripting.asciidoc @@ -29,7 +29,7 @@ GET /_search { "script_fields": { "my_field": { - "script": "1 + my_var", + "inline": "1 + my_var", "params": { "my_var": 2 } @@ -38,7 +38,7 @@ GET /_search } ----------------------------------- -Save the contents of the script as a file called `config/scripts/my_script.groovy` +Save the contents of the `inline` field as a file called `config/scripts/my_script.groovy` on every data node in the cluster: [source,js] @@ -54,7 +54,7 @@ GET /_search { "script_fields": { "my_field": { - "script_file": "my_script", + "file": "my_script", "params": { "my_var": 2 } @@ -67,9 +67,9 @@ GET /_search Additional `lang` plugins are provided to allow to execute scripts in -different languages. All places where a `script` parameter can be used, a `lang` parameter -(on the same level) can be provided to define the language of the -script. The following are the supported scripting languages: +different languages. All places where a script can be used, a `lang` parameter +can be provided to define the language of the script. 
The following are the +supported scripting languages: [cols="<,<,<",options="header",] |======================================================================= @@ -120,7 +120,7 @@ curl -XPOST localhost:9200/_search -d '{ { "script_score": { "lang": "groovy", - "script_file": "calculate-score", + "file": "calculate-score", "params": { "my_modifier": 8 } @@ -162,8 +162,8 @@ curl -XPOST localhost:9200/_scripts/groovy/indexedCalculateScore -d '{ This will create a document with id: `indexedCalculateScore` and type: `groovy` in the `.scripts` index. The type of the document is the language used by the script. -This script can be accessed at query time by appending `_id` to -the script parameter and passing the script id. So `script` becomes `script_id`.: +This script can be accessed at query time by using the `id` script parameter and passing +the script id: [source,js] -------------------------------------------------- @@ -178,7 +178,7 @@ curl -XPOST localhost:9200/_search -d '{ "functions": [ { "script_score": { - "script_id": "indexedCalculateScore", + "id": "indexedCalculateScore", "lang" : "groovy", "params": { "my_modifier": 8 diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 4588b4c7858..a5618a23c48 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -120,12 +120,14 @@ script, and provide parameters to it: [source,js] -------------------------------------------------- "script_score": { - "lang": "lang", - "params": { - "param1": value1, - "param2": value2 - }, - "script": "_score * doc['my_numeric_field'].value / pow(param1, param2)" + "script": { + "lang": "lang", + "params": { + "param1": value1, + "param2": value2 + }, + "inline": "_score * doc['my_numeric_field'].value / pow(param1, param2)" + } } -------------------------------------------------- diff --git 
a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index 899f176578e..4c307f2556f 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -34,9 +34,11 @@ to use the ability to pass parameters to the script itself, for example: }, "filter" : { "script" : { - "script" : "doc['num1'].value > param1" - "params" : { - "param1" : 5 + "script" : { + "inline" : "doc['num1'].value > param1" + "params" : { + "param1" : 5 + } } } } diff --git a/docs/reference/query-dsl/template-query.asciidoc b/docs/reference/query-dsl/template-query.asciidoc index 5d68992ff54..31728fe9993 100644 --- a/docs/reference/query-dsl/template-query.asciidoc +++ b/docs/reference/query-dsl/template-query.asciidoc @@ -12,7 +12,7 @@ GET /_search { "query": { "template": { - "query": { "match": { "text": "{{query_string}}" }}, + "inline": { "match": { "text": "{{query_string}}" }}, "params" : { "query_string" : "all about search" } @@ -45,7 +45,7 @@ GET /_search { "query": { "template": { - "query": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1> + "inline": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1> "params" : { "query_string" : "all about search" } diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 46b169838a0..596aba31d82 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -15,9 +15,11 @@ evaluation>> (based on different fields) for each hit, for example: "script" : "doc['my_field_name'].value * 2" }, "test2" : { - "script" : "doc['my_field_name'].value * factor", - "params" : { - "factor" : 2.0 + "script" : { + "inline": "doc['my_field_name'].value * factor", + "params" : { + "factor" : 2.0 + } } } } diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 
1e4218bb61d..58f42d8fdd8 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -318,10 +318,12 @@ Allow to sort based on custom scripts, here is an example: }, "sort" : { "_script" : { - "script" : "doc['field_name'].value * factor", "type" : "number", - "params" : { - "factor" : 1.1 + "script" : { + "inline": "doc['field_name'].value * factor", + "params" : { + "factor" : 1.1 + } }, "order" : "asc" } diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index bb33628ba3b..b92dbfaa795 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -8,7 +8,7 @@ before they are executed and fill existing templates with template parameters. ------------------------------------------ GET /_search/template { - "template" : { + "inline" : { "query": { "match" : { "{{my_field}}" : "{{my_value}}" } }, "size" : "{{my_size}}" }, @@ -40,7 +40,7 @@ disable scripts per language, source and operation as described in ------------------------------------------ GET /_search/template { - "template": { + "inline": { "query": { "match": { "title": "{{query_string}}" @@ -60,7 +60,7 @@ GET /_search/template ------------------------------------------ GET /_search/template { - "template": { + "inline": { "query": { "terms": { "status": [ @@ -97,7 +97,7 @@ A default value is written as `{{var}}{{^var}}default{{/var}}` for instance: [source,js] ------------------------------------------ { - "template": { + "inline": { "query": { "range": { "line_no": { @@ -212,7 +212,7 @@ via the REST API, should be written as a string: [source,json] -------------------- -"template": "{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" +"inline": 
"{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" -------------------- ================================== @@ -229,9 +229,7 @@ In order to execute the stored template, reference it by it's name under the `te ------------------------------------------ GET /_search/template { - "template": { - "file": "storedTemplate" <1> - }, + "file": "storedTemplate", <1> "params": { "query_string": "search for these words" } @@ -293,9 +291,7 @@ To use an indexed template at search time use: ------------------------------------------ GET /_search/template { - "template": { - "id": "templateName" <1> - }, + "id": "templateName", <1> "params": { "query_string": "search for these words" } diff --git a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 715c1d716d9..a562dc046b2 100644 --- a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -20,7 +20,12 @@ package org.elasticsearch.action.bulk; import com.google.common.collect.Lists; -import org.elasticsearch.action.*; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -140,7 +145,7 @@ public class BulkRequest extends ActionRequest implements Composite sizeInBytes += request.upsertRequest().source().length(); } if (request.script() != null) { - sizeInBytes += request.script().length() * 2; + sizeInBytes += 
request.script().getScript().length() * 2; } return this; } diff --git a/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 8e1da31affa..90ceee99f90 100644 --- a/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -35,11 +35,13 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.script.Template; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; import java.io.IOException; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.search.Scroll.readScroll; @@ -69,9 +71,7 @@ public class SearchRequest extends ActionRequest implements Indic private String preference; private BytesReference templateSource; - private String templateName; - private ScriptService.ScriptType templateType; - private Map templateParams = Collections.emptyMap(); + private Template template; private BytesReference source; @@ -100,9 +100,7 @@ public class SearchRequest extends ActionRequest implements Indic this.routing = searchRequest.routing; this.preference = searchRequest.preference; this.templateSource = searchRequest.templateSource; - this.templateName = searchRequest.templateName; - this.templateType = searchRequest.templateType; - this.templateParams = searchRequest.templateParams; + this.template = searchRequest.template; this.source = searchRequest.source; this.extraSource = searchRequest.extraSource; this.queryCache = searchRequest.queryCache; @@ -390,42 +388,92 @@ public class SearchRequest extends ActionRequest implements 
Indic } /** - * The name of the stored template + * The stored template */ + public void template(Template template) { + this.template = template; + } + + /** + * The stored template + */ + public Template template() { + return template; + } + + /** + * The name of the stored template + * + * @deprecated use {@link #template(Template)} instead. + */ + @Deprecated public void templateName(String templateName) { - this.templateName = templateName; + updateOrCreateScript(templateName, null, null, null); } + /** + * The type of the stored template + * + * @deprecated use {@link #template(Template)} instead. + */ + @Deprecated public void templateType(ScriptService.ScriptType templateType) { - this.templateType = templateType; + updateOrCreateScript(null, templateType, null, null); } /** * Template parameters used for rendering + * + * @deprecated use {@link #template(Template)} instead. */ + @Deprecated public void templateParams(Map params) { - this.templateParams = params; + updateOrCreateScript(null, null, null, params); } /** * The name of the stored template + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public String templateName() { - return templateName; + return template == null ? null : template.getScript(); } /** * The name of the stored template + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public ScriptService.ScriptType templateType() { - return templateType; + return template == null ? null : template.getType(); } /** * Template parameters used for rendering + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public Map templateParams() { - return templateParams; + return template == null ? null : template.getParams(); + } + + private void updateOrCreateScript(String templateContent, ScriptType type, String lang, Map params) { + Template template = template(); + if (template == null) { + template = new Template(templateContent == null ? "" : templateContent, type == null ? 
ScriptType.INLINE : type, lang, null, + params); + } else { + String newTemplateContent = templateContent == null ? template.getScript() : templateContent; + ScriptType newTemplateType = type == null ? template.getType() : type; + String newTemplateLang = lang == null ? template.getLang() : lang; + Map newTemplateParams = params == null ? template.getParams() : params; + template = new Template(newTemplateContent, newTemplateType, MustacheScriptEngineService.NAME, null, newTemplateParams); + } + template(template); } /** @@ -517,10 +565,8 @@ public class SearchRequest extends ActionRequest implements Indic indicesOptions = IndicesOptions.readIndicesOptions(in); templateSource = in.readBytesReference(); - templateName = in.readOptionalString(); - templateType = ScriptService.ScriptType.readFrom(in); if (in.readBoolean()) { - templateParams = (Map) in.readGenericValue(); + template = Template.readTemplate(in); } queryCache = in.readOptionalBoolean(); } @@ -550,12 +596,10 @@ public class SearchRequest extends ActionRequest implements Indic indicesOptions.writeIndicesOptions(out); out.writeBytesReference(templateSource); - out.writeOptionalString(templateName); - ScriptService.ScriptType.writeTo(templateType, out); - boolean existTemplateParams = templateParams != null; - out.writeBoolean(existTemplateParams); - if (existTemplateParams) { - out.writeGenericValue(templateParams); + boolean hasTemplate = template != null; + out.writeBoolean(hasTemplate); + if (hasTemplate) { + template.writeTo(out); } out.writeOptionalBoolean(queryCache); diff --git a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index f7e84b0733a..690ad43b466 100644 --- a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -29,7 +29,9 @@ import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -423,33 +425,60 @@ public class SearchRequestBuilder extends ActionRequestBuilder params) { sourceBuilder().scriptField(name, script, params); return this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param lang The language of the script - * @param script The script to use - * @param params Parameters that the script can use (can be null). + * @param name + * The name that will represent this value in the return hit + * @param lang + * The language of the script + * @param script + * The script to use + * @param params + * Parameters that the script can use (can be null). + * @deprecated Use {@link #addScriptField(String, Script)} instead. 
*/ + @Deprecated public SearchRequestBuilder addScriptField(String name, String lang, String script, Map params) { sourceBuilder().scriptField(name, lang, script, params); return this; @@ -939,16 +968,33 @@ public class SearchRequestBuilder extends ActionRequestBuilder templateParams) { request.templateParams(templateParams); return this; diff --git a/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index ef8144fc095..173587bbbb7 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -94,7 +93,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("op", "create"); ctx.put("_source", upsertDoc); try { - ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... 
@@ -111,7 +110,8 @@ public class UpdateHelper extends AbstractComponent { // (the default) or "none", meaning abort upsert if (!"create".equals(scriptOpChoice)) { if (!"none".equals(scriptOpChoice)) { - logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, request.script); + logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, + request.script.getScript()); } UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); @@ -193,7 +193,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("_source", sourceAndContent.v2()); try { - ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... 
@@ -246,7 +246,7 @@ public class UpdateHelper extends AbstractComponent { update.setGetResult(extractGetResult(request, indexShard.indexService().index().name(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } else { - logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script); + logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } diff --git a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index c3f03db7944..ac1b52a5493 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -19,14 +19,12 @@ package org.elasticsearch.action.update; -import com.google.common.collect.Maps; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -37,11 +35,14 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.VersionType; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -59,13 +60,7 @@ public class UpdateRequest extends InstanceShardOperationRequest private String parent; @Nullable - String script; - @Nullable - ScriptService.ScriptType scriptType; - @Nullable - String scriptLang; - @Nullable - Map scriptParams; + Script script; private String[] fields; @@ -205,105 +200,171 @@ public class UpdateRequest extends InstanceShardOperationRequest return this.shardId; } - public String script() { + public Script script() { return this.script; } - public ScriptService.ScriptType scriptType() { return this.scriptType; } + /** + * The script to execute. Note, make sure not to send different script each times and instead + * use script params if possible with the same (automatically compiled) script. + */ + public UpdateRequest script(Script script) { + this.script = script; + return this; + } + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated + public String scriptString() { + return this.script == null ? null : this.script.getScript(); + } + + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated + public ScriptService.ScriptType scriptType() { + return this.script == null ? null : this.script.getType(); + } + + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated public Map scriptParams() { - return this.scriptParams; + return this.script == null ? null : this.script.getParams(); } /** - * The script to execute. 
Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script, ScriptService.ScriptType scriptType) { - this.script = script; - this.scriptType = scriptType; + updateOrCreateScript(script, scriptType, null, null); return this; } /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script) { - this.script = script; - this.scriptType = ScriptService.ScriptType.INLINE; + updateOrCreateScript(script, ScriptType.INLINE, null, null); return this; } - /** * The language of the script to execute. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest scriptLang(String scriptLang) { - this.scriptLang = scriptLang; + updateOrCreateScript(null, null, scriptLang, null); return this; } + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated public String scriptLang() { - return scriptLang; + return script == null ? null : script.getLang(); } /** * Add a script parameter. 
+ * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest addScriptParam(String name, Object value) { - if (scriptParams == null) { - scriptParams = Maps.newHashMap(); + Script script = script(); + if (script == null) { + HashMap scriptParams = new HashMap(); + scriptParams.put(name, value); + updateOrCreateScript(null, null, null, scriptParams); + } else { + Map scriptParams = script.getParams(); + if (scriptParams == null) { + scriptParams = new HashMap(); + scriptParams.put(name, value); + updateOrCreateScript(null, null, null, scriptParams); + } else { + scriptParams.put(name, value); + } } - scriptParams.put(name, value); return this; } /** * Sets the script parameters to use with the script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest scriptParams(Map scriptParams) { - if (this.scriptParams == null) { - this.scriptParams = scriptParams; - } else { - this.scriptParams.putAll(scriptParams); - } + updateOrCreateScript(null, null, null, scriptParams); return this; } + private void updateOrCreateScript(String scriptContent, ScriptType type, String lang, Map params) { + Script script = script(); + if (script == null) { + script = new Script(scriptContent == null ? "" : scriptContent, type == null ? ScriptType.INLINE : type, lang, params); + } else { + String newScriptContent = scriptContent == null ? script.getScript() : scriptContent; + ScriptType newScriptType = type == null ? script.getType() : type; + String newScriptLang = lang == null ? script.getLang() : lang; + Map newScriptParams = params == null ? script.getParams() : params; + script = new Script(newScriptContent, newScriptType, newScriptLang, newScriptParams); + } + script(script); + } + /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. 
Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script, ScriptService.ScriptType scriptType, @Nullable Map scriptParams) { - this.script = script; - this.scriptType = scriptType; - if (this.scriptParams != null) { - this.scriptParams.putAll(scriptParams); - } else { - this.scriptParams = scriptParams; - } + this.script = new Script(script, scriptType, null, scriptParams); return this; } /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. * - * @param script The script to execute - * @param scriptLang The script language - * @param scriptType The script type - * @param scriptParams The script parameters + * @param script + * The script to execute + * @param scriptLang + * The script language + * @param scriptType + * The script type + * @param scriptParams + * The script parameters + * + * @deprecated Use {@link #script(Script)} instead */ - public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType, @Nullable Map scriptParams) { - this.script = script; - this.scriptLang = scriptLang; - this.scriptType = scriptType; - if (this.scriptParams != null) { - this.scriptParams.putAll(scriptParams); - } else { - this.scriptParams = scriptParams; - } + @Deprecated + public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType, + @Nullable Map scriptParams) { + this.script = new Script(script, scriptType, scriptLang, scriptParams); return this; } @@ -574,6 +635,7 @@ 
public class UpdateRequest extends InstanceShardOperationRequest public UpdateRequest source(BytesReference source) throws Exception { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); + Map scriptParams = null; XContentType xContentType = XContentFactory.xContentType(source); try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) { XContentParser.Token token = parser.nextToken(); @@ -584,6 +646,8 @@ public class UpdateRequest extends InstanceShardOperationRequest while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); + } else if ("script".equals(currentFieldName) && token == XContentParser.Token.START_OBJECT) { + script = Script.parse(parser); } else if ("params".equals(currentFieldName)) { scriptParams = parser.map(); } else if ("scripted_upsert".equals(currentFieldName)) { @@ -604,12 +668,13 @@ public class UpdateRequest extends InstanceShardOperationRequest scriptParameterParser.token(currentFieldName, token, parser); } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + // Don't have a script using the new API so see if it is specified with the old API + if (script == null) { + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), scriptParams); + } } - scriptLang = scriptParameterParser.lang(); } return this; } @@ -639,12 +704,9 @@ public class UpdateRequest extends InstanceShardOperationRequest id = in.readString(); routing = in.readOptionalString(); parent = in.readOptionalString(); - script = in.readOptionalString(); - if(Strings.hasLength(script)) { - scriptType = 
ScriptService.ScriptType.readFrom(in); + if (in.readBoolean()) { + script = Script.readScript(in); } - scriptLang = in.readOptionalString(); - scriptParams = in.readMap(); retryOnConflict = in.readVInt(); refresh = in.readBoolean(); if (in.readBoolean()) { @@ -677,12 +739,11 @@ public class UpdateRequest extends InstanceShardOperationRequest out.writeString(id); out.writeOptionalString(routing); out.writeOptionalString(parent); - out.writeOptionalString(script); - if (Strings.hasLength(script)) { - ScriptService.ScriptType.writeTo(scriptType, out); + boolean hasScript = script != null; + out.writeBoolean(hasScript); + if (hasScript) { + script.writeTo(out); } - out.writeOptionalString(scriptLang); - out.writeMap(scriptParams); out.writeVInt(retryOnConflict); out.writeBoolean(refresh); if (doc == null) { diff --git a/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 3bcb9c640df..7c30c47dd7c 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -80,21 +81,43 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilderctx, which is bound to the entry, * e.g. ctx._source.mycounter += 1. * + */ + public UpdateRequestBuilder setScript(Script script) { + request.script(script); + return this; + } + + /** + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + *

    + * The script works with the variable ctx, which is bound to + * the entry, e.g. ctx._source.mycounter += 1. + * * @see #setScriptLang(String) * @see #setScriptParams(Map) + * + * @deprecated use {@link #setScript(Script)} instead */ + @Deprecated public UpdateRequestBuilder setScript(String script, ScriptService.ScriptType scriptType) { request.script(script, scriptType); return this; } /** - * The language of the script to execute. - * Valid options are: mvel, js, groovy, python, and native (Java)
    + * The language of the script to execute. Valid options are: mvel, js, + * groovy, python, and native (Java)
    * Default: groovy *

    - * Ref: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-scripting.html + * Ref: + * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current + * /modules-scripting.html + * + * @deprecated use {@link #setScript(Script)} instead */ + @Deprecated public UpdateRequestBuilder setScriptLang(String scriptLang) { request.scriptLang(scriptLang); return this; @@ -102,7 +125,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder scriptParams) { request.scriptParams(scriptParams); return this; @@ -110,7 +136,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder params; + private final Script sScript; private final SearchScript script; - public ScriptScoreFunction(String sScript, Map params, SearchScript script) { + public ScriptScoreFunction(Script sScript, SearchScript script) { super(CombineFunction.REPLACE); this.sScript = sScript; - this.params = params; this.script = script; } @@ -114,8 +111,8 @@ public class ScriptScoreFunction extends ScoreFunction { } else { double score = score(docId, subQueryScore.getValue()); String explanation = "script score function, computed with script:\"" + sScript; - if (params != null) { - explanation += "\" and parameters: \n" + params.toString(); + if (sScript.getParams() != null) { + explanation += "\" and parameters: \n" + sScript.getParams().toString(); } Explanation scoreExp = Explanation.match( subQueryScore.getValue(), "_score: ", @@ -131,7 +128,7 @@ public class ScriptScoreFunction extends ScoreFunction { @Override public String toString() { - return "script[" + sScript + "], params [" + params + "]"; + return "script" + sScript.toString(); } } \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 4acee241603..329bad87265 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentType.java 
+++ b/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -19,11 +19,15 @@ package org.elasticsearch.common.xcontent; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.cbor.CborXContent; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import java.io.IOException; + /** * The content type of {@link org.elasticsearch.common.xcontent.XContent}. */ @@ -144,4 +148,18 @@ public enum XContentType { public abstract String shortName(); public abstract XContent xContent(); + + public static XContentType readFrom(StreamInput in) throws IOException { + int index = in.readVInt(); + for (XContentType contentType : values()) { + if (index == contentType.index) { + return contentType; + } + } + throw new IllegalStateException("Unknown XContentType with index [" + index + "]"); + } + + public static void writeTo(XContentType contentType, StreamOutput out) throws IOException { + out.writeVInt(contentType.index); + } } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 7f8bb8ffa0a..b306396700a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -134,8 +134,18 @@ public class DocumentMapper implements ToXContent { return this; } - public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map parameters) { - sourceTransforms.add(new ScriptTransform(scriptService, script, scriptType, language, parameters)); + public Builder transform(ScriptService scriptService, Script script) { + sourceTransforms.add(new ScriptTransform(scriptService, script)); + return this; + } + + /** + * @deprecated 
Use {@link #transform(ScriptService, Script)} instead. + */ + @Deprecated + public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, + Map parameters) { + sourceTransforms.add(new ScriptTransform(scriptService, new Script(script, scriptType, language, parameters))); return this; } @@ -388,20 +398,20 @@ public class DocumentMapper implements ToXContent { private void addFieldMappers(Collection fieldMappers) { assert mappingLock.isWriteLockedByCurrentThread(); - this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); + this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); mapperService.addFieldMappers(fieldMappers); } private void addObjectMappers(Collection objectMappers) { assert mappingLock.isWriteLockedByCurrentThread(); - MapBuilder builder = MapBuilder.newMapBuilder(this.objectMappers); - for (ObjectMapper objectMapper : objectMappers) { - builder.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNestedObjects = true; + MapBuilder builder = MapBuilder.newMapBuilder(this.objectMappers); + for (ObjectMapper objectMapper : objectMappers) { + builder.put(objectMapper.fullPath(), objectMapper); + if (objectMapper.nested().isNested()) { + hasNestedObjects = true; + } } - } - this.objectMappers = builder.immutableMap(); + this.objectMappers = builder.immutableMap(); mapperService.addObjectMappers(objectMappers); } @@ -454,15 +464,15 @@ public class DocumentMapper implements ToXContent { public MergeResult merge(Mapping mapping, boolean simulate) { try (ReleasableLock lock = mappingWriteLock.acquire()) { - final MergeResult mergeResult = newMergeContext(simulate); - this.mapping.merge(mapping, mergeResult); - if (simulate == false) { - addFieldMappers(mergeResult.getNewFieldMappers()); - addObjectMappers(mergeResult.getNewObjectMappers()); - refreshSource(); - } - return mergeResult; + final MergeResult mergeResult = newMergeContext(simulate); + 
this.mapping.merge(mapping, mergeResult); + if (simulate == false) { + addFieldMappers(mergeResult.getNewFieldMappers()); + addObjectMappers(mergeResult.getNewObjectMappers()); + refreshSource(); } + return mergeResult; + } } private void refreshSource() throws ElasticsearchGenerationException { @@ -498,28 +508,13 @@ public class DocumentMapper implements ToXContent { private static class ScriptTransform implements SourceTransform { private final ScriptService scriptService; /** - * Contents of the script to transform the source document before indexing. + * The script to transform the source document before indexing. */ - private final String script; - /** - * The type of the script to run. - */ - private final ScriptType scriptType; - /** - * Language of the script to transform the source document before indexing. - */ - private final String language; - /** - * Parameters passed to the transform script. - */ - private final Map parameters; + private final Script script; - public ScriptTransform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map parameters) { + public ScriptTransform(ScriptService scriptService, Script script) { this.scriptService = scriptService; this.script = script; - this.scriptType = scriptType; - this.language = language; - this.parameters = parameters; } @Override @@ -527,7 +522,7 @@ public class DocumentMapper implements ToXContent { public Map transformSourceAsMap(Map sourceAsMap) { try { // We use the ctx variable and the _source name to be consistent with the update api. 
- ExecutableScript executable = scriptService.executable(new Script(language, script, scriptType, parameters), ScriptContext.Standard.MAPPING); + ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING); Map ctx = new HashMap<>(1); ctx.put("_source", sourceAsMap); executable.setNextVar("ctx", ctx); @@ -541,16 +536,7 @@ public class DocumentMapper implements ToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("script", script); - if (language != null) { - builder.field("lang", language); - } - if (parameters != null) { - builder.field("params", parameters); - } - builder.endObject(); - return builder; + return script.toXContent(builder, params); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 4bb9e8d830e..d5a3ff1f9ad 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -71,10 +71,8 @@ import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.similarity.SimilarityLookupService; -import org.elasticsearch.script.ScriptParameterParser; -import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.ScriptService.ScriptType; import java.util.Iterator; import java.util.List; @@ -238,7 +236,6 @@ public class DocumentMapperParser extends AbstractIndexComponent { Object fieldNode = entry.getValue(); if ("transform".equals(fieldName)) { - iterator.remove(); if (fieldNode instanceof Map) { parseTransform(docBuilder, (Map) 
fieldNode, parserContext.indexVersionCreated()); } else if (fieldNode instanceof List) { @@ -251,6 +248,7 @@ public class DocumentMapperParser extends AbstractIndexComponent { } else { throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode); } + iterator.remove(); } else { Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName); if (typeParser != null) { @@ -296,23 +294,10 @@ public class DocumentMapperParser extends AbstractIndexComponent { return remainingFields.toString(); } - @SuppressWarnings("unchecked") private void parseTransform(DocumentMapper.Builder docBuilder, Map transformConfig, Version indexVersionCreated) { - ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - scriptParameterParser.parseConfig(transformConfig, true); - - String script = null; - ScriptType scriptType = null; - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); - } - + Script script = Script.parse(transformConfig, true); if (script != null) { - String scriptLang = scriptParameterParser.lang(); - Map params = (Map)transformConfig.remove("params"); - docBuilder.transform(scriptService, script, scriptType, scriptLang, params); + docBuilder.transform(scriptService, script); } checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: "); } diff --git a/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index fe2e572522b..c9070a24c37 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -27,7 +27,9 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.ShapeBuilder; import 
org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import java.util.Collection; import java.util.Map; @@ -562,6 +564,13 @@ public abstract class QueryBuilders { return new GeoShapeQueryBuilder(name, shape); } + /** + * Facilitates creating template query requests using an inline script + */ + public static TemplateQueryBuilder templateQuery(Template template) { + return new TemplateQueryBuilder(template); + } + /** * Facilitates creating template query requests using an inline script */ @@ -596,6 +605,18 @@ public abstract class QueryBuilders { * * @param script The script to filter by. */ + public static ScriptQueryBuilder scriptQuery(Script script) { + return new ScriptQueryBuilder(script); + } + + /** + * A builder for filter based on a script. + * + * @param script + * The script to filter by. + * @deprecated Use {@link #scriptQuery(Script)} instead. 
+ */ + @Deprecated public static ScriptQueryBuilder scriptQuery(String script) { return new ScriptQueryBuilder(script); } diff --git a/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 9ae05159953..8a6f72190c2 100644 --- a/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -20,34 +20,56 @@ package org.elasticsearch.index.query; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import java.io.IOException; +import java.util.HashMap; import java.util.Map; -import static com.google.common.collect.Maps.newHashMap; - public class ScriptQueryBuilder extends QueryBuilder { - private final String script; + private Script script; + @Deprecated + private String scriptString; + + @Deprecated private Map params; + @Deprecated private String lang; private String queryName; - public ScriptQueryBuilder(String script) { + public ScriptQueryBuilder(Script script) { this.script = script; } + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated + public ScriptQueryBuilder(String script) { + this.scriptString = script; + } + + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated public ScriptQueryBuilder addParam(String name, Object value) { if (params == null) { - params = newHashMap(); + params = new HashMap<>(); } params.put(name, value); return this; } + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated public ScriptQueryBuilder params(Map params) { if (this.params == null) { this.params = params; @@ -59,7 +81,10 @@ public class ScriptQueryBuilder extends QueryBuilder { /** * Sets the script language. + * + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. 
*/ + @Deprecated public ScriptQueryBuilder lang(String lang) { this.lang = lang; return this; @@ -74,15 +99,23 @@ public class ScriptQueryBuilder extends QueryBuilder { } @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { + protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { + builder.startObject(ScriptQueryParser.NAME); - builder.field("script", script); - if (this.params != null) { - builder.field("params", this.params); - } - if (this.lang != null) { - builder.field("lang", lang); + if (script != null) { + builder.field(ScriptField.SCRIPT.getPreferredName(), script); + } else { + if (this.scriptString != null) { + builder.field("script", scriptString); + } + if (this.params != null) { + builder.field("params", this.params); + } + if (this.lang != null) { + builder.field("lang", lang); + } } + if (queryName != null) { builder.field("_name", queryName); } diff --git a/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java b/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java index 84dad2b5d92..9912cb94bac 100644 --- a/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.query; +import com.google.common.base.Objects; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -29,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; @@ -38,7 +41,6 @@ import 
org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.Map; -import java.util.Objects; import static com.google.common.collect.Maps.newHashMap; @@ -55,7 +57,7 @@ public class ScriptQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[] { NAME }; } @Override @@ -66,13 +68,11 @@ public class ScriptQueryParser implements QueryParser { XContentParser.Token token; // also, when caching, since its isCacheable is false, will result in loading all bit set... - String script = null; - String scriptLang; + Script script = null; Map params = null; String queryName = null; String currentFieldName = null; - ScriptService.ScriptType scriptType = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -80,7 +80,9 @@ public class ScriptQueryParser implements QueryParser { } else if (parseContext.isDeprecatedSetting(currentFieldName)) { // skip } else if (token == XContentParser.Token.START_OBJECT) { - if ("params".equals(currentFieldName)) { + if (ScriptField.SCRIPT.match(currentFieldName)) { + script = Script.parse(parser); + } else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs) params = parser.map(); } else { throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); @@ -88,27 +90,29 @@ public class ScriptQueryParser implements QueryParser { } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); - } else if (!scriptParameterParser.token(currentFieldName, token, parser)){ + } else if (!scriptParameterParser.token(currentFieldName, token, parser)) { throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); } } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if 
(scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + if (script == null) { // Didn't find anything using the new API so try using the old one instead + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + if (params == null) { + params = newHashMap(); + } + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params); + } + } else if (params != null) { + throw new QueryParsingException(parseContext, "script params must be specified inside script object in a [script] filter"); } - scriptLang = scriptParameterParser.lang(); if (script == null) { throw new QueryParsingException(parseContext, "script must be provided with a [script] filter"); } - if (params == null) { - params = newHashMap(); - } - Query query = new ScriptQuery(scriptLang, script, scriptType, params, parseContext.scriptService(), parseContext.lookup()); + Query query = new ScriptQuery(script, parseContext.scriptService(), parseContext.lookup()); if (queryName != null) { parseContext.addNamedQuery(queryName, query); } @@ -117,14 +121,13 @@ public class ScriptQueryParser implements QueryParser { static class ScriptQuery extends Query { - private final String script; - private final Map params; + private final Script script; + private final SearchScript searchScript; - private ScriptQuery(String scriptLang, String script, ScriptService.ScriptType scriptType, Map params, ScriptService scriptService, SearchLookup searchLookup) { + public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { this.script = script; - this.params = params; - this.searchScript = scriptService.search(searchLookup, new Script(scriptLang, script, scriptType, newHashMap(params)), ScriptContext.Standard.SEARCH); + this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); } @Override @@ -137,23 +140,20 @@ 
public class ScriptQueryParser implements QueryParser { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (super.equals(o) == false) return false; - - ScriptQuery that = (ScriptQuery) o; - - if (params != null ? !params.equals(that.params) : that.params != null) return false; - if (script != null ? !script.equals(that.script) : that.script != null) return false; - - return true; + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + ScriptQuery other = (ScriptQuery) obj; + return Objects.equal(script, other.script); } @Override public int hashCode() { + final int prime = 31; int result = super.hashCode(); - result = 31 * result + Objects.hashCode(script); - result = 31 * result + Objects.hashCode(params); + result = prime * result + Objects.hashCode(script); return result; } diff --git a/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java index 7154fe93fc0..852977fa0db 100644 --- a/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import java.io.IOException; import java.util.Map; @@ -29,51 +30,58 @@ import java.util.Map; * */ public class TemplateQueryBuilder extends QueryBuilder { + /** Template to fill. */ + private Template template; /** Parameters to fill the template with. */ private Map vars; /** Template to fill.*/ - private String template; + private String templateString; private ScriptService.ScriptType templateType; /** - * @param template the template to use for that query. - * @param vars the parameters to fill the template with. 
+ * @param template + * the template to use for that query. * */ + public TemplateQueryBuilder(Template template) { + this.template = template; + } + + /** + * @param template + * the template to use for that query. + * @param vars + * the parameters to fill the template with. + * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. + * */ + @Deprecated public TemplateQueryBuilder(String template, Map vars) { this(template, ScriptService.ScriptType.INLINE, vars); } /** - * @param template the template to use for that query. - * @param vars the parameters to fill the template with. - * @param templateType what kind of template (INLINE,FILE,ID) + * @param template + * the template to use for that query. + * @param vars + * the parameters to fill the template with. + * @param templateType + * what kind of template (INLINE,FILE,ID) + * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. * */ + @Deprecated public TemplateQueryBuilder(String template, ScriptService.ScriptType templateType, Map vars) { - this.template = template; - this.vars =vars; + this.templateString = template; + this.vars = vars; this.templateType = templateType; } @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TemplateQueryParser.NAME); - String fieldname; - switch(templateType){ - case FILE: - fieldname = "file"; - break; - case INDEXED: - fieldname = "id"; - break; - case INLINE: - fieldname = TemplateQueryParser.QUERY; - break; - default: - throw new IllegalArgumentException("Unknown template type " + templateType); + protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { + builder.field(TemplateQueryParser.NAME); + if (template == null) { + new Template(templateString, templateType, null, null, this.vars).toXContent(builder, builderParams); + } else { + template.toXContent(builder, builderParams); } - builder.field(fieldname, template); - 
builder.field(TemplateQueryParser.PARAMS, vars); - builder.endObject(); } } diff --git a/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java b/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java index 32872f8f7a0..040df24ec9f 100644 --- a/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java @@ -22,22 +22,20 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.script.Template; import java.io.IOException; import java.util.HashMap; import java.util.Map; /** - * In the simplest case, parse template string and variables from the request, compile the template and - * execute the template against the given variables. + * In the simplest case, parse template string and variables from the request, + * compile the template and execute the template against the given variables. * */ public class TemplateQueryParser implements QueryParser { @@ -45,12 +43,10 @@ public class TemplateQueryParser implements QueryParser { public static final String NAME = "template"; /** Name of query parameter containing the template string. */ public static final String QUERY = "query"; - /** Name of query parameter containing the template parameters. 
*/ - public static final String PARAMS = "params"; private final ScriptService scriptService; - private final static Map parametersToTypes = new HashMap<>(); + private final static Map parametersToTypes = new HashMap<>(); static { parametersToTypes.put("query", ScriptService.ScriptType.INLINE); parametersToTypes.put("file", ScriptService.ScriptType.FILE); @@ -64,21 +60,23 @@ public class TemplateQueryParser implements QueryParser { @Override public String[] names() { - return new String[] {NAME}; + return new String[] { NAME }; } /** - * Parses the template query replacing template parameters with provided values. - * Handles both submitting the template as part of the request as well as - * referencing only the template name. - * @param parseContext parse context containing the templated query. + * Parses the template query replacing template parameters with provided + * values. Handles both submitting the template as part of the request as + * well as referencing only the template name. + * + * @param parseContext + * parse context containing the templated query. */ @Override @Nullable public Query parse(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - TemplateContext templateContext = parse(parser, PARAMS, parametersToTypes); - ExecutableScript executable = this.scriptService.executable(new Script(MustacheScriptEngineService.NAME, templateContext.template(), templateContext.scriptType(), templateContext.params()), ScriptContext.Standard.SEARCH); + Template template = parse(parser); + ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH); BytesReference querySource = (BytesReference) executable.run(); @@ -89,72 +87,20 @@ public class TemplateQueryParser implements QueryParser { } } - public static TemplateContext parse(XContentParser parser, String paramsFieldname, String ... 
parameters) throws IOException { + public static Template parse(XContentParser parser, String... parameters) throws IOException { - Map parameterMap = new HashMap<>(parametersToTypes); + Map parameterMap = new HashMap<>(parametersToTypes); for (String parameter : parameters) { parameterMap.put(parameter, ScriptService.ScriptType.INLINE); } - return parse(parser,paramsFieldname,parameterMap); + return parse(parser, parameterMap); } - public static TemplateContext parse(XContentParser parser, String paramsFieldname) throws IOException { - return parse(parser,paramsFieldname,parametersToTypes); + public static Template parse(XContentParser parser) throws IOException { + return parse(parser, parametersToTypes); } - public static TemplateContext parse(XContentParser parser, String paramsFieldname, Map parameterMap) throws IOException { - Map params = null; - String templateNameOrTemplateContent = null; - - String currentFieldName = null; - XContentParser.Token token; - ScriptService.ScriptType type = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parameterMap.containsKey(currentFieldName)) { - type = parameterMap.get(currentFieldName); - if (token == XContentParser.Token.START_OBJECT) { - XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent()); - builder.copyCurrentStructure(parser); - templateNameOrTemplateContent = builder.string(); - } else { - templateNameOrTemplateContent = parser.text(); - } - } else if (paramsFieldname.equals(currentFieldName)) { - params = parser.map(); - } - } - - return new TemplateContext(type, templateNameOrTemplateContent, params); - } - - public static class TemplateContext { - private Map params; - private String template; - private ScriptService.ScriptType type; - - public TemplateContext(ScriptService.ScriptType type, String template, Map params) { - this.params = params; 
- this.template = template; - this.type = type; - } - - public Map params() { - return params; - } - - public String template() { - return template; - } - - public ScriptService.ScriptType scriptType(){ - return type; - } - - @Override - public String toString(){ - return type + " " + template; - } + public static Template parse(XContentParser parser, Map parameterMap) throws IOException { + return Template.parse(parser, parameterMap); } } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java b/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java index 8ae38b5008f..ef9865395b3 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilde import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; +import org.elasticsearch.script.Script; import java.util.Map; @@ -56,18 +57,38 @@ public class ScoreFunctionBuilders { return new LinearDecayFunctionBuilder(fieldName, null, scale); } + public static ScriptScoreFunctionBuilder scriptFunction(Script script) { + return (new ScriptScoreFunctionBuilder()).script(script); + } + + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script) { return (new ScriptScoreFunctionBuilder()).script(script); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. 
+ */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang) { return (new ScriptScoreFunctionBuilder()).script(script).lang(lang); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang, Map params) { return (new ScriptScoreFunctionBuilder()).script(script).lang(lang).params(params); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, Map params) { return (new ScriptScoreFunctionBuilder()).script(script).params(params); } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java index 3f715512bab..20dca88788a 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java @@ -19,12 +19,13 @@ package org.elasticsearch.index.query.functionscore.script; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; - -import com.google.common.collect.Maps; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import java.io.IOException; +import java.util.HashMap; import java.util.Map; /** @@ -33,7 +34,9 @@ import java.util.Map; */ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { - private String script; + private Script script; + + private String scriptString; private String lang; @@ -43,22 +46,35 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { } - public ScriptScoreFunctionBuilder 
script(String script) { + public ScriptScoreFunctionBuilder script(Script script) { this.script = script; return this; } /** - * Sets the language of the script. + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated + public ScriptScoreFunctionBuilder script(String script) { + this.scriptString = script; + return this; + } + + /** + * Sets the language of the script.@deprecated Use {@link #script(Script)} + * instead + */ + @Deprecated public ScriptScoreFunctionBuilder lang(String lang) { this.lang = lang; return this; } /** - * Additional parameters that can be provided to the script. + * Additional parameters that can be provided to the script.@deprecated Use + * {@link #script(Script)} instead */ + @Deprecated public ScriptScoreFunctionBuilder params(Map params) { if (this.params == null) { this.params = params; @@ -69,11 +85,13 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { } /** - * Additional parameters that can be provided to the script. + * Additional parameters that can be provided to the script.@deprecated Use + * {@link #script(Script)} instead */ + @Deprecated public ScriptScoreFunctionBuilder param(String key, Object value) { if (params == null) { - params = Maps.newHashMap(); + params = new HashMap<>(); } params.put(key, value); return this; @@ -82,12 +100,18 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.field("script", script); - if (lang != null) { - builder.field("lang", lang); - } - if (this.params != null) { - builder.field("params", this.params); + if (script != null) { + builder.field(ScriptField.SCRIPT.getPreferredName(), script); + } else { + if (scriptString != null) { + builder.field("script", scriptString); + } + if (lang != null) { + builder.field("lang", lang); + } + if (this.params != null) { + builder.field("params", this.params); + 
} } builder.endObject(); } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java index b01eaee3615..72a592da5b3 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java @@ -29,15 +29,17 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import java.io.IOException; import java.util.Map; +import static com.google.common.collect.Maps.newHashMap; + /** * */ @@ -57,16 +59,17 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { @Override public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - String script = null; + Script script = null; Map vars = null; - ScriptService.ScriptType scriptType = null; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - if ("params".equals(currentFieldName)) { + if (ScriptField.SCRIPT.match(currentFieldName)) { + script = Script.parse(parser); + } else 
if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs) vars = parser.map(); } else { throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); @@ -78,19 +81,26 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + if (script == null) { // Didn't find anything using the new API so try using the old one instead + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + if (vars == null) { + vars = newHashMap(); + } + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), vars); + } + } else if (vars != null) { + throw new QueryParsingException(parseContext, "script params must be specified inside script object"); } + if (script == null) { throw new QueryParsingException(parseContext, NAMES[0] + " requires 'script' field"); } SearchScript searchScript; try { - searchScript = parseContext.scriptService().search(parseContext.lookup(), new Script(scriptParameterParser.lang(), script, scriptType, vars), ScriptContext.Standard.SEARCH); - return new ScriptScoreFunction(script, vars, searchScript); + searchScript = parseContext.scriptService().search(parseContext.lookup(), script, ScriptContext.Standard.SEARCH); + return new ScriptScoreFunction(script, searchScript); } catch (Exception e) { throw new QueryParsingException(parseContext, NAMES[0] + " the script could not be loaded", e); } diff --git a/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java b/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java index 0987e5dd4f6..8e991c8f130 100644 --- 
a/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -148,33 +149,60 @@ public abstract class BaseInnerHitBuilder impleme * @param name The name that will represent this value in the return hit * @param script The script to use */ + public T addScriptField(String name, Script script) { + sourceBuilder().scriptField(name, script); + return (T) this; + } + + /** + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. + * + * @param name + * The name that will represent this value in the return hit + * @param script + * The script to use + * @deprecated Use {@link #addScriptField(String, Script)} instead. + */ + @Deprecated public T addScriptField(String name, String script) { sourceBuilder().scriptField(name, script); return (T) this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param script The script to use - * @param params Parameters that the script can use. 
+ * @param name + * The name that will represent this value in the return hit + * @param script + * The script to use + * @param params + * Parameters that the script can use. + * @deprecated Use {@link #addScriptField(String, Script)} instead. */ + @Deprecated public T addScriptField(String name, String script, Map params) { sourceBuilder().scriptField(name, script, params); return (T) this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param lang The language of the script - * @param script The script to use - * @param params Parameters that the script can use (can be null). + * @param name + * The name that will represent this value in the return hit + * @param lang + * The language of the script + * @param script + * The script to use + * @param params + * Parameters that the script can use (can be null). + * @deprecated Use {@link #addScriptField(String, Script)} instead. 
*/ + @Deprecated public T addScriptField(String name, String lang, String script, Map params) { sourceBuilder().scriptField(name, lang, script, params); return (T) this; diff --git a/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index d019e598cac..a23780db62e 100644 --- a/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -31,12 +31,20 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -68,16 +76,13 @@ public class RestUpdateAction extends BaseRestHandler { scriptParameterParser.parseParams(request); ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); if (scriptValue != null) { - updateRequest.script(scriptValue.script(), scriptValue.scriptType()); - } - String scriptLang = scriptParameterParser.lang(); - if (scriptLang != null) { - updateRequest.scriptLang(scriptLang); - } - for (Map.Entry entry : 
request.params().entrySet()) { - if (entry.getKey().startsWith("sp_")) { - updateRequest.addScriptParam(entry.getKey().substring(3), entry.getValue()); + Map scriptParams = new HashMap<>(); + for (Map.Entry entry : request.params().entrySet()) { + if (entry.getKey().startsWith("sp_")) { + scriptParams.put(entry.getKey().substring(3), entry.getValue()); + } } + updateRequest.script(new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), scriptParams)); } String sField = request.param("fields"); if (sField != null) { diff --git a/src/main/java/org/elasticsearch/script/AbstractScriptParser.java b/src/main/java/org/elasticsearch/script/AbstractScriptParser.java new file mode 100644 index 00000000000..8198dd18ebc --- /dev/null +++ b/src/main/java/org/elasticsearch/script/AbstractScriptParser.java @@ -0,0 +1,196 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.Script.ScriptParseException; +import org.elasticsearch.script.ScriptService.ScriptType; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; + +public abstract class AbstractScriptParser { + + protected abstract String parseInlineScript(XContentParser parser) throws IOException; + + protected abstract S createScript(String script, ScriptType type, String lang, Map params); + + protected abstract S createSimpleScript(XContentParser parser) throws IOException; + + @Deprecated + protected Map getAdditionalScriptParameters() { + return Collections.emptyMap(); + } + + public S parse(XContentParser parser) throws IOException { + + XContentParser.Token token = parser.currentToken(); + // If the parser hasn't yet been pushed to the first token, do it now + if (token == null) { + token = parser.nextToken(); + } + + if (token == XContentParser.Token.VALUE_STRING) { + return createSimpleScript(parser); + } + if (token != XContentParser.Token.START_OBJECT) { + throw new ScriptParseException("expected a string value or an object, but found [{}] instead", token); + } + + String script = null; + ScriptType type = null; + String lang = getDefaultScriptLang(); + Map params = null; + + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (ScriptType.INLINE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_INLINE.match(currentFieldName)) { + type = ScriptType.INLINE; + script = parseInlineScript(parser); + } else if (ScriptType.FILE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_FILE.match(currentFieldName)) { + type = 
ScriptType.FILE; + if (token == XContentParser.Token.VALUE_STRING) { + script = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptType.INDEXED.getParseField().match(currentFieldName) || ScriptService.SCRIPT_ID.match(currentFieldName)) { + type = ScriptType.INDEXED; + if (token == XContentParser.Token.VALUE_STRING) { + script = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptField.LANG.match(currentFieldName) || ScriptService.SCRIPT_LANG.match(currentFieldName)) { + if (token == XContentParser.Token.VALUE_STRING) { + lang = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptField.PARAMS.match(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + params = parser.map(); + } else { + throw new ScriptParseException("expected an object for field [{}], but found [{}]", currentFieldName, token); + } + } else { + // TODO remove this in 2.0 + ScriptType paramScriptType = getAdditionalScriptParameters().get(currentFieldName); + if (paramScriptType != null) { + script = parseInlineScript(parser); + type = paramScriptType; + } else { + throw new ScriptParseException("unexpected field [{}]", currentFieldName); + } + } + } + if (script == null) { + throw new ScriptParseException("expected one of [{}], [{}] or [{}] fields, but found none", ScriptType.INLINE.getParseField() + .getPreferredName(), ScriptType.FILE.getParseField().getPreferredName(), ScriptType.INDEXED.getParseField() + .getPreferredName()); + } + assert type != null : "if script is not null, type should definitely not be null"; + return createScript(script, type, lang, params); + + } + + /** + * @return the default script language for this parser or null 
+ * to use the default set in the ScriptService + */ + protected String getDefaultScriptLang() { + return null; + } + + public Script parse(Map config, boolean removeMatchedEntries) { + String script = null; + ScriptType type = null; + String lang = null; + Map params = null; + for (Iterator> itr = config.entrySet().iterator(); itr.hasNext();) { + Entry entry = itr.next(); + String parameterName = entry.getKey(); + Object parameterValue = entry.getValue(); + if (ScriptField.LANG.match(parameterName) || ScriptService.SCRIPT_LANG.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + lang = (String) parameterValue; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (ScriptField.PARAMS.match(parameterName)) { + if (parameterValue instanceof Map || parameterValue == null) { + params = (Map) parameterValue; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (ScriptType.INLINE.getParseField().match(parameterName) || ScriptService.SCRIPT_INLINE.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.INLINE; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (ScriptType.FILE.getParseField().match(parameterName) || ScriptService.SCRIPT_FILE.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.FILE; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if 
(ScriptType.INDEXED.getParseField().match(parameterName) || ScriptService.SCRIPT_ID.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.INDEXED; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } + } + if (script == null) { + throw new ScriptParseException("expected one of [{}], [{}] or [{}] fields, but found none", ScriptType.INLINE.getParseField() + .getPreferredName(), ScriptType.FILE.getParseField().getPreferredName(), ScriptType.INDEXED.getParseField() + .getPreferredName()); + } + assert type != null : "if script is not null, type should definitely not be null"; + return createScript(script, type, lang, params); + } + +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/script/Script.java b/src/main/java/org/elasticsearch/script/Script.java index 655ff82c08e..d826eaad8ed 100644 --- a/src/main/java/org/elasticsearch/script/Script.java +++ b/src/main/java/org/elasticsearch/script/Script.java @@ -19,52 +19,93 @@ package org.elasticsearch.script; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.ScriptService.ScriptType; + +import java.io.IOException; import java.util.Map; - -import static org.elasticsearch.script.ScriptService.ScriptType; - /** * Script holds all the parameters necessary to compile or find in cache and 
then execute a script. */ -public class Script { +public class Script implements ToXContent, Streamable { - private final String lang; - private final String script; - private final ScriptType type; - private final Map params; + public static final ScriptType DEFAULT_TYPE = ScriptType.INLINE; + private static final ScriptParser PARSER = new ScriptParser(); + + private String script; + private @Nullable ScriptType type; + private @Nullable String lang; + private @Nullable Map params; + + /** + * For Serialization + */ + Script() { + } + + /** + * Constructor for simple inline script. The script will have no lang or + * params set. + * + * @param script + * The inline script to execute. + */ + public Script(String script) { + if (script == null) { + throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); + } + this.script = script; + } + + /** + * For sub-classes to use to override the default language + */ + protected Script(String script, String lang) { + if (script == null) { + throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); + } + this.script = script; + this.lang = lang; + } /** * Constructor for Script. - * @param lang The language of the script to be compiled/executed. - * @param script The cache key of the script to be compiled/executed. For dynamic scripts this is the actual - * script source code. For indexed scripts this is the id used in the request. For on disk scripts - * this is the file name. - * @param type The type of script -- dynamic, indexed, or file. - * @param params The map of parameters the script will be executed with. + * + * @param script + * The cache key of the script to be compiled/executed. For + * inline scripts this is the actual script source code. For + * indexed scripts this is the id used in the request. For on + * file scripts this is the file name. + * @param type + * The type of script -- dynamic, indexed, or file. 
+ * @param lang + * The language of the script to be compiled/executed. + * @param params + * The map of parameters the script will be executed with. */ - public Script(String lang, String script, ScriptType type, Map params) { + public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); } if (type == null) { throw new IllegalArgumentException("The parameter type (ScriptType) must not be null in Script."); } - - this.lang = lang; this.script = script; this.type = type; + this.lang = lang; this.params = params; } - /** - * Method for getting language. - * @return The language of the script to be compiled/executed. - */ - public String getLang() { - return lang; - } - /** * Method for getting the script. * @return The cache key of the script to be compiled/executed. For dynamic scripts this is the actual @@ -77,17 +118,190 @@ public class Script { /** * Method for getting the type. - * @return The type of script -- dynamic, indexed, or file. + * + * @return The type of script -- inline, indexed, or file. */ public ScriptType getType() { - return type; + return type == null ? DEFAULT_TYPE : type; + } + + /** + * Method for getting language. + * + * @return The language of the script to be compiled/executed. + */ + public String getLang() { + return lang; } /** * Method for getting the parameters. + * * @return The map of parameters the script will be executed with. 
*/ public Map getParams() { return params; } + + @Override + public final void readFrom(StreamInput in) throws IOException { + script = in.readString(); + if (in.readBoolean()) { + type = ScriptType.readFrom(in); + } + lang = in.readOptionalString(); + if (in.readBoolean()) { + params = in.readMap(); + } + doReadFrom(in); + } + + protected void doReadFrom(StreamInput in) throws IOException { + // For sub-classes to Override + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + out.writeString(script); + boolean hasType = type != null; + out.writeBoolean(hasType); + if (hasType) { + ScriptType.writeTo(type, out); + } + out.writeOptionalString(lang); + boolean hasParams = params != null; + out.writeBoolean(hasParams); + if (hasParams) { + out.writeMap(params); + } + doWriteTo(out); + } + + protected void doWriteTo(StreamOutput out) throws IOException { + // For sub-classes to Override + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { + if (type == null) { + return builder.value(script); + } + + builder.startObject(); + scriptFieldToXContent(script, type, builder, builderParams); + if (lang != null) { + builder.field(ScriptField.LANG.getPreferredName(), lang); + } + if (params != null) { + builder.field(ScriptField.PARAMS.getPreferredName(), params); + } + builder.endObject(); + return builder; + } + + protected XContentBuilder scriptFieldToXContent(String script, ScriptType type, XContentBuilder builder, Params builderParams) + throws IOException { + builder.field(type.getParseField().getPreferredName(), script); + return builder; + } + + public static Script readScript(StreamInput in) throws IOException { + Script script = new Script(); + script.readFrom(in); + return script; + } + + public static Script parse(Map config, boolean removeMatchedEntries) { + return PARSER.parse(config, removeMatchedEntries); + } + + public static Script parse(XContentParser 
parser) throws IOException { + return PARSER.parse(parser); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((lang == null) ? 0 : lang.hashCode()); + result = prime * result + ((params == null) ? 0 : params.hashCode()); + result = prime * result + ((script == null) ? 0 : script.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Script other = (Script) obj; + if (lang == null) { + if (other.lang != null) + return false; + } else if (!lang.equals(other.lang)) + return false; + if (params == null) { + if (other.params != null) + return false; + } else if (!params.equals(other.params)) + return false; + if (script == null) { + if (other.script != null) + return false; + } else if (!script.equals(other.script)) + return false; + if (type != other.type) + return false; + return true; + } + + @Override + public String toString() { + return "[script: " + script + ", type: " + type.getParseField().getPreferredName() + ", lang: " + lang + ", params: " + params + + "]"; + } + + private static class ScriptParser extends AbstractScriptParser